dwarf2out: Always emit required 0 entries for DWARF 5 in *.debug_line [PR98796]
[gcc.git] / gcc / dwarf2out.c
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2021 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternatively, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 dwarf_offset_size. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (dwarf_offset_size == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (dwarf_offset_size == 4 ? "-4" : "-12")
193 #endif
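
/* For illustration only (a hedged sketch, not part of the emitted code
   paths): with 64-bit DWARF, i.e. dwarf_offset_size == 8, an initial
   length field is written as the 4-byte escape value 0xffffffff followed
   by the real 8-byte length, along the lines of

     dw2_asm_output_data (4, 0xffffffff,
                          "Initial length escape value indicating "
                          "64-bit DWARF extension");
     dw2_asm_output_delta (dwarf_offset_size, end_label, begin_label,
                           "Unit length");

   where end_label and begin_label are hypothetical labels bracketing the
   unit.  output_fde and output_call_frame_info below use this pattern for
   the FDE and CIE lengths.  */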
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (dwarf_offset_size == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (dwarf_offset_size == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (dwarf_offset_size == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
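
/* For example (an illustrative note assuming a 64-bit HOST_WIDE_INT): a
   value whose minimum precision is 65 bits needs (65 + 63) / 64 == 2
   HOST_WIDE_INTs, while any value of 64 bits or fewer needs just one.  */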
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 if (debug_info_level <= DINFO_LEVEL_TERSE)
403 return false;
404
405 enum debug_struct_file criterion;
406 tree type_decl;
407 bool generic = lang_hooks.types.generic_p (type);
408
409 if (generic)
410 criterion = debug_struct_generic[usage];
411 else
412 criterion = debug_struct_ordinary[usage];
413
414 if (criterion == DINFO_STRUCT_FILE_NONE)
415 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
416 if (criterion == DINFO_STRUCT_FILE_ANY)
417 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
418
419 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
420
421 if (type_decl != NULL)
422 {
423 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
425
426 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
427 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
428 }
429
430 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
431 }
432 \f
433 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
434 switch to the data section instead, and write out a synthetic start label
435 for collect2 the first time around. */
436
437 static void
438 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
439 {
440 if (eh_frame_section == 0)
441 {
442 int flags;
443
444 if (EH_TABLES_CAN_BE_READ_ONLY)
445 {
446 int fde_encoding;
447 int per_encoding;
448 int lsda_encoding;
449
450 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
451 /*global=*/0);
452 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
453 /*global=*/1);
454 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
455 /*global=*/0);
456 flags = ((! flag_pic
457 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
458 && (fde_encoding & 0x70) != DW_EH_PE_aligned
459 && (per_encoding & 0x70) != DW_EH_PE_absptr
460 && (per_encoding & 0x70) != DW_EH_PE_aligned
461 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
462 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
463 ? 0 : SECTION_WRITE);
464 }
465 else
466 flags = SECTION_WRITE;
467
468 #ifdef EH_FRAME_SECTION_NAME
469 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
470 #else
471 eh_frame_section = ((flags == SECTION_WRITE)
472 ? data_section : readonly_data_section);
473 #endif /* EH_FRAME_SECTION_NAME */
474 }
475
476 switch_to_section (eh_frame_section);
477
478 #ifdef EH_FRAME_THROUGH_COLLECT2
479 /* We have no special eh_frame section. Emit special labels to guide
480 collect2. */
481 if (!back)
482 {
483 tree label = get_file_function_name ("F");
484 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
485 targetm.asm_out.globalize_label (asm_out_file,
486 IDENTIFIER_POINTER (label));
487 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
488 }
489 #endif
490 }
491
492 /* Switch [BACK] to the eh or debug frame table section, depending on
493 FOR_EH. */
494
495 static void
496 switch_to_frame_table_section (int for_eh, bool back)
497 {
498 if (for_eh)
499 switch_to_eh_frame_section (back);
500 else
501 {
502 if (!debug_frame_section)
503 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
504 SECTION_DEBUG, NULL);
505 switch_to_section (debug_frame_section);
506 }
507 }
508
509 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
510
511 enum dw_cfi_oprnd_type
512 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
513 {
514 switch (cfi)
515 {
516 case DW_CFA_nop:
517 case DW_CFA_GNU_window_save:
518 case DW_CFA_remember_state:
519 case DW_CFA_restore_state:
520 return dw_cfi_oprnd_unused;
521
522 case DW_CFA_set_loc:
523 case DW_CFA_advance_loc1:
524 case DW_CFA_advance_loc2:
525 case DW_CFA_advance_loc4:
526 case DW_CFA_MIPS_advance_loc8:
527 return dw_cfi_oprnd_addr;
528
529 case DW_CFA_offset:
530 case DW_CFA_offset_extended:
531 case DW_CFA_def_cfa:
532 case DW_CFA_offset_extended_sf:
533 case DW_CFA_def_cfa_sf:
534 case DW_CFA_restore:
535 case DW_CFA_restore_extended:
536 case DW_CFA_undefined:
537 case DW_CFA_same_value:
538 case DW_CFA_def_cfa_register:
539 case DW_CFA_register:
540 case DW_CFA_expression:
541 case DW_CFA_val_expression:
542 return dw_cfi_oprnd_reg_num;
543
544 case DW_CFA_def_cfa_offset:
545 case DW_CFA_GNU_args_size:
546 case DW_CFA_def_cfa_offset_sf:
547 return dw_cfi_oprnd_offset;
548
549 case DW_CFA_def_cfa_expression:
550 return dw_cfi_oprnd_loc;
551
552 default:
553 gcc_unreachable ();
554 }
555 }
556
557 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
558
559 enum dw_cfi_oprnd_type
560 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
561 {
562 switch (cfi)
563 {
564 case DW_CFA_def_cfa:
565 case DW_CFA_def_cfa_sf:
566 case DW_CFA_offset:
567 case DW_CFA_offset_extended_sf:
568 case DW_CFA_offset_extended:
569 return dw_cfi_oprnd_offset;
570
571 case DW_CFA_register:
572 return dw_cfi_oprnd_reg_num;
573
574 case DW_CFA_expression:
575 case DW_CFA_val_expression:
576 return dw_cfi_oprnd_loc;
577
578 case DW_CFA_def_cfa_expression:
579 return dw_cfi_oprnd_cfa_loc;
580
581 default:
582 return dw_cfi_oprnd_unused;
583 }
584 }
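
/* Read together, these two descriptions give the operand layout of each
   call frame instruction; e.g. DW_CFA_offset carries a register number in
   operand 1 and an offset in operand 2, while DW_CFA_nop uses neither
   operand.  (Illustrative summary of the tables above, not extra logic.)  */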
585
586 /* Output one FDE. */
587
588 static void
589 output_fde (dw_fde_ref fde, bool for_eh, bool second,
590 char *section_start_label, int fde_encoding, char *augmentation,
591 bool any_lsda_needed, int lsda_encoding)
592 {
593 const char *begin, *end;
594 static unsigned int j;
595 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
596
597 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
598 /* empty */ 0);
599 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
600 for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
602 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
603 if (!XCOFF_DEBUGGING_INFO || for_eh)
604 {
605 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh)
606 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
607 " indicating 64-bit DWARF extension");
608 dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1,
609 "FDE Length");
610 }
611 ASM_OUTPUT_LABEL (asm_out_file, l1);
612
613 if (for_eh)
614 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
615 else
616 dw2_asm_output_offset (dwarf_offset_size, section_start_label,
617 debug_frame_section, "FDE CIE offset");
618
619 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
620 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
621
622 if (for_eh)
623 {
624 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
625 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
626 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
627 "FDE initial location");
628 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
629 end, begin, "FDE address range");
630 }
631 else
632 {
633 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
634 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
635 }
636
637 if (augmentation[0])
638 {
639 if (any_lsda_needed)
640 {
641 int size = size_of_encoded_value (lsda_encoding);
642
643 if (lsda_encoding == DW_EH_PE_aligned)
644 {
645 int offset = ( 4 /* Length */
646 + 4 /* CIE offset */
647 + 2 * size_of_encoded_value (fde_encoding)
648 + 1 /* Augmentation size */ );
649 int pad = -offset & (PTR_SIZE - 1);
650
651 size += pad;
652 gcc_assert (size_of_uleb128 (size) == 1);
653 }
654
655 dw2_asm_output_data_uleb128 (size, "Augmentation size");
656
657 if (fde->uses_eh_lsda)
658 {
659 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
660 fde->funcdef_number);
661 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
662 gen_rtx_SYMBOL_REF (Pmode, l1),
663 false,
664 "Language Specific Data Area");
665 }
666 else
667 {
668 if (lsda_encoding == DW_EH_PE_aligned)
669 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
670 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
671 "Language Specific Data Area (none)");
672 }
673 }
674 else
675 dw2_asm_output_data_uleb128 (0, "Augmentation size");
676 }
677
678 /* Loop through the Call Frame Instructions associated with this FDE. */
679 fde->dw_fde_current_label = begin;
680 {
681 size_t from, until, i;
682
683 from = 0;
684 until = vec_safe_length (fde->dw_fde_cfi);
685
686 if (fde->dw_fde_second_begin == NULL)
687 ;
688 else if (!second)
689 until = fde->dw_fde_switch_cfi_index;
690 else
691 from = fde->dw_fde_switch_cfi_index;
692
693 for (i = from; i < until; i++)
694 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
695 }
696
697 /* If we are to emit a ref/link from function bodies to their frame tables,
698 do it now. This is typically performed to make sure that tables
699 associated with functions are dragged with them and not discarded by
700 garbage-collecting linkers. We need to do this on a per-function basis to
701 cope with -ffunction-sections. */
702
703 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
704 /* Switch to the function section, emit the ref to the tables, and
705 switch *back* into the table section. */
706 switch_to_section (function_section (fde->decl));
707 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
708 switch_to_frame_table_section (for_eh, true);
709 #endif
710
711 /* Pad the FDE out to an address sized boundary. */
712 ASM_OUTPUT_ALIGN (asm_out_file,
713 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
714 ASM_OUTPUT_LABEL (asm_out_file, l2);
715
716 j += 2;
717 }
718
719 /* Return true if frame description entry FDE is needed for EH. */
720
721 static bool
722 fde_needed_for_eh_p (dw_fde_ref fde)
723 {
724 if (flag_asynchronous_unwind_tables)
725 return true;
726
727 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
728 return true;
729
730 if (fde->uses_eh_lsda)
731 return true;
732
733 /* If exceptions are enabled, we have collected nothrow info. */
734 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
735 return false;
736
737 return true;
738 }
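
/* Putting those tests together (an illustrative summary, not extra logic):
   with -fexceptions but without -fasynchronous-unwind-tables, a non-weak
   nothrow function that has no LSDA gets no EH FDE at all, since nothing
   can unwind out of it.  */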
739
740 /* Output the call frame information, which records how the frame
741 pointer is calculated and where the saved registers are
742 located. */
743
744 static void
745 output_call_frame_info (int for_eh)
746 {
747 unsigned int i;
748 dw_fde_ref fde;
749 dw_cfi_ref cfi;
750 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
751 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
752 bool any_lsda_needed = false;
753 char augmentation[6];
754 int augmentation_size;
755 int fde_encoding = DW_EH_PE_absptr;
756 int per_encoding = DW_EH_PE_absptr;
757 int lsda_encoding = DW_EH_PE_absptr;
758 int return_reg;
759 rtx personality = NULL;
760 int dw_cie_version;
761
762 /* Don't emit a CIE if there won't be any FDEs. */
763 if (!fde_vec)
764 return;
765
766 /* Nothing to do if the assembler's doing it all. */
767 if (dwarf2out_do_cfi_asm ())
768 return;
769
770 /* If we don't have any functions we'll want to unwind out of, don't emit
771 any EH unwind information. If we make FDEs linkonce, we may have to
772 emit an empty label for an FDE that wouldn't otherwise be emitted. We
773 want to avoid having an FDE kept around when the function it refers to
774 is discarded. Example where this matters: a primary function template
775 in C++ requires EH information, an explicit specialization doesn't. */
776 if (for_eh)
777 {
778 bool any_eh_needed = false;
779
780 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
781 {
782 if (fde->uses_eh_lsda)
783 any_eh_needed = any_lsda_needed = true;
784 else if (fde_needed_for_eh_p (fde))
785 any_eh_needed = true;
786 else if (TARGET_USES_WEAK_UNWIND_INFO)
787 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
788 }
789
790 if (!any_eh_needed)
791 return;
792 }
793
794 /* We're going to be generating comments, so turn on app. */
795 if (flag_debug_asm)
796 app_enable ();
797
798 /* Switch to the proper frame section, first time. */
799 switch_to_frame_table_section (for_eh, false);
800
801 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
802 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
803
804 /* Output the CIE. */
805 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
806 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
807 if (!XCOFF_DEBUGGING_INFO || for_eh)
808 {
809 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh)
810 dw2_asm_output_data (4, 0xffffffff,
811 "Initial length escape value indicating 64-bit DWARF extension");
812 dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1,
813 "Length of Common Information Entry");
814 }
815 ASM_OUTPUT_LABEL (asm_out_file, l1);
816
817 /* Now that the CIE pointer is PC-relative for EH,
818 use 0 to identify the CIE. */
819 dw2_asm_output_data ((for_eh ? 4 : dwarf_offset_size),
820 (for_eh ? 0 : DWARF_CIE_ID),
821 "CIE Identifier Tag");
822
823 /* Use CIE version 3 for DWARF 3; allow DWARF 2 to continue to
824 use CIE version 1, unless that would produce incorrect results
825 due to overflowing the return register column. */
826 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
827 dw_cie_version = 1;
828 if (return_reg >= 256 || dwarf_version > 2)
829 dw_cie_version = 3;
830 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
831
832 augmentation[0] = 0;
833 augmentation_size = 0;
834
835 personality = current_unit_personality;
836 if (for_eh)
837 {
838 char *p;
839
840 /* Augmentation:
841 z Indicates that a uleb128 is present to size the
842 augmentation section.
843 L Indicates the encoding (and thus presence) of
844 an LSDA pointer in the FDE augmentation.
845 R Indicates a non-default pointer encoding for
846 FDE code pointers.
847 P Indicates the presence of an encoding + language
848 personality routine in the CIE augmentation. */
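
      /* As an illustrative example (not additional logic): a unit that
	 needs a personality routine, an LSDA slot and a non-default FDE
	 encoding ends up with the augmentation string "zPLR", and the
	 corresponding operands appear in that same order in the
	 augmentation data.  */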
849
850 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
851 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
852 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
853
854 p = augmentation + 1;
855 if (personality)
856 {
857 *p++ = 'P';
858 augmentation_size += 1 + size_of_encoded_value (per_encoding);
859 assemble_external_libcall (personality);
860 }
861 if (any_lsda_needed)
862 {
863 *p++ = 'L';
864 augmentation_size += 1;
865 }
866 if (fde_encoding != DW_EH_PE_absptr)
867 {
868 *p++ = 'R';
869 augmentation_size += 1;
870 }
871 if (p > augmentation + 1)
872 {
873 augmentation[0] = 'z';
874 *p = '\0';
875 }
876
877 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
878 if (personality && per_encoding == DW_EH_PE_aligned)
879 {
880 int offset = ( 4 /* Length */
881 + 4 /* CIE Id */
882 + 1 /* CIE version */
883 + strlen (augmentation) + 1 /* Augmentation */
884 + size_of_uleb128 (1) /* Code alignment */
885 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
886 + 1 /* RA column */
887 + 1 /* Augmentation size */
888 + 1 /* Personality encoding */ );
889 int pad = -offset & (PTR_SIZE - 1);
890
891 augmentation_size += pad;
892
893 /* Augmentations should be small, so there's scarce need to
894 iterate for a solution. Die if we exceed one uleb128 byte. */
895 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
896 }
897 }
898
899 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
900 if (dw_cie_version >= 4)
901 {
902 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
903 dw2_asm_output_data (1, 0, "CIE Segment Size");
904 }
905 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
906 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
907 "CIE Data Alignment Factor");
908
909 if (dw_cie_version == 1)
910 dw2_asm_output_data (1, return_reg, "CIE RA Column");
911 else
912 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
913
914 if (augmentation[0])
915 {
916 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
917 if (personality)
918 {
919 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
920 eh_data_format_name (per_encoding));
921 dw2_asm_output_encoded_addr_rtx (per_encoding,
922 personality,
923 true, NULL);
924 }
925
926 if (any_lsda_needed)
927 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
928 eh_data_format_name (lsda_encoding));
929
930 if (fde_encoding != DW_EH_PE_absptr)
931 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
932 eh_data_format_name (fde_encoding));
933 }
934
935 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
936 output_cfi (cfi, NULL, for_eh);
937
938 /* Pad the CIE out to an address sized boundary. */
939 ASM_OUTPUT_ALIGN (asm_out_file,
940 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
941 ASM_OUTPUT_LABEL (asm_out_file, l2);
942
943 /* Loop through all of the FDE's. */
944 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
945 {
946 unsigned int k;
947
948 /* Don't emit EH unwind info for leaf functions that don't need it. */
949 if (for_eh && !fde_needed_for_eh_p (fde))
950 continue;
951
952 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
953 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
954 augmentation, any_lsda_needed, lsda_encoding);
955 }
956
957 if (for_eh && targetm.terminate_dw2_eh_frame_info)
958 dw2_asm_output_data (4, 0, "End of Table");
959
960 /* Turn off app to make assembly quicker. */
961 if (flag_debug_asm)
962 app_disable ();
963 }
964
965 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
966
967 static void
968 dwarf2out_do_cfi_startproc (bool second)
969 {
970 int enc;
971 rtx ref;
972
973 fprintf (asm_out_file, "\t.cfi_startproc\n");
974
975 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
976
977 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
978 eh unwinders. */
979 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
980 return;
981
982 rtx personality = get_personality_function (current_function_decl);
983
984 if (personality)
985 {
986 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
987 ref = personality;
988
989 /* ??? The GAS support isn't entirely consistent. We have to
990 handle indirect support ourselves, but PC-relative is done
991 in the assembler. Further, the assembler can't handle any
992 of the weirder relocation types. */
993 if (enc & DW_EH_PE_indirect)
994 {
995 if (targetm.asm_out.make_eh_symbol_indirect != NULL)
996 ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
997 else
998 ref = dw2_force_const_mem (ref, true);
999 }
1000
1001 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
1002 output_addr_const (asm_out_file, ref);
1003 fputc ('\n', asm_out_file);
1004 }
1005
1006 if (crtl->uses_eh_lsda)
1007 {
1008 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1009
1010 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1011 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1012 current_function_funcdef_no);
1013 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1014 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1015
1016 if (enc & DW_EH_PE_indirect)
1017 {
1018 if (targetm.asm_out.make_eh_symbol_indirect != NULL)
1019 ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
1020 else
1021 ref = dw2_force_const_mem (ref, true);
1022 }
1023
1024 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1025 output_addr_const (asm_out_file, ref);
1026 fputc ('\n', asm_out_file);
1027 }
1028 }
1029
1030 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1031 this allocation may be done before pass_final. */
1032
1033 dw_fde_ref
1034 dwarf2out_alloc_current_fde (void)
1035 {
1036 dw_fde_ref fde;
1037
1038 fde = ggc_cleared_alloc<dw_fde_node> ();
1039 fde->decl = current_function_decl;
1040 fde->funcdef_number = current_function_funcdef_no;
1041 fde->fde_index = vec_safe_length (fde_vec);
1042 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1043 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1044 fde->nothrow = crtl->nothrow;
1045 fde->drap_reg = INVALID_REGNUM;
1046 fde->vdrap_reg = INVALID_REGNUM;
1047
1048 /* Record the FDE associated with this function. */
1049 cfun->fde = fde;
1050 vec_safe_push (fde_vec, fde);
1051
1052 return fde;
1053 }
1054
1055 /* Output a marker (i.e. a label) for the beginning of a function, before
1056 the prologue. */
1057
1058 void
1059 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1060 unsigned int column ATTRIBUTE_UNUSED,
1061 const char *file ATTRIBUTE_UNUSED)
1062 {
1063 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1064 char * dup_label;
1065 dw_fde_ref fde;
1066 section *fnsec;
1067 bool do_frame;
1068
1069 current_function_func_begin_label = NULL;
1070
1071 do_frame = dwarf2out_do_frame ();
1072
1073 /* ??? current_function_func_begin_label is also used by except.c for
1074 call-site information. We must emit this label if it might be used. */
1075 if (!do_frame
1076 && (!flag_exceptions
1077 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1078 return;
1079
1080 fnsec = function_section (current_function_decl);
1081 switch_to_section (fnsec);
1082 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1083 current_function_funcdef_no);
1084 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1085 current_function_funcdef_no);
1086 dup_label = xstrdup (label);
1087 current_function_func_begin_label = dup_label;
1088
1089 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1090 if (!do_frame)
1091 return;
1092
1093 /* Unlike the debug version, the EH version of frame unwind info is a per-
1094 function setting so we need to record whether we need it for the unit. */
1095 do_eh_frame |= dwarf2out_do_eh_frame ();
1096
1097 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1098 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1099 would include pass_dwarf2_frame. If we've not created the FDE yet,
1100 do so now. */
1101 fde = cfun->fde;
1102 if (fde == NULL)
1103 fde = dwarf2out_alloc_current_fde ();
1104
1105 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1106 fde->dw_fde_begin = dup_label;
1107 fde->dw_fde_current_label = dup_label;
1108 fde->in_std_section = (fnsec == text_section
1109 || (cold_text_section && fnsec == cold_text_section));
1110
1111 /* We only want to output line number information for the genuine dwarf2
1112 prologue case, not the eh frame case. */
1113 #ifdef DWARF2_DEBUGGING_INFO
1114 if (file)
1115 dwarf2out_source_line (line, column, file, 0, true);
1116 #endif
1117
1118 if (dwarf2out_do_cfi_asm ())
1119 dwarf2out_do_cfi_startproc (false);
1120 else
1121 {
1122 rtx personality = get_personality_function (current_function_decl);
1123 if (!current_unit_personality)
1124 current_unit_personality = personality;
1125
1126 /* We cannot keep a per-function current personality because, without
1127 CFI asm, there is no current function left by the time the CFI
1128 data is emitted. */
1129 if (personality && current_unit_personality != personality)
1130 sorry ("multiple EH personalities are supported only with assemblers "
1131 "supporting %<.cfi_personality%> directive");
1132 }
1133 }
1134
1135 /* Output a marker (i.e. a label) for the end of the generated code
1136 for a function prologue. This gets called *after* the prologue code has
1137 been generated. */
1138
1139 void
1140 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1141 const char *file ATTRIBUTE_UNUSED)
1142 {
1143 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1144
1145 /* Output a label to mark the end of the prologue code generated for
1146 this function. */
1147 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1148 current_function_funcdef_no);
1149 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1150 current_function_funcdef_no);
1151 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1152 }
1153
1154 /* Output a marker (i.e. a label) for the beginning of the generated code
1155 for a function epilogue. This gets called *before* the epilogue code has
1156 been generated. */
1157
1158 void
1159 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1160 const char *file ATTRIBUTE_UNUSED)
1161 {
1162 dw_fde_ref fde = cfun->fde;
1163 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1164
1165 if (fde->dw_fde_vms_begin_epilogue)
1166 return;
1167
1168 /* Output a label to mark the start of the epilogue code generated for
1169 this function. */
1170 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1171 current_function_funcdef_no);
1172 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1173 current_function_funcdef_no);
1174 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1175 }
1176
1177 /* Output a marker (i.e. a label) for the absolute end of the generated code
1178 for a function definition. This gets called *after* the epilogue code has
1179 been generated. */
1180
1181 void
1182 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1183 const char *file ATTRIBUTE_UNUSED)
1184 {
1185 dw_fde_ref fde;
1186 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1187
1188 last_var_location_insn = NULL;
1189 cached_next_real_insn = NULL;
1190
1191 if (dwarf2out_do_cfi_asm ())
1192 fprintf (asm_out_file, "\t.cfi_endproc\n");
1193
1194 /* Output a label to mark the endpoint of the code generated for this
1195 function. */
1196 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1197 current_function_funcdef_no);
1198 ASM_OUTPUT_LABEL (asm_out_file, label);
1199 fde = cfun->fde;
1200 gcc_assert (fde != NULL);
1201 if (fde->dw_fde_second_begin == NULL)
1202 fde->dw_fde_end = xstrdup (label);
1203 }
1204
1205 void
1206 dwarf2out_frame_finish (void)
1207 {
1208 /* Output call frame information. */
1209 if (targetm.debug_unwind_info () == UI_DWARF2)
1210 output_call_frame_info (0);
1211
1212 /* Output another copy for the unwinder. */
1213 if (do_eh_frame)
1214 output_call_frame_info (1);
1215 }
1216
1217 /* Note that the current function section is being used for code. */
1218
1219 static void
1220 dwarf2out_note_section_used (void)
1221 {
1222 section *sec = current_function_section ();
1223 if (sec == text_section)
1224 text_section_used = true;
1225 else if (sec == cold_text_section)
1226 cold_text_section_used = true;
1227 }
1228
1229 static void var_location_switch_text_section (void);
1230 static void set_cur_line_info_table (section *);
1231
1232 void
1233 dwarf2out_switch_text_section (void)
1234 {
1235 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1236 section *sect;
1237 dw_fde_ref fde = cfun->fde;
1238
1239 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1240
1241 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1242 current_function_funcdef_no);
1243
1244 fde->dw_fde_second_begin = ggc_strdup (label);
1245 if (!in_cold_section_p)
1246 {
1247 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1248 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1249 }
1250 else
1251 {
1252 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1253 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1254 }
1255 have_multiple_function_sections = true;
1256
1257 /* There is no need to mark used sections when not debugging. */
1258 if (cold_text_section != NULL)
1259 dwarf2out_note_section_used ();
1260
1261 if (dwarf2out_do_cfi_asm ())
1262 fprintf (asm_out_file, "\t.cfi_endproc\n");
1263
1264 /* Now do the real section switch. */
1265 sect = current_function_section ();
1266 switch_to_section (sect);
1267
1268 fde->second_in_std_section
1269 = (sect == text_section
1270 || (cold_text_section && sect == cold_text_section));
1271
1272 if (dwarf2out_do_cfi_asm ())
1273 dwarf2out_do_cfi_startproc (true);
1274
1275 var_location_switch_text_section ();
1276
1277 if (cold_text_section != NULL)
1278 set_cur_line_info_table (sect);
1279 }
1280 \f
1281 /* And now, the subset of the debugging information support code necessary
1282 for emitting location expressions. */
1283
1284 /* Data about a single source file. */
1285 struct GTY((for_user)) dwarf_file_data {
1286 const char * filename;
1287 int emitted_number;
1288 };
1289
1290 /* Describe an entry into the .debug_addr section. */
1291
1292 enum ate_kind {
1293 ate_kind_rtx,
1294 ate_kind_rtx_dtprel,
1295 ate_kind_label
1296 };
1297
1298 struct GTY((for_user)) addr_table_entry {
1299 enum ate_kind kind;
1300 unsigned int refcount;
1301 unsigned int index;
1302 union addr_table_entry_struct_union
1303 {
1304 rtx GTY ((tag ("0"))) rtl;
1305 char * GTY ((tag ("1"))) label;
1306 }
1307 GTY ((desc ("%1.kind"))) addr;
1308 };
1309
1310 typedef unsigned int var_loc_view;
1311
1312 /* Location lists are ranges + location descriptions for that range,
1313 so you can track variables that are in different places over
1314 their entire life. */
1315 typedef struct GTY(()) dw_loc_list_struct {
1316 dw_loc_list_ref dw_loc_next;
1317 const char *begin; /* Label and addr_entry for start of range */
1318 addr_table_entry *begin_entry;
1319 const char *end; /* Label for end of range */
1320 char *ll_symbol; /* Label for beginning of location list.
1321 Only on head of list. */
1322 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1323 const char *section; /* Section this loclist is relative to */
1324 dw_loc_descr_ref expr;
1325 var_loc_view vbegin, vend;
1326 hashval_t hash;
1327 /* True if all addresses in this and subsequent lists are known to be
1328 resolved. */
1329 bool resolved_addr;
1330 /* True if this list has been replaced by dw_loc_next. */
1331 bool replaced;
1332 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1333 section. */
1334 unsigned char emitted : 1;
1335 /* True if hash field is index rather than hash value. */
1336 unsigned char num_assigned : 1;
1337 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1338 unsigned char offset_emitted : 1;
1339 /* True if note_variable_value_in_expr has been called on it. */
1340 unsigned char noted_variable_value : 1;
1341 /* True if the range should be emitted even if begin and end
1342 are the same. */
1343 bool force;
1344 } dw_loc_list_node;
1345
1346 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1347 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1348
1349 /* Convert a DWARF stack opcode into its string name. */
1350
1351 static const char *
1352 dwarf_stack_op_name (unsigned int op)
1353 {
1354 const char *name = get_DW_OP_name (op);
1355
1356 if (name != NULL)
1357 return name;
1358
1359 return "OP_<unknown>";
1360 }
1361
1362 /* Return TRUE iff we're to output location view lists as a separate
1363 attribute next to the location lists, as an extension compatible
1364 with DWARF 2 and above. */
1365
1366 static inline bool
1367 dwarf2out_locviews_in_attribute ()
1368 {
1369 return debug_variable_location_views == 1;
1370 }
1371
1372 /* Return TRUE iff we're to output location view lists as part of the
1373 location lists, as proposed for standardization after DWARF 5. */
1374
1375 static inline bool
1376 dwarf2out_locviews_in_loclist ()
1377 {
1378 #ifndef DW_LLE_view_pair
1379 return false;
1380 #else
1381 return debug_variable_location_views == -1;
1382 #endif
1383 }
1384
1385 /* Return a pointer to a newly allocated location description. Location
1386 descriptions are simple expression terms that can be strung
1387 together to form more complicated location (address) descriptions. */
1388
1389 static inline dw_loc_descr_ref
1390 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1391 unsigned HOST_WIDE_INT oprnd2)
1392 {
1393 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1394
1395 descr->dw_loc_opc = op;
1396 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1397 descr->dw_loc_oprnd1.val_entry = NULL;
1398 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1399 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1400 descr->dw_loc_oprnd2.val_entry = NULL;
1401 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1402
1403 return descr;
1404 }
1405
1406 /* Add a location description term to a location description expression. */
1407
1408 static inline void
1409 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1410 {
1411 dw_loc_descr_ref *d;
1412
1413 /* Find the end of the chain. */
1414 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1415 ;
1416
1417 *d = descr;
1418 }
1419
1420 /* Compare two location operands for exact equality. */
1421
1422 static bool
1423 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1424 {
1425 if (a->val_class != b->val_class)
1426 return false;
1427 switch (a->val_class)
1428 {
1429 case dw_val_class_none:
1430 return true;
1431 case dw_val_class_addr:
1432 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1433
1434 case dw_val_class_offset:
1435 case dw_val_class_unsigned_const:
1436 case dw_val_class_const:
1437 case dw_val_class_unsigned_const_implicit:
1438 case dw_val_class_const_implicit:
1439 case dw_val_class_range_list:
1440 /* These are all HOST_WIDE_INT, signed or unsigned. */
1441 return a->v.val_unsigned == b->v.val_unsigned;
1442
1443 case dw_val_class_loc:
1444 return a->v.val_loc == b->v.val_loc;
1445 case dw_val_class_loc_list:
1446 return a->v.val_loc_list == b->v.val_loc_list;
1447 case dw_val_class_view_list:
1448 return a->v.val_view_list == b->v.val_view_list;
1449 case dw_val_class_die_ref:
1450 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1451 case dw_val_class_fde_ref:
1452 return a->v.val_fde_index == b->v.val_fde_index;
1453 case dw_val_class_symview:
1454 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1455 case dw_val_class_lbl_id:
1456 case dw_val_class_lineptr:
1457 case dw_val_class_macptr:
1458 case dw_val_class_loclistsptr:
1459 case dw_val_class_high_pc:
1460 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1461 case dw_val_class_str:
1462 return a->v.val_str == b->v.val_str;
1463 case dw_val_class_flag:
1464 return a->v.val_flag == b->v.val_flag;
1465 case dw_val_class_file:
1466 case dw_val_class_file_implicit:
1467 return a->v.val_file == b->v.val_file;
1468 case dw_val_class_decl_ref:
1469 return a->v.val_decl_ref == b->v.val_decl_ref;
1470
1471 case dw_val_class_const_double:
1472 return (a->v.val_double.high == b->v.val_double.high
1473 && a->v.val_double.low == b->v.val_double.low);
1474
1475 case dw_val_class_wide_int:
1476 return *a->v.val_wide == *b->v.val_wide;
1477
1478 case dw_val_class_vec:
1479 {
1480 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1481 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1482
1483 return (a_len == b_len
1484 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1485 }
1486
1487 case dw_val_class_data8:
1488 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1489
1490 case dw_val_class_vms_delta:
1491 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1492 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1493
1494 case dw_val_class_discr_value:
1495 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1496 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1497 case dw_val_class_discr_list:
1498 /* It makes no sense comparing two discriminant value lists. */
1499 return false;
1500 }
1501 gcc_unreachable ();
1502 }
1503
1504 /* Compare two location atoms for exact equality. */
1505
1506 static bool
1507 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1508 {
1509 if (a->dw_loc_opc != b->dw_loc_opc)
1510 return false;
1511
1512 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1513 address size, but since we always allocate cleared storage it
1514 should be zero for other types of locations. */
1515 if (a->dtprel != b->dtprel)
1516 return false;
1517
1518 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1519 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1520 }
1521
1522 /* Compare two complete location expressions for exact equality. */
1523
1524 bool
1525 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1526 {
1527 while (1)
1528 {
1529 if (a == b)
1530 return true;
1531 if (a == NULL || b == NULL)
1532 return false;
1533 if (!loc_descr_equal_p_1 (a, b))
1534 return false;
1535
1536 a = a->dw_loc_next;
1537 b = b->dw_loc_next;
1538 }
1539 }
1540
1541
1542 /* Add a constant POLY_OFFSET to a location expression. */
1543
1544 static void
1545 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1546 {
1547 dw_loc_descr_ref loc;
1548 HOST_WIDE_INT *p;
1549
1550 gcc_assert (*list_head != NULL);
1551
1552 if (known_eq (poly_offset, 0))
1553 return;
1554
1555 /* Find the end of the chain. */
1556 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1557 ;
1558
1559 HOST_WIDE_INT offset;
1560 if (!poly_offset.is_constant (&offset))
1561 {
1562 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1563 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1564 return;
1565 }
1566
1567 p = NULL;
1568 if (loc->dw_loc_opc == DW_OP_fbreg
1569 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1570 p = &loc->dw_loc_oprnd1.v.val_int;
1571 else if (loc->dw_loc_opc == DW_OP_bregx)
1572 p = &loc->dw_loc_oprnd2.v.val_int;
1573
1574 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1575 offset. Don't optimize if a signed integer overflow would happen. */
1576 if (p != NULL
1577 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1578 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1579 *p += offset;
1580
1581 else if (offset > 0)
1582 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1583
1584 else
1585 {
1586 loc->dw_loc_next
1587 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1588 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1589 }
1590 }
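
/* A small worked example of the optimization above (illustrative only):
   if the expression currently ends in DW_OP_fbreg -32 and OFFSET is 8, the
   trailing operand is simply rewritten to give DW_OP_fbreg -24.  If the
   trailing opcode is not one of fbreg/breg0..31/bregx, a positive offset
   is instead appended as DW_OP_plus_uconst 8, and a negative one as a
   constant 8 followed by DW_OP_minus.  */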
1591
1592 /* Return a pointer to a newly allocated location description for
1593 REG and OFFSET. */
1594
1595 static inline dw_loc_descr_ref
1596 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1597 {
1598 HOST_WIDE_INT const_offset;
1599 if (offset.is_constant (&const_offset))
1600 {
1601 if (reg <= 31)
1602 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1603 const_offset, 0);
1604 else
1605 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1606 }
1607 else
1608 {
1609 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1610 loc_descr_plus_const (&ret, offset);
1611 return ret;
1612 }
1613 }
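
/* Illustrative example (assuming the x86-64 DWARF register numbering,
   where register 6 is %rbp): new_reg_loc_descr (6, -8) yields the single
   operation DW_OP_breg6 -8, while a register number above 31 would use
   DW_OP_bregx <reg>, <offset> instead.  */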
1614
1615 /* Add a constant OFFSET to a location list. */
1616
1617 static void
1618 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1619 {
1620 dw_loc_list_ref d;
1621 for (d = list_head; d != NULL; d = d->dw_loc_next)
1622 loc_descr_plus_const (&d->expr, offset);
1623 }
1624
1625 #define DWARF_REF_SIZE \
1626 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : dwarf_offset_size)
1627
1628 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1629 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1630 DW_FORM_data16 with 128 bits. */
1631 #define DWARF_LARGEST_DATA_FORM_BITS \
1632 (dwarf_version >= 5 ? 128 : 64)
1633
1634 /* Utility inline function for construction of ops that were GNU extensions
1635 before DWARF 5. */
1636 static inline enum dwarf_location_atom
1637 dwarf_OP (enum dwarf_location_atom op)
1638 {
1639 switch (op)
1640 {
1641 case DW_OP_implicit_pointer:
1642 if (dwarf_version < 5)
1643 return DW_OP_GNU_implicit_pointer;
1644 break;
1645
1646 case DW_OP_entry_value:
1647 if (dwarf_version < 5)
1648 return DW_OP_GNU_entry_value;
1649 break;
1650
1651 case DW_OP_const_type:
1652 if (dwarf_version < 5)
1653 return DW_OP_GNU_const_type;
1654 break;
1655
1656 case DW_OP_regval_type:
1657 if (dwarf_version < 5)
1658 return DW_OP_GNU_regval_type;
1659 break;
1660
1661 case DW_OP_deref_type:
1662 if (dwarf_version < 5)
1663 return DW_OP_GNU_deref_type;
1664 break;
1665
1666 case DW_OP_convert:
1667 if (dwarf_version < 5)
1668 return DW_OP_GNU_convert;
1669 break;
1670
1671 case DW_OP_reinterpret:
1672 if (dwarf_version < 5)
1673 return DW_OP_GNU_reinterpret;
1674 break;
1675
1676 case DW_OP_addrx:
1677 if (dwarf_version < 5)
1678 return DW_OP_GNU_addr_index;
1679 break;
1680
1681 case DW_OP_constx:
1682 if (dwarf_version < 5)
1683 return DW_OP_GNU_const_index;
1684 break;
1685
1686 default:
1687 break;
1688 }
1689 return op;
1690 }
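
/* Usage note (an illustrative summary of the mapping above): callers ask
   for the DWARF 5 opcode, e.g. dwarf_OP (DW_OP_entry_value), and
   transparently get DW_OP_GNU_entry_value back when emitting DWARF 2-4.  */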
1691
1692 /* Similarly for attributes. */
1693 static inline enum dwarf_attribute
1694 dwarf_AT (enum dwarf_attribute at)
1695 {
1696 switch (at)
1697 {
1698 case DW_AT_call_return_pc:
1699 if (dwarf_version < 5)
1700 return DW_AT_low_pc;
1701 break;
1702
1703 case DW_AT_call_tail_call:
1704 if (dwarf_version < 5)
1705 return DW_AT_GNU_tail_call;
1706 break;
1707
1708 case DW_AT_call_origin:
1709 if (dwarf_version < 5)
1710 return DW_AT_abstract_origin;
1711 break;
1712
1713 case DW_AT_call_target:
1714 if (dwarf_version < 5)
1715 return DW_AT_GNU_call_site_target;
1716 break;
1717
1718 case DW_AT_call_target_clobbered:
1719 if (dwarf_version < 5)
1720 return DW_AT_GNU_call_site_target_clobbered;
1721 break;
1722
1723 case DW_AT_call_parameter:
1724 if (dwarf_version < 5)
1725 return DW_AT_abstract_origin;
1726 break;
1727
1728 case DW_AT_call_value:
1729 if (dwarf_version < 5)
1730 return DW_AT_GNU_call_site_value;
1731 break;
1732
1733 case DW_AT_call_data_value:
1734 if (dwarf_version < 5)
1735 return DW_AT_GNU_call_site_data_value;
1736 break;
1737
1738 case DW_AT_call_all_calls:
1739 if (dwarf_version < 5)
1740 return DW_AT_GNU_all_call_sites;
1741 break;
1742
1743 case DW_AT_call_all_tail_calls:
1744 if (dwarf_version < 5)
1745 return DW_AT_GNU_all_tail_call_sites;
1746 break;
1747
1748 case DW_AT_dwo_name:
1749 if (dwarf_version < 5)
1750 return DW_AT_GNU_dwo_name;
1751 break;
1752
1753 case DW_AT_addr_base:
1754 if (dwarf_version < 5)
1755 return DW_AT_GNU_addr_base;
1756 break;
1757
1758 default:
1759 break;
1760 }
1761 return at;
1762 }
1763
1764 /* And similarly for tags. */
1765 static inline enum dwarf_tag
1766 dwarf_TAG (enum dwarf_tag tag)
1767 {
1768 switch (tag)
1769 {
1770 case DW_TAG_call_site:
1771 if (dwarf_version < 5)
1772 return DW_TAG_GNU_call_site;
1773 break;
1774
1775 case DW_TAG_call_site_parameter:
1776 if (dwarf_version < 5)
1777 return DW_TAG_GNU_call_site_parameter;
1778 break;
1779
1780 default:
1781 break;
1782 }
1783 return tag;
1784 }
1785
1786 /* And similarly for forms. */
1787 static inline enum dwarf_form
1788 dwarf_FORM (enum dwarf_form form)
1789 {
1790 switch (form)
1791 {
1792 case DW_FORM_addrx:
1793 if (dwarf_version < 5)
1794 return DW_FORM_GNU_addr_index;
1795 break;
1796
1797 case DW_FORM_strx:
1798 if (dwarf_version < 5)
1799 return DW_FORM_GNU_str_index;
1800 break;
1801
1802 default:
1803 break;
1804 }
1805 return form;
1806 }
1807
1808 static unsigned long int get_base_type_offset (dw_die_ref);
1809
1810 /* Return the size of a location descriptor. */
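/* For instance, a DW_OP_plus_uconst descriptor whose operand is 0x90 needs
   one byte for the opcode plus two bytes for the ULEB128-encoded operand
   (values above 0x7f need a second 7-bit group), so size_of_loc_descr
   returns 3 for it.  */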
1811
1812 static unsigned long
1813 size_of_loc_descr (dw_loc_descr_ref loc)
1814 {
1815 unsigned long size = 1;
1816
1817 switch (loc->dw_loc_opc)
1818 {
1819 case DW_OP_addr:
1820 size += DWARF2_ADDR_SIZE;
1821 break;
1822 case DW_OP_GNU_addr_index:
1823 case DW_OP_addrx:
1824 case DW_OP_GNU_const_index:
1825 case DW_OP_constx:
1826 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1827 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1828 break;
1829 case DW_OP_const1u:
1830 case DW_OP_const1s:
1831 size += 1;
1832 break;
1833 case DW_OP_const2u:
1834 case DW_OP_const2s:
1835 size += 2;
1836 break;
1837 case DW_OP_const4u:
1838 case DW_OP_const4s:
1839 size += 4;
1840 break;
1841 case DW_OP_const8u:
1842 case DW_OP_const8s:
1843 size += 8;
1844 break;
1845 case DW_OP_constu:
1846 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1847 break;
1848 case DW_OP_consts:
1849 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1850 break;
1851 case DW_OP_pick:
1852 size += 1;
1853 break;
1854 case DW_OP_plus_uconst:
1855 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1856 break;
1857 case DW_OP_skip:
1858 case DW_OP_bra:
1859 size += 2;
1860 break;
1861 case DW_OP_breg0:
1862 case DW_OP_breg1:
1863 case DW_OP_breg2:
1864 case DW_OP_breg3:
1865 case DW_OP_breg4:
1866 case DW_OP_breg5:
1867 case DW_OP_breg6:
1868 case DW_OP_breg7:
1869 case DW_OP_breg8:
1870 case DW_OP_breg9:
1871 case DW_OP_breg10:
1872 case DW_OP_breg11:
1873 case DW_OP_breg12:
1874 case DW_OP_breg13:
1875 case DW_OP_breg14:
1876 case DW_OP_breg15:
1877 case DW_OP_breg16:
1878 case DW_OP_breg17:
1879 case DW_OP_breg18:
1880 case DW_OP_breg19:
1881 case DW_OP_breg20:
1882 case DW_OP_breg21:
1883 case DW_OP_breg22:
1884 case DW_OP_breg23:
1885 case DW_OP_breg24:
1886 case DW_OP_breg25:
1887 case DW_OP_breg26:
1888 case DW_OP_breg27:
1889 case DW_OP_breg28:
1890 case DW_OP_breg29:
1891 case DW_OP_breg30:
1892 case DW_OP_breg31:
1893 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1894 break;
1895 case DW_OP_regx:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 break;
1898 case DW_OP_fbreg:
1899 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1900 break;
1901 case DW_OP_bregx:
1902 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1903 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1904 break;
1905 case DW_OP_piece:
1906 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1907 break;
1908 case DW_OP_bit_piece:
1909 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1910 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1911 break;
1912 case DW_OP_deref_size:
1913 case DW_OP_xderef_size:
1914 size += 1;
1915 break;
1916 case DW_OP_call2:
1917 size += 2;
1918 break;
1919 case DW_OP_call4:
1920 size += 4;
1921 break;
1922 case DW_OP_call_ref:
1923 case DW_OP_GNU_variable_value:
1924 size += DWARF_REF_SIZE;
1925 break;
1926 case DW_OP_implicit_value:
1927 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1928 + loc->dw_loc_oprnd1.v.val_unsigned;
1929 break;
1930 case DW_OP_implicit_pointer:
1931 case DW_OP_GNU_implicit_pointer:
1932 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1933 break;
1934 case DW_OP_entry_value:
1935 case DW_OP_GNU_entry_value:
1936 {
1937 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1938 size += size_of_uleb128 (op_size) + op_size;
1939 break;
1940 }
1941 case DW_OP_const_type:
1942 case DW_OP_GNU_const_type:
1943 {
1944 unsigned long o
1945 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1946 size += size_of_uleb128 (o) + 1;
1947 switch (loc->dw_loc_oprnd2.val_class)
1948 {
1949 case dw_val_class_vec:
1950 size += loc->dw_loc_oprnd2.v.val_vec.length
1951 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1952 break;
1953 case dw_val_class_const:
1954 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1955 break;
1956 case dw_val_class_const_double:
1957 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1958 break;
1959 case dw_val_class_wide_int:
1960 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1961 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1962 break;
1963 default:
1964 gcc_unreachable ();
1965 }
1966 break;
1967 }
1968 case DW_OP_regval_type:
1969 case DW_OP_GNU_regval_type:
1970 {
1971 unsigned long o
1972 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1973 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1974 + size_of_uleb128 (o);
1975 }
1976 break;
1977 case DW_OP_deref_type:
1978 case DW_OP_GNU_deref_type:
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1982 size += 1 + size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_convert:
1986 case DW_OP_reinterpret:
1987 case DW_OP_GNU_convert:
1988 case DW_OP_GNU_reinterpret:
1989 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1990 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1991 else
1992 {
1993 unsigned long o
1994 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1995 size += size_of_uleb128 (o);
1996 }
1997 break;
1998 case DW_OP_GNU_parameter_ref:
1999 size += 4;
2000 break;
2001 default:
2002 break;
2003 }
2004
2005 return size;
2006 }
2007
2008 /* Return the size of a series of location descriptors. */
2009
2010 unsigned long
2011 size_of_locs (dw_loc_descr_ref loc)
2012 {
2013 dw_loc_descr_ref l;
2014 unsigned long size;
2015
2016 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2017 field, to avoid writing to a PCH file. */
2018 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2019 {
2020 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2021 break;
2022 size += size_of_loc_descr (l);
2023 }
2024 if (! l)
2025 return size;
2026
2027 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2028 {
2029 l->dw_loc_addr = size;
2030 size += size_of_loc_descr (l);
2031 }
2032
2033 return size;
2034 }
2035
2036 /* Return the size of the value in a DW_AT_discr_value attribute. */
2037
2038 static int
2039 size_of_discr_value (dw_discr_value *discr_value)
2040 {
2041 if (discr_value->pos)
2042 return size_of_uleb128 (discr_value->v.uval);
2043 else
2044 return size_of_sleb128 (discr_value->v.sval);
2045 }
2046
2047 /* Return the size of the value in a DW_AT_discr_list attribute. */
2048
2049 static int
2050 size_of_discr_list (dw_discr_list_ref discr_list)
2051 {
2052 int size = 0;
2053
2054 for (dw_discr_list_ref list = discr_list;
2055 list != NULL;
2056 list = list->dw_discr_next)
2057 {
2058 /* One byte for the discriminant value descriptor, and then one or two
2059 LEB128 numbers, depending on whether it's a single case label or a
2060 range label. */
2061 size += 1;
2062 size += size_of_discr_value (&list->dw_discr_lower_bound);
2063 if (list->dw_discr_range != 0)
2064 size += size_of_discr_value (&list->dw_discr_upper_bound);
2065 }
2066 return size;
2067 }
2068
2069 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2070 static void get_ref_die_offset_label (char *, dw_die_ref);
2071 static unsigned long int get_ref_die_offset (dw_die_ref);
2072
2073 /* Output location description stack opcode's operands (if any).
2074 The for_eh_or_skip parameter controls whether register numbers are
2075 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2076 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2077 info). This should be suppressed for the cases that have not been converted
2078 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2079
2080 static void
2081 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2082 {
2083 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2084 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2085
2086 switch (loc->dw_loc_opc)
2087 {
2088 #ifdef DWARF2_DEBUGGING_INFO
2089 case DW_OP_const2u:
2090 case DW_OP_const2s:
2091 dw2_asm_output_data (2, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const4u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const4s:
2104 dw2_asm_output_data (4, val1->v.val_int, NULL);
2105 break;
2106 case DW_OP_const8u:
2107 if (loc->dtprel)
2108 {
2109 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2110 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2111 val1->v.val_addr);
2112 fputc ('\n', asm_out_file);
2113 break;
2114 }
2115 /* FALLTHRU */
2116 case DW_OP_const8s:
2117 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2118 dw2_asm_output_data (8, val1->v.val_int, NULL);
2119 break;
2120 case DW_OP_skip:
2121 case DW_OP_bra:
2122 {
2123 int offset;
2124
2125 gcc_assert (val1->val_class == dw_val_class_loc);
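/* The branch operand is relative to the end of this operation, i.e. one
   byte of opcode plus the two-byte operand itself, hence the +3 below.  */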
2126 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2127
2128 dw2_asm_output_data (2, offset, NULL);
2129 }
2130 break;
2131 case DW_OP_implicit_value:
2132 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2133 switch (val2->val_class)
2134 {
2135 case dw_val_class_const:
2136 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2137 break;
2138 case dw_val_class_vec:
2139 {
2140 unsigned int elt_size = val2->v.val_vec.elt_size;
2141 unsigned int len = val2->v.val_vec.length;
2142 unsigned int i;
2143 unsigned char *p;
2144
2145 if (elt_size > sizeof (HOST_WIDE_INT))
2146 {
2147 elt_size /= 2;
2148 len *= 2;
2149 }
2150 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2151 i < len;
2152 i++, p += elt_size)
2153 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2154 "fp or vector constant word %u", i);
2155 }
2156 break;
2157 case dw_val_class_const_double:
2158 {
2159 unsigned HOST_WIDE_INT first, second;
2160
2161 if (WORDS_BIG_ENDIAN)
2162 {
2163 first = val2->v.val_double.high;
2164 second = val2->v.val_double.low;
2165 }
2166 else
2167 {
2168 first = val2->v.val_double.low;
2169 second = val2->v.val_double.high;
2170 }
2171 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2172 first, NULL);
2173 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2174 second, NULL);
2175 }
2176 break;
2177 case dw_val_class_wide_int:
2178 {
2179 int i;
2180 int len = get_full_len (*val2->v.val_wide);
2181 if (WORDS_BIG_ENDIAN)
2182 for (i = len - 1; i >= 0; --i)
2183 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2184 val2->v.val_wide->elt (i), NULL);
2185 else
2186 for (i = 0; i < len; ++i)
2187 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2188 val2->v.val_wide->elt (i), NULL);
2189 }
2190 break;
2191 case dw_val_class_addr:
2192 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2193 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2194 break;
2195 default:
2196 gcc_unreachable ();
2197 }
2198 break;
2199 #else
2200 case DW_OP_const2u:
2201 case DW_OP_const2s:
2202 case DW_OP_const4u:
2203 case DW_OP_const4s:
2204 case DW_OP_const8u:
2205 case DW_OP_const8s:
2206 case DW_OP_skip:
2207 case DW_OP_bra:
2208 case DW_OP_implicit_value:
2209 /* We currently don't make any attempt to make sure these are
2210 aligned properly like we do for the main unwind info, so
2211 don't support emitting things larger than a byte if we're
2212 only doing unwinding. */
2213 gcc_unreachable ();
2214 #endif
2215 case DW_OP_const1u:
2216 case DW_OP_const1s:
2217 dw2_asm_output_data (1, val1->v.val_int, NULL);
2218 break;
2219 case DW_OP_constu:
2220 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2221 break;
2222 case DW_OP_consts:
2223 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2224 break;
2225 case DW_OP_pick:
2226 dw2_asm_output_data (1, val1->v.val_int, NULL);
2227 break;
2228 case DW_OP_plus_uconst:
2229 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2230 break;
2231 case DW_OP_breg0:
2232 case DW_OP_breg1:
2233 case DW_OP_breg2:
2234 case DW_OP_breg3:
2235 case DW_OP_breg4:
2236 case DW_OP_breg5:
2237 case DW_OP_breg6:
2238 case DW_OP_breg7:
2239 case DW_OP_breg8:
2240 case DW_OP_breg9:
2241 case DW_OP_breg10:
2242 case DW_OP_breg11:
2243 case DW_OP_breg12:
2244 case DW_OP_breg13:
2245 case DW_OP_breg14:
2246 case DW_OP_breg15:
2247 case DW_OP_breg16:
2248 case DW_OP_breg17:
2249 case DW_OP_breg18:
2250 case DW_OP_breg19:
2251 case DW_OP_breg20:
2252 case DW_OP_breg21:
2253 case DW_OP_breg22:
2254 case DW_OP_breg23:
2255 case DW_OP_breg24:
2256 case DW_OP_breg25:
2257 case DW_OP_breg26:
2258 case DW_OP_breg27:
2259 case DW_OP_breg28:
2260 case DW_OP_breg29:
2261 case DW_OP_breg30:
2262 case DW_OP_breg31:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_regx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 }
2274 break;
2275 case DW_OP_fbreg:
2276 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2277 break;
2278 case DW_OP_bregx:
2279 {
2280 unsigned r = val1->v.val_unsigned;
2281 if (for_eh_or_skip >= 0)
2282 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2283 gcc_assert (size_of_uleb128 (r)
2284 == size_of_uleb128 (val1->v.val_unsigned));
2285 dw2_asm_output_data_uleb128 (r, NULL);
2286 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2287 }
2288 break;
2289 case DW_OP_piece:
2290 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2291 break;
2292 case DW_OP_bit_piece:
2293 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2294 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2295 break;
2296 case DW_OP_deref_size:
2297 case DW_OP_xderef_size:
2298 dw2_asm_output_data (1, val1->v.val_int, NULL);
2299 break;
2300
2301 case DW_OP_addr:
2302 if (loc->dtprel)
2303 {
2304 if (targetm.asm_out.output_dwarf_dtprel)
2305 {
2306 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2307 DWARF2_ADDR_SIZE,
2308 val1->v.val_addr);
2309 fputc ('\n', asm_out_file);
2310 }
2311 else
2312 gcc_unreachable ();
2313 }
2314 else
2315 {
2316 #ifdef DWARF2_DEBUGGING_INFO
2317 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2318 #else
2319 gcc_unreachable ();
2320 #endif
2321 }
2322 break;
2323
2324 case DW_OP_GNU_addr_index:
2325 case DW_OP_addrx:
2326 case DW_OP_GNU_const_index:
2327 case DW_OP_constx:
2328 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2329 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2330 "(index into .debug_addr)");
2331 break;
2332
2333 case DW_OP_call2:
2334 case DW_OP_call4:
2335 {
2336 unsigned long die_offset
2337 = get_ref_die_offset (val1->v.val_die_ref.die);
2338 /* Make sure the offset has been computed and that we can encode it as
2339 an operand. */
2340 gcc_assert (die_offset > 0
2341 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2342 ? 0xffff
2343 : 0xffffffff));
2344 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2345 die_offset, NULL);
2346 }
2347 break;
2348
2349 case DW_OP_call_ref:
2350 case DW_OP_GNU_variable_value:
2351 {
2352 char label[MAX_ARTIFICIAL_LABEL_BYTES
2353 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2354 gcc_assert (val1->val_class == dw_val_class_die_ref);
2355 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2356 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2357 }
2358 break;
2359
2360 case DW_OP_implicit_pointer:
2361 case DW_OP_GNU_implicit_pointer:
2362 {
2363 char label[MAX_ARTIFICIAL_LABEL_BYTES
2364 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2365 gcc_assert (val1->val_class == dw_val_class_die_ref);
2366 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2367 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2368 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2369 }
2370 break;
2371
2372 case DW_OP_entry_value:
2373 case DW_OP_GNU_entry_value:
2374 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2375 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2376 break;
2377
2378 case DW_OP_const_type:
2379 case DW_OP_GNU_const_type:
2380 {
2381 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2382 gcc_assert (o);
2383 dw2_asm_output_data_uleb128 (o, NULL);
2384 switch (val2->val_class)
2385 {
2386 case dw_val_class_const:
2387 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2388 dw2_asm_output_data (1, l, NULL);
2389 dw2_asm_output_data (l, val2->v.val_int, NULL);
2390 break;
2391 case dw_val_class_vec:
2392 {
2393 unsigned int elt_size = val2->v.val_vec.elt_size;
2394 unsigned int len = val2->v.val_vec.length;
2395 unsigned int i;
2396 unsigned char *p;
2397
2398 l = len * elt_size;
2399 dw2_asm_output_data (1, l, NULL);
2400 if (elt_size > sizeof (HOST_WIDE_INT))
2401 {
2402 elt_size /= 2;
2403 len *= 2;
2404 }
2405 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2406 i < len;
2407 i++, p += elt_size)
2408 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2409 "fp or vector constant word %u", i);
2410 }
2411 break;
2412 case dw_val_class_const_double:
2413 {
2414 unsigned HOST_WIDE_INT first, second;
2415 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2416
2417 dw2_asm_output_data (1, 2 * l, NULL);
2418 if (WORDS_BIG_ENDIAN)
2419 {
2420 first = val2->v.val_double.high;
2421 second = val2->v.val_double.low;
2422 }
2423 else
2424 {
2425 first = val2->v.val_double.low;
2426 second = val2->v.val_double.high;
2427 }
2428 dw2_asm_output_data (l, first, NULL);
2429 dw2_asm_output_data (l, second, NULL);
2430 }
2431 break;
2432 case dw_val_class_wide_int:
2433 {
2434 int i;
2435 int len = get_full_len (*val2->v.val_wide);
2436 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2437
2438 dw2_asm_output_data (1, len * l, NULL);
2439 if (WORDS_BIG_ENDIAN)
2440 for (i = len - 1; i >= 0; --i)
2441 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2442 else
2443 for (i = 0; i < len; ++i)
2444 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2445 }
2446 break;
2447 default:
2448 gcc_unreachable ();
2449 }
2450 }
2451 break;
2452 case DW_OP_regval_type:
2453 case DW_OP_GNU_regval_type:
2454 {
2455 unsigned r = val1->v.val_unsigned;
2456 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2457 gcc_assert (o);
2458 if (for_eh_or_skip >= 0)
2459 {
2460 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2461 gcc_assert (size_of_uleb128 (r)
2462 == size_of_uleb128 (val1->v.val_unsigned));
2463 }
2464 dw2_asm_output_data_uleb128 (r, NULL);
2465 dw2_asm_output_data_uleb128 (o, NULL);
2466 }
2467 break;
2468 case DW_OP_deref_type:
2469 case DW_OP_GNU_deref_type:
2470 {
2471 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2472 gcc_assert (o);
2473 dw2_asm_output_data (1, val1->v.val_int, NULL);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477 case DW_OP_convert:
2478 case DW_OP_reinterpret:
2479 case DW_OP_GNU_convert:
2480 case DW_OP_GNU_reinterpret:
2481 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2482 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2483 else
2484 {
2485 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2486 gcc_assert (o);
2487 dw2_asm_output_data_uleb128 (o, NULL);
2488 }
2489 break;
2490
2491 case DW_OP_GNU_parameter_ref:
2492 {
2493 unsigned long o;
2494 gcc_assert (val1->val_class == dw_val_class_die_ref);
2495 o = get_ref_die_offset (val1->v.val_die_ref.die);
2496 dw2_asm_output_data (4, o, NULL);
2497 }
2498 break;
2499
2500 default:
2501 /* Other codes have no operands. */
2502 break;
2503 }
2504 }
2505
2506 /* Output a sequence of location operations.
2507 The for_eh_or_skip parameter controls whether register numbers are
2508 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2509 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2510 info). This should be suppressed for the cases that have not been converted
2511 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2512
2513 void
2514 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2515 {
2516 for (; loc != NULL; loc = loc->dw_loc_next)
2517 {
2518 enum dwarf_location_atom opc = loc->dw_loc_opc;
2519 /* Output the opcode. */
2520 if (for_eh_or_skip >= 0
2521 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2522 {
2523 unsigned r = (opc - DW_OP_breg0);
2524 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2525 gcc_assert (r <= 31);
2526 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2527 }
2528 else if (for_eh_or_skip >= 0
2529 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2530 {
2531 unsigned r = (opc - DW_OP_reg0);
2532 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2533 gcc_assert (r <= 31);
2534 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2535 }
2536
2537 dw2_asm_output_data (1, opc,
2538 "%s", dwarf_stack_op_name (opc));
2539
2540 /* Output the operand(s) (if any). */
2541 output_loc_operands (loc, for_eh_or_skip);
2542 }
2543 }
2544
2545 /* Output location description stack opcode's operands (if any).
2546 The output is single bytes on a line, suitable for .cfi_escape. */
2547
2548 static void
2549 output_loc_operands_raw (dw_loc_descr_ref loc)
2550 {
2551 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2552 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2553
2554 switch (loc->dw_loc_opc)
2555 {
2556 case DW_OP_addr:
2557 case DW_OP_GNU_addr_index:
2558 case DW_OP_addrx:
2559 case DW_OP_GNU_const_index:
2560 case DW_OP_constx:
2561 case DW_OP_implicit_value:
2562 /* We cannot output addresses in .cfi_escape, only bytes. */
2563 gcc_unreachable ();
2564
2565 case DW_OP_const1u:
2566 case DW_OP_const1s:
2567 case DW_OP_pick:
2568 case DW_OP_deref_size:
2569 case DW_OP_xderef_size:
2570 fputc (',', asm_out_file);
2571 dw2_asm_output_data_raw (1, val1->v.val_int);
2572 break;
2573
2574 case DW_OP_const2u:
2575 case DW_OP_const2s:
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (2, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_const4u:
2581 case DW_OP_const4s:
2582 fputc (',', asm_out_file);
2583 dw2_asm_output_data_raw (4, val1->v.val_int);
2584 break;
2585
2586 case DW_OP_const8u:
2587 case DW_OP_const8s:
2588 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2589 fputc (',', asm_out_file);
2590 dw2_asm_output_data_raw (8, val1->v.val_int);
2591 break;
2592
2593 case DW_OP_skip:
2594 case DW_OP_bra:
2595 {
2596 int offset;
2597
2598 gcc_assert (val1->val_class == dw_val_class_loc);
2599 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2600
2601 fputc (',', asm_out_file);
2602 dw2_asm_output_data_raw (2, offset);
2603 }
2604 break;
2605
2606 case DW_OP_regx:
2607 {
2608 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2609 gcc_assert (size_of_uleb128 (r)
2610 == size_of_uleb128 (val1->v.val_unsigned));
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (r);
2613 }
2614 break;
2615
2616 case DW_OP_constu:
2617 case DW_OP_plus_uconst:
2618 case DW_OP_piece:
2619 fputc (',', asm_out_file);
2620 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2621 break;
2622
2623 case DW_OP_bit_piece:
2624 fputc (',', asm_out_file);
2625 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2626 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2627 break;
2628
2629 case DW_OP_consts:
2630 case DW_OP_breg0:
2631 case DW_OP_breg1:
2632 case DW_OP_breg2:
2633 case DW_OP_breg3:
2634 case DW_OP_breg4:
2635 case DW_OP_breg5:
2636 case DW_OP_breg6:
2637 case DW_OP_breg7:
2638 case DW_OP_breg8:
2639 case DW_OP_breg9:
2640 case DW_OP_breg10:
2641 case DW_OP_breg11:
2642 case DW_OP_breg12:
2643 case DW_OP_breg13:
2644 case DW_OP_breg14:
2645 case DW_OP_breg15:
2646 case DW_OP_breg16:
2647 case DW_OP_breg17:
2648 case DW_OP_breg18:
2649 case DW_OP_breg19:
2650 case DW_OP_breg20:
2651 case DW_OP_breg21:
2652 case DW_OP_breg22:
2653 case DW_OP_breg23:
2654 case DW_OP_breg24:
2655 case DW_OP_breg25:
2656 case DW_OP_breg26:
2657 case DW_OP_breg27:
2658 case DW_OP_breg28:
2659 case DW_OP_breg29:
2660 case DW_OP_breg30:
2661 case DW_OP_breg31:
2662 case DW_OP_fbreg:
2663 fputc (',', asm_out_file);
2664 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2665 break;
2666
2667 case DW_OP_bregx:
2668 {
2669 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2670 gcc_assert (size_of_uleb128 (r)
2671 == size_of_uleb128 (val1->v.val_unsigned));
2672 fputc (',', asm_out_file);
2673 dw2_asm_output_data_uleb128_raw (r);
2674 fputc (',', asm_out_file);
2675 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2676 }
2677 break;
2678
2679 case DW_OP_implicit_pointer:
2680 case DW_OP_entry_value:
2681 case DW_OP_const_type:
2682 case DW_OP_regval_type:
2683 case DW_OP_deref_type:
2684 case DW_OP_convert:
2685 case DW_OP_reinterpret:
2686 case DW_OP_GNU_implicit_pointer:
2687 case DW_OP_GNU_entry_value:
2688 case DW_OP_GNU_const_type:
2689 case DW_OP_GNU_regval_type:
2690 case DW_OP_GNU_deref_type:
2691 case DW_OP_GNU_convert:
2692 case DW_OP_GNU_reinterpret:
2693 case DW_OP_GNU_parameter_ref:
2694 gcc_unreachable ();
2695 break;
2696
2697 default:
2698 /* Other codes have no operands. */
2699 break;
2700 }
2701 }
2702
2703 void
2704 output_loc_sequence_raw (dw_loc_descr_ref loc)
2705 {
2706 while (1)
2707 {
2708 enum dwarf_location_atom opc = loc->dw_loc_opc;
2709 /* Output the opcode. */
2710 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2711 {
2712 unsigned r = (opc - DW_OP_breg0);
2713 r = DWARF2_FRAME_REG_OUT (r, 1);
2714 gcc_assert (r <= 31);
2715 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2716 }
2717 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2718 {
2719 unsigned r = (opc - DW_OP_reg0);
2720 r = DWARF2_FRAME_REG_OUT (r, 1);
2721 gcc_assert (r <= 31);
2722 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2723 }
2724 /* Output the opcode. */
2725 fprintf (asm_out_file, "%#x", opc);
2726 output_loc_operands_raw (loc);
2727
2728 if (!loc->dw_loc_next)
2729 break;
2730 loc = loc->dw_loc_next;
2731
2732 fputc (',', asm_out_file);
2733 }
2734 }
2735
2736 /* This function builds a dwarf location descriptor sequence from a
2737 dw_cfa_location, adding the given OFFSET to the result of the
2738 expression. */
2739
2740 struct dw_loc_descr_node *
2741 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2742 {
2743 struct dw_loc_descr_node *head, *tmp;
2744
2745 offset += cfa->offset;
2746
2747 if (cfa->indirect)
2748 {
2749 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2750 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2751 head->dw_loc_oprnd1.val_entry = NULL;
2752 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2753 add_loc_descr (&head, tmp);
2754 loc_descr_plus_const (&head, offset);
2755 }
2756 else
2757 head = new_reg_loc_descr (cfa->reg, offset);
2758
2759 return head;
2760 }
2761
2762 /* This function builds a dwarf location descriptor sequence for
2763 the address at OFFSET from the CFA when the stack is aligned to
2764 ALIGNMENT bytes. */
2765
2766 struct dw_loc_descr_node *
2767 build_cfa_aligned_loc (dw_cfa_location *cfa,
2768 poly_int64 offset, HOST_WIDE_INT alignment)
2769 {
2770 struct dw_loc_descr_node *head;
2771 unsigned int dwarf_fp
2772 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2773
2774 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2775 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2776 {
2777 head = new_reg_loc_descr (dwarf_fp, 0);
2778 add_loc_descr (&head, int_loc_descriptor (alignment));
2779 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2780 loc_descr_plus_const (&head, offset);
2781 }
2782 else
2783 head = new_reg_loc_descr (dwarf_fp, offset);
2784 return head;
2785 }
2786 \f
2787 /* And now, the support for symbolic debugging information. */
2788
2789 /* .debug_str support. */
2790
2791 static void dwarf2out_init (const char *);
2792 static void dwarf2out_finish (const char *);
2793 static void dwarf2out_early_finish (const char *);
2794 static void dwarf2out_assembly_start (void);
2795 static void dwarf2out_define (unsigned int, const char *);
2796 static void dwarf2out_undef (unsigned int, const char *);
2797 static void dwarf2out_start_source_file (unsigned, const char *);
2798 static void dwarf2out_end_source_file (unsigned);
2799 static void dwarf2out_function_decl (tree);
2800 static void dwarf2out_begin_block (unsigned, unsigned);
2801 static void dwarf2out_end_block (unsigned, unsigned);
2802 static bool dwarf2out_ignore_block (const_tree);
2803 static void dwarf2out_early_global_decl (tree);
2804 static void dwarf2out_late_global_decl (tree);
2805 static void dwarf2out_type_decl (tree, int);
2806 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2807 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2808 dw_die_ref);
2809 static void dwarf2out_abstract_function (tree);
2810 static void dwarf2out_var_location (rtx_insn *);
2811 static void dwarf2out_inline_entry (tree);
2812 static void dwarf2out_size_function (tree);
2813 static void dwarf2out_begin_function (tree);
2814 static void dwarf2out_end_function (unsigned int);
2815 static void dwarf2out_register_main_translation_unit (tree unit);
2816 static void dwarf2out_set_name (tree, tree);
2817 static void dwarf2out_register_external_die (tree decl, const char *sym,
2818 unsigned HOST_WIDE_INT off);
2819 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2820 unsigned HOST_WIDE_INT *off);
2821
2822 /* The debug hooks structure. */
2823
2824 const struct gcc_debug_hooks dwarf2_debug_hooks =
2825 {
2826 dwarf2out_init,
2827 dwarf2out_finish,
2828 dwarf2out_early_finish,
2829 dwarf2out_assembly_start,
2830 dwarf2out_define,
2831 dwarf2out_undef,
2832 dwarf2out_start_source_file,
2833 dwarf2out_end_source_file,
2834 dwarf2out_begin_block,
2835 dwarf2out_end_block,
2836 dwarf2out_ignore_block,
2837 dwarf2out_source_line,
2838 dwarf2out_begin_prologue,
2839 #if VMS_DEBUGGING_INFO
2840 dwarf2out_vms_end_prologue,
2841 dwarf2out_vms_begin_epilogue,
2842 #else
2843 debug_nothing_int_charstar,
2844 debug_nothing_int_charstar,
2845 #endif
2846 dwarf2out_end_epilogue,
2847 dwarf2out_begin_function,
2848 dwarf2out_end_function, /* end_function */
2849 dwarf2out_register_main_translation_unit,
2850 dwarf2out_function_decl, /* function_decl */
2851 dwarf2out_early_global_decl,
2852 dwarf2out_late_global_decl,
2853 dwarf2out_type_decl, /* type_decl */
2854 dwarf2out_imported_module_or_decl,
2855 dwarf2out_die_ref_for_decl,
2856 dwarf2out_register_external_die,
2857 debug_nothing_tree, /* deferred_inline_function */
2858 /* The DWARF 2 backend tries to reduce debugging bloat by not
2859 emitting the abstract description of inline functions until
2860 something tries to reference them. */
2861 dwarf2out_abstract_function, /* outlining_inline_function */
2862 debug_nothing_rtx_code_label, /* label */
2863 debug_nothing_int, /* handle_pch */
2864 dwarf2out_var_location,
2865 dwarf2out_inline_entry, /* inline_entry */
2866 dwarf2out_size_function, /* size_function */
2867 dwarf2out_switch_text_section,
2868 dwarf2out_set_name,
2869 1, /* start_end_main_source_file */
2870 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2871 };
2872
2873 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2874 {
2875 dwarf2out_init,
2876 debug_nothing_charstar,
2877 debug_nothing_charstar,
2878 dwarf2out_assembly_start,
2879 debug_nothing_int_charstar,
2880 debug_nothing_int_charstar,
2881 debug_nothing_int_charstar,
2882 debug_nothing_int,
2883 debug_nothing_int_int, /* begin_block */
2884 debug_nothing_int_int, /* end_block */
2885 debug_true_const_tree, /* ignore_block */
2886 dwarf2out_source_line, /* source_line */
2887 debug_nothing_int_int_charstar, /* begin_prologue */
2888 debug_nothing_int_charstar, /* end_prologue */
2889 debug_nothing_int_charstar, /* begin_epilogue */
2890 debug_nothing_int_charstar, /* end_epilogue */
2891 debug_nothing_tree, /* begin_function */
2892 debug_nothing_int, /* end_function */
2893 debug_nothing_tree, /* register_main_translation_unit */
2894 debug_nothing_tree, /* function_decl */
2895 debug_nothing_tree, /* early_global_decl */
2896 debug_nothing_tree, /* late_global_decl */
2897 debug_nothing_tree_int, /* type_decl */
2898 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2899 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2900 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2901 debug_nothing_tree, /* deferred_inline_function */
2902 debug_nothing_tree, /* outlining_inline_function */
2903 debug_nothing_rtx_code_label, /* label */
2904 debug_nothing_int, /* handle_pch */
2905 debug_nothing_rtx_insn, /* var_location */
2906 debug_nothing_tree, /* inline_entry */
2907 debug_nothing_tree, /* size_function */
2908 debug_nothing_void, /* switch_text_section */
2909 debug_nothing_tree_tree, /* set_name */
2910 0, /* start_end_main_source_file */
2911 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2912 };
2913 \f
2914 /* NOTE: In the comments in this file, many references are made to
2915 "Debugging Information Entries". This term is abbreviated as `DIE'
2916 throughout the remainder of this file. */
2917
2918 /* An internal representation of the DWARF output is built, and then
2919 walked to generate the DWARF debugging info. The walk of the internal
2920 representation is done after the entire program has been compiled.
2921 The types below are used to describe the internal representation. */
2922
2923 /* Whether to put type DIEs into their own section .debug_types instead
2924 of making them part of the .debug_info section. Only supported for
2925 DWARF v4 or higher, and only if the user didn't disable them through
2926 -fno-debug-types-section. It is more efficient to put them in
2927 separate comdat sections, since the linker will then be able to
2928 remove duplicates. But not all tools support .debug_types sections
2929 yet. For DWARF v5 or higher .debug_types doesn't exist any more;
2930 type units use the DW_UT_type unit type in .debug_info. For late LTO
2931 debug there should be almost no types emitted, so avoid enabling
2932 -fdebug-types-section there. */
2933
2934 #define use_debug_types (dwarf_version >= 4 \
2935 && flag_debug_types_section \
2936 && !in_lto_p)
2937
2938 /* Various DIE's use offsets relative to the beginning of the
2939 .debug_info section to refer to each other. */
2940
2941 typedef long int dw_offset;
2942
2943 struct comdat_type_node;
2944
2945 /* The entries in the line_info table more-or-less mirror the opcodes
2946 that are used in the real dwarf line table. Arrays of these entries
2947 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2948 supported. */
2949
2950 enum dw_line_info_opcode {
2951 /* Emit DW_LNE_set_address; the operand is the label index. */
2952 LI_set_address,
2953
2954 /* Emit a row to the matrix with the given line. This may be done
2955 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2956 special opcodes. */
2957 LI_set_line,
2958
2959 /* Emit a DW_LNS_set_file. */
2960 LI_set_file,
2961
2962 /* Emit a DW_LNS_set_column. */
2963 LI_set_column,
2964
2965 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2966 LI_negate_stmt,
2967
2968 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2969 LI_set_prologue_end,
2970 LI_set_epilogue_begin,
2971
2972 /* Emit a DW_LNE_set_discriminator. */
2973 LI_set_discriminator,
2974
2975 /* Output a Fixed Advance PC; the target PC is the label index; the
2976 base PC is the previous LI_adv_address or LI_set_address entry.
2977 We only use this when emitting debug views without assembler
2978 support, at explicit user request. Ideally, we should only use
2979 it when the offset might be zero but we can't tell: it's the only
2980 way to maybe change the PC without resetting the view number. */
2981 LI_adv_address
2982 };
2983
2984 typedef struct GTY(()) dw_line_info_struct {
2985 enum dw_line_info_opcode opcode;
2986 unsigned int val;
2987 } dw_line_info_entry;
2988
2989
2990 struct GTY(()) dw_line_info_table {
2991 /* The label that marks the end of this section. */
2992 const char *end_label;
2993
2994 /* The values for the last row of the matrix, as collected in the table.
2995 These are used to minimize the changes to the next row. */
2996 unsigned int file_num;
2997 unsigned int line_num;
2998 unsigned int column_num;
2999 int discrim_num;
3000 bool is_stmt;
3001 bool in_use;
3002
3003 /* This denotes the NEXT view number.
3004
3005 If it is 0, it is known that the NEXT view will be the first view
3006 at the given PC.
3007
3008 If it is -1, we're forcing the view number to be reset, e.g. at a
3009 function entry.
3010
3011 The meaning of other nonzero values depends on whether we're
3012 computing views internally or leaving it for the assembler to do
3013 so. If we're emitting them internally, view denotes the view
3014 number since the last known advance of PC. If we're leaving it
3015 for the assembler, it denotes the LVU label number that we're
3016 going to ask the assembler to assign. */
3017 var_loc_view view;
3018
3019 /* This counts the number of symbolic views emitted in this table
3020 since the latest view reset. Its max value, over all tables,
3021 sets symview_upper_bound. */
3022 var_loc_view symviews_since_reset;
3023
3024 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3025 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3026 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3027 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3028
3029 vec<dw_line_info_entry, va_gc> *entries;
3030 };
3031
3032 /* This is an upper bound for view numbers that the assembler may
3033 assign to symbolic views output in this translation unit. It is used to
3034 decide how big a field to use to represent view numbers in
3035 symview-classed attributes. */
3036
3037 static var_loc_view symview_upper_bound;
3038
3039 /* If we're keeping track of location views and their reset points, and
3040 INSN is a reset point (i.e., it necessarily advances the PC), mark
3041 the next view in TABLE as reset. */
3042
3043 static void
3044 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3045 {
3046 if (!debug_internal_reset_location_views)
3047 return;
3048
3049 /* Maybe turn (part of?) this test into a default target hook. */
3050 int reset = 0;
3051
3052 if (targetm.reset_location_view)
3053 reset = targetm.reset_location_view (insn);
3054
3055 if (reset)
3056 ;
3057 else if (JUMP_TABLE_DATA_P (insn))
3058 reset = 1;
3059 else if (GET_CODE (insn) == USE
3060 || GET_CODE (insn) == CLOBBER
3061 || GET_CODE (insn) == ASM_INPUT
3062 || asm_noperands (insn) >= 0)
3063 ;
3064 else if (get_attr_min_length (insn) > 0)
3065 reset = 1;
3066
3067 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3068 RESET_NEXT_VIEW (table->view);
3069 }
3070
3071 /* Each DIE attribute has a field specifying the attribute kind,
3072 a link to the next attribute in the chain, and an attribute value.
3073 Attributes are typically linked below the DIE they modify. */
3074
3075 typedef struct GTY(()) dw_attr_struct {
3076 enum dwarf_attribute dw_attr;
3077 dw_val_node dw_attr_val;
3078 }
3079 dw_attr_node;
3080
3081
3082 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3083 The children of each node form a circular list linked by
3084 die_sib. die_child points to the node *before* the "first" child node. */
3085
3086 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3087 union die_symbol_or_type_node
3088 {
3089 const char * GTY ((tag ("0"))) die_symbol;
3090 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3091 }
3092 GTY ((desc ("%0.comdat_type_p"))) die_id;
3093 vec<dw_attr_node, va_gc> *die_attr;
3094 dw_die_ref die_parent;
3095 dw_die_ref die_child;
3096 dw_die_ref die_sib;
3097 dw_die_ref die_definition; /* ref from a specification to its definition */
3098 dw_offset die_offset;
3099 unsigned long die_abbrev;
3100 int die_mark;
3101 unsigned int decl_id;
3102 enum dwarf_tag die_tag;
3103 /* Die is used and must not be pruned as unused. */
3104 BOOL_BITFIELD die_perennial_p : 1;
3105 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3106 /* For an external ref to die_symbol if die_offset contains an extra
3107 offset to that symbol. */
3108 BOOL_BITFIELD with_offset : 1;
3109 /* Whether this DIE was removed from the DIE tree, for example via
3110 prune_unused_types. We don't consider those present from the
3111 DIE lookup routines. */
3112 BOOL_BITFIELD removed : 1;
3113 /* Lots of spare bits. */
3114 }
3115 die_node;
3116
3117 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3118 static bool early_dwarf;
3119 static bool early_dwarf_finished;
3120 class set_early_dwarf {
3121 public:
3122 bool saved;
3123 set_early_dwarf () : saved(early_dwarf)
3124 {
3125 gcc_assert (! early_dwarf_finished);
3126 early_dwarf = true;
3127 }
3128 ~set_early_dwarf () { early_dwarf = saved; }
3129 };
3130
3131 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3132 #define FOR_EACH_CHILD(die, c, expr) do { \
3133 c = die->die_child; \
3134 if (c) do { \
3135 c = c->die_sib; \
3136 expr; \
3137 } while (c != die->die_child); \
3138 } while (0)
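/* For example,
     dw_die_ref c;
     FOR_EACH_CHILD (die, c, process (c));
   evaluates process (c) (a stand-in for any per-child operation) once for
   every child of DIE, following the circular die_sib links.  */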
3139
3140 /* The pubname structure */
3141
3142 typedef struct GTY(()) pubname_struct {
3143 dw_die_ref die;
3144 const char *name;
3145 }
3146 pubname_entry;
3147
3148
3149 struct GTY(()) dw_ranges {
3150 const char *label;
3151 /* If this is positive, it's a block number, otherwise it's a
3152 bitwise-negated index into dw_ranges_by_label. */
3153 int num;
3154 /* Index for the range list for DW_FORM_rnglistx. */
3155 unsigned int idx : 31;
3156 /* True if this range might possibly be in a different section
3157 from the previous entry. */
3158 unsigned int maybe_new_sec : 1;
3159 };
3160
3161 /* A structure to hold a macinfo entry. */
3162
3163 typedef struct GTY(()) macinfo_struct {
3164 unsigned char code;
3165 unsigned HOST_WIDE_INT lineno;
3166 const char *info;
3167 }
3168 macinfo_entry;
3169
3170
3171 struct GTY(()) dw_ranges_by_label {
3172 const char *begin;
3173 const char *end;
3174 };
3175
3176 /* The comdat type node structure. */
3177 struct GTY(()) comdat_type_node
3178 {
3179 dw_die_ref root_die;
3180 dw_die_ref type_die;
3181 dw_die_ref skeleton_die;
3182 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3183 comdat_type_node *next;
3184 };
3185
3186 /* A list of DIEs for which we can't determine ancestry (parent_die
3187 field) just yet. Later in dwarf2out_finish we will fill in the
3188 missing bits. */
3189 typedef struct GTY(()) limbo_die_struct {
3190 dw_die_ref die;
3191 /* The tree for which this DIE was created. We use this to
3192 determine ancestry later. */
3193 tree created_for;
3194 struct limbo_die_struct *next;
3195 }
3196 limbo_die_node;
3197
3198 typedef struct skeleton_chain_struct
3199 {
3200 dw_die_ref old_die;
3201 dw_die_ref new_die;
3202 struct skeleton_chain_struct *parent;
3203 }
3204 skeleton_chain_node;
3205
3206 /* Define a macro which returns nonzero for a TYPE_DECL which was
3207 implicitly generated for a type.
3208
3209 Note that, unlike the C front-end (which generates a NULL named
3210 TYPE_DECL node for each complete tagged type, each array type,
3211 and each function type node created) the C++ front-end generates
3212 a _named_ TYPE_DECL node for each tagged type node created.
3213 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3214 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3215 front-end, but for each type, tagged or not. */
3216
3217 #define TYPE_DECL_IS_STUB(decl) \
3218 (DECL_NAME (decl) == NULL_TREE \
3219 || (DECL_ARTIFICIAL (decl) \
3220 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3221 /* This is necessary for stub decls that \
3222 appear in nested inline functions. */ \
3223 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3224 && (decl_ultimate_origin (decl) \
3225 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3226
3227 /* Information concerning the compilation unit's programming
3228 language, and compiler version. */
3229
3230 /* Fixed size portion of the DWARF compilation unit header. */
3231 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3232 (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size \
3233 + (dwarf_version >= 5 ? 4 : 3))
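/* With the 32-bit DWARF format this is 4 (initial length) + 4 (section
   offset to the abbreviation table) + 4 (2-byte version, unit type byte
   and address size byte) = 12 bytes for DWARF 5, and 4 + 4 + 3 = 11 bytes
   for earlier versions, which have no unit type byte.  */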
3234
3235 /* Fixed size portion of the DWARF comdat type unit header. */
3236 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3237 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3238 + DWARF_TYPE_SIGNATURE_SIZE + dwarf_offset_size)
3239
3240 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3241 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3242 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3243
3244 /* Fixed size portion of public names info. */
3245 #define DWARF_PUBNAMES_HEADER_SIZE (2 * dwarf_offset_size + 2)
3246
3247 /* Fixed size portion of the address range info. */
3248 #define DWARF_ARANGES_HEADER_SIZE \
3249 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4, \
3250 DWARF2_ADDR_SIZE * 2) \
3251 - DWARF_INITIAL_LENGTH_SIZE)
3252
3253 /* Size of padding portion in the address range info. It must be
3254 aligned to twice the pointer size. */
3255 #define DWARF_ARANGES_PAD_SIZE \
3256 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4, \
3257 DWARF2_ADDR_SIZE * 2) \
3258 - (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4))
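/* As an example, with 4-byte initial length and offsets and 8-byte
   addresses, the 12-byte fixed header is rounded up to 16 bytes, giving
   a DWARF_ARANGES_HEADER_SIZE of 12 (the initial length itself is not
   counted) and 4 bytes of padding.  */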
3259
3260 /* Use assembler line directives if available. */
3261 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3262 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3263 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3264 #else
3265 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3266 #endif
3267 #endif
3268
3269 /* Use assembler views in line directives if available. */
3270 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3271 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3272 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3273 #else
3274 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3275 #endif
3276 #endif
3277
3278 /* Return true if GCC configure detected assembler support for .loc. */
3279
3280 bool
3281 dwarf2out_default_as_loc_support (void)
3282 {
3283 return DWARF2_ASM_LINE_DEBUG_INFO;
3284 #if (GCC_VERSION >= 3000)
3285 # undef DWARF2_ASM_LINE_DEBUG_INFO
3286 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3287 #endif
3288 }
3289
3290 /* Return true if GCC configure detected assembler support for views
3291 in .loc directives. */
3292
3293 bool
3294 dwarf2out_default_as_locview_support (void)
3295 {
3296 return DWARF2_ASM_VIEW_DEBUG_INFO;
3297 #if (GCC_VERSION >= 3000)
3298 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3299 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3300 #endif
3301 }
3302
3303 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3304 view computation, and it refers to a view identifier for which we
3305 will not emit a label because it is known to map to a view number
3306 zero. We won't allocate the bitmap if we're not using assembler
3307 support for location views, but we have to make the variable
3308 visible for GGC and for code that will be optimized out for lack of
3309 support but that's still parsed and compiled. We could abstract it
3310 out with macros, but it's not worth it. */
3311 static GTY(()) bitmap zero_view_p;
3312
3313 /* Evaluate to TRUE iff N is known to identify the first location view
3314 at its PC. When not using assembler location view computation,
3315 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3316 and view label numbers recorded in it are the ones known to be
3317 zero. */
3318 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3319 || (N) == (var_loc_view)-1 \
3320 || (zero_view_p \
3321 && bitmap_bit_p (zero_view_p, (N))))
3322
3323 /* Return true iff we're to emit .loc directives for the assembler to
3324 generate line number sections.
3325
3326 When we're not emitting views, all we need from the assembler is
3327 support for .loc directives.
3328
3329 If we are emitting views, we can only use the assembler's .loc
3330 support if it also supports views.
3331
3332 When the compiler is emitting the line number programs and
3333 computing view numbers itself, it resets view numbers at known PC
3334 changes and counts from that, and then it emits view numbers as
3335 literal constants in locviewlists. There are cases in which the
3336 compiler is not sure about PC changes, e.g. when extra alignment is
3337 requested for a label. In these cases, the compiler may not reset
3338 the view counter, and the potential PC advance in the line number
3339 program will use an opcode that does not reset the view counter
3340 even if the PC actually changes, so that compiler and debug info
3341 consumer can keep view numbers in sync.
3342
3343 When the compiler defers view computation to the assembler, it
3344 emits symbolic view numbers in locviewlists, with the exception of
3345 views known to be zero (forced resets, or reset after
3346 compiler-visible PC changes): instead of emitting symbols for
3347 these, we emit literal zero and assert the assembler agrees with
3348 the compiler's assessment. We could use symbolic views everywhere,
3349 instead of special-casing zero views, but then we'd be unable to
3350 optimize out locviewlists that contain only zeros. */
3351
3352 static bool
3353 output_asm_line_debug_info (void)
3354 {
3355 return (dwarf2out_as_loc_support
3356 && (dwarf2out_as_locview_support
3357 || !debug_variable_location_views));
3358 }
3359
3360 static bool asm_outputs_debug_line_str (void);
3361
3362 /* Minimum line offset in a special line info. opcode.
3363 This value was chosen to give a reasonable range of values. */
3364 #define DWARF_LINE_BASE -10
3365
3366 /* First special line opcode - leave room for the standard opcodes. */
3367 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3368
3369 /* Range of line offsets in a special line info. opcode. */
3370 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
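/* When both the line and the address advance, the line number program can
   use a single special opcode computed (per the DWARF spec) as
     opcode = (line_delta - DWARF_LINE_BASE)
              + DWARF_LINE_RANGE * addr_advance
              + DWARF_LINE_OPCODE_BASE
   provided the result fits in one byte; otherwise separate standard
   opcodes advance the line and address.  */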
3371
3372 /* Flag that indicates the initial value of the is_stmt_start flag.
3373 In the present implementation, we do not mark any lines as
3374 the beginning of a source statement, because that information
3375 is not made available by the GCC front-end. */
3376 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3377
3378 /* Maximum number of operations per instruction bundle. */
3379 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3380 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3381 #endif
3382
3383 /* This location is used by calc_die_sizes() to keep track
3384 of the offset of each DIE within the .debug_info section. */
3385 static unsigned long next_die_offset;
3386
3387 /* Record the root of the DIE's built for the current compilation unit. */
3388 static GTY(()) dw_die_ref single_comp_unit_die;
3389
3390 /* A list of type DIEs that have been separated into comdat sections. */
3391 static GTY(()) comdat_type_node *comdat_type_list;
3392
3393 /* A list of CU DIEs that have been separated. */
3394 static GTY(()) limbo_die_node *cu_die_list;
3395
3396 /* A list of DIEs with a NULL parent waiting to be relocated. */
3397 static GTY(()) limbo_die_node *limbo_die_list;
3398
3399 /* A list of DIEs for which we may have to generate
3400 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3401 static GTY(()) limbo_die_node *deferred_asm_name;
3402
3403 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3404 {
3405 typedef const char *compare_type;
3406
3407 static hashval_t hash (dwarf_file_data *);
3408 static bool equal (dwarf_file_data *, const char *);
3409 };
3410
3411 /* Filenames referenced by this compilation unit. */
3412 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3413
3414 struct decl_die_hasher : ggc_ptr_hash<die_node>
3415 {
3416 typedef tree compare_type;
3417
3418 static hashval_t hash (die_node *);
3419 static bool equal (die_node *, tree);
3420 };
3421 /* A hash table of references to DIE's that describe declarations.
3422 The key is a DECL_UID() which is a unique number identifying each decl. */
3423 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3424
3425 struct GTY ((for_user)) variable_value_struct {
3426 unsigned int decl_id;
3427 vec<dw_die_ref, va_gc> *dies;
3428 };
3429
3430 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3431 {
3432 typedef tree compare_type;
3433
3434 static hashval_t hash (variable_value_struct *);
3435 static bool equal (variable_value_struct *, tree);
3436 };
3437 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3438 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL which is
3439 the DECL_CONTEXT of the referenced VAR_DECLs. */
3440 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3441
3442 struct block_die_hasher : ggc_ptr_hash<die_struct>
3443 {
3444 static hashval_t hash (die_struct *);
3445 static bool equal (die_struct *, die_struct *);
3446 };
3447
3448 /* A hash table of references to DIE's that describe COMMON blocks.
3449 The key is DECL_UID() ^ die_parent. */
3450 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3451
3452 typedef struct GTY(()) die_arg_entry_struct {
3453 dw_die_ref die;
3454 tree arg;
3455 } die_arg_entry;
3456
3457
3458 /* Node of the variable location list. */
3459 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3460 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3461 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3462 in mode of the EXPR_LIST node and first EXPR_LIST operand
3463 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3464 location or NULL for padding. For larger bitsizes,
3465 mode is 0 and first operand is a CONCAT with bitsize
3466 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3467 NULL as second operand. */
3468 rtx GTY (()) loc;
3469 const char * GTY (()) label;
3470 struct var_loc_node * GTY (()) next;
3471 var_loc_view view;
3472 };
3473
3474 /* Variable location list. */
3475 struct GTY ((for_user)) var_loc_list_def {
3476 struct var_loc_node * GTY (()) first;
3477
3478 /* Pointer to the last or last-but-one element of the
3479 chained list. If the list is empty, both first and
3480 last are NULL; if the list contains just one node
3481 or the last node certainly is not redundant, it points
3482 to the last node; otherwise it points to the last but one.
3483 Do not mark it for GC because it is marked through the chain. */
3484 struct var_loc_node * GTY ((skip ("%h"))) last;
3485
3486 /* Pointer to the last element before section switch,
3487 if NULL, either sections weren't switched or first
3488 is after section switch. */
3489 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3490
3491 /* DECL_UID of the variable decl. */
3492 unsigned int decl_id;
3493 };
3494 typedef struct var_loc_list_def var_loc_list;
3495
3496 /* Call argument location list. */
3497 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3498 rtx GTY (()) call_arg_loc_note;
3499 const char * GTY (()) label;
3500 tree GTY (()) block;
3501 bool tail_call_p;
3502 rtx GTY (()) symbol_ref;
3503 struct call_arg_loc_node * GTY (()) next;
3504 };
3505
3506
3507 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3508 {
3509 typedef const_tree compare_type;
3510
3511 static hashval_t hash (var_loc_list *);
3512 static bool equal (var_loc_list *, const_tree);
3513 };
3514
3515 /* Table of decl location linked lists. */
3516 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3517
3518 /* Head and tail of call_arg_loc chain. */
3519 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3520 static struct call_arg_loc_node *call_arg_loc_last;
3521
3522 /* Number of call sites in the current function. */
3523 static int call_site_count = -1;
3524 /* Number of tail call sites in the current function. */
3525 static int tail_call_site_count = -1;
3526
3527 /* A cached location list. */
3528 struct GTY ((for_user)) cached_dw_loc_list_def {
3529 /* The DECL_UID of the decl that this entry describes. */
3530 unsigned int decl_id;
3531
3532 /* The cached location list. */
3533 dw_loc_list_ref loc_list;
3534 };
3535 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3536
3537 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3538 {
3539
3540 typedef const_tree compare_type;
3541
3542 static hashval_t hash (cached_dw_loc_list *);
3543 static bool equal (cached_dw_loc_list *, const_tree);
3544 };
3545
3546 /* Table of cached location lists. */
3547 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3548
3549 /* A vector of references to DIE's that are uniquely identified by their tag,
3550 presence/absence of children DIE's, and list of attribute/value pairs. */
3551 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3552
3553 /* A hash map to remember the stack usage for DWARF procedures. The value
3554 stored is the stack size difference between before the DWARF procedure
3555 invocation and after it returned. In other words, for a DWARF procedure
3556 that consumes N stack slots and that pushes M ones, this stores M - N. */
3557 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
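/* For example, a DWARF procedure that pops two values from the DWARF
   expression stack and pushes a single result would be recorded here with
   the value 1 - 2 == -1.  */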
3558
3559 /* A global counter for generating labels for line number data. */
3560 static unsigned int line_info_label_num;
3561
3562 /* The current table to which we should emit line number information
3563 for the current function. This will be set up at the beginning of
3564 assembly for the function. */
3565 static GTY(()) dw_line_info_table *cur_line_info_table;
3566
3567 /* The two default tables of line number info. */
3568 static GTY(()) dw_line_info_table *text_section_line_info;
3569 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3570
3571 /* The set of all non-default tables of line number info. */
3572 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3573
3574 /* A flag telling the pubnames/pubtypes export code whether there is an
3575 info section to refer to. */
3576 static bool info_section_emitted;
3577
3578 /* A pointer to the base of a table that contains a list of publicly
3579 accessible names. */
3580 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3581
3582 /* A pointer to the base of a table that contains a list of publicly
3583 accessible types. */
3584 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3585
3586 /* A pointer to the base of a table that contains a list of macro
3587 defines/undefines (and file start/end markers). */
3588 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3589
3590 /* True if .debug_macinfo or .debug_macros section is going to be
3591 emitted. */
3592 #define have_macinfo \
3593 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3594 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3595 && !macinfo_table->is_empty ())
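/* For instance, have_macinfo becomes true when compiling with -g3 (which
   raises debug_info_level to DINFO_LEVEL_VERBOSE) and at least one macro
   define/undef has been recorded in macinfo_table, except on XCOFF targets
   lacking the DWARF extras.  */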
3596
3597 /* Vector of dies for which we should generate .debug_ranges info. */
3598 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3599
3600 /* Vector of pairs of labels referenced in ranges_table. */
3601 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3602
3603 /* Whether we have location lists that need outputting. */
3604 static GTY(()) bool have_location_lists;
3605
3606 /* Unique label counter. */
3607 static GTY(()) unsigned int loclabel_num;
3608
3609 /* Unique label counter for point-of-call tables. */
3610 static GTY(()) unsigned int poc_label_num;
3611
3612 /* The last file entry emitted by maybe_emit_file(). */
3613 static GTY(()) struct dwarf_file_data * last_emitted_file;
3614
3615 /* Number of internal labels generated by gen_internal_sym(). */
3616 static GTY(()) int label_num;
3617
3618 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3619
3620 /* Instances of generic types for which we need to generate debug
3621 info that describe their generic parameters and arguments. That
3622 generation needs to happen once all types are properly laid out so
3623 we do it at the end of compilation. */
3624 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3625
3626 /* Offset from the "steady-state frame pointer" to the frame base,
3627 within the current function. */
3628 static poly_int64 frame_pointer_fb_offset;
3629 static bool frame_pointer_fb_offset_valid;
3630
3631 static vec<dw_die_ref> base_types;
3632
3633 /* Flags to represent a set of attribute classes for attributes that represent
3634 a scalar value (bounds, pointers, ...). */
3635 enum dw_scalar_form
3636 {
3637 dw_scalar_form_constant = 0x01,
3638 dw_scalar_form_exprloc = 0x02,
3639 dw_scalar_form_reference = 0x04
3640 };
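/* Illustrative use (a sketch, not a call from a specific site): the helpers
   that emit bound and size attributes take a mask of these flags, e.g.

     int forms = dw_scalar_form_constant | dw_scalar_form_exprloc;

   meaning the value may be emitted either as a constant or as a DWARF
   expression, but not as a reference to another DIE.  */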
3641
3642 /* Forward declarations for functions defined in this file. */
3643
3644 static int is_pseudo_reg (const_rtx);
3645 static tree type_main_variant (tree);
3646 static int is_tagged_type (const_tree);
3647 static const char *dwarf_tag_name (unsigned);
3648 static const char *dwarf_attr_name (unsigned);
3649 static const char *dwarf_form_name (unsigned);
3650 static tree decl_ultimate_origin (const_tree);
3651 static tree decl_class_context (tree);
3652 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3653 static inline enum dw_val_class AT_class (dw_attr_node *);
3654 static inline unsigned int AT_index (dw_attr_node *);
3655 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3656 static inline unsigned AT_flag (dw_attr_node *);
3657 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3658 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3659 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3660 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3661 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3662 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3663 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3664 unsigned int, unsigned char *);
3665 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3666 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3667 static inline const char *AT_string (dw_attr_node *);
3668 static enum dwarf_form AT_string_form (dw_attr_node *);
3669 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3670 static void add_AT_specification (dw_die_ref, dw_die_ref);
3671 static inline dw_die_ref AT_ref (dw_attr_node *);
3672 static inline int AT_ref_external (dw_attr_node *);
3673 static inline void set_AT_ref_external (dw_attr_node *, int);
3674 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3675 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3676 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3677 dw_loc_list_ref);
3678 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3679 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3681 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3682 static void remove_addr_table_entry (addr_table_entry *);
3683 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3684 static inline rtx AT_addr (dw_attr_node *);
3685 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3686 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3687 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3688 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3689 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3690 unsigned long, bool);
3691 static inline const char *AT_lbl (dw_attr_node *);
3692 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3693 static const char *get_AT_low_pc (dw_die_ref);
3694 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3695 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3696 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3697 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3698 static bool is_c (void);
3699 static bool is_cxx (void);
3700 static bool is_cxx (const_tree);
3701 static bool is_fortran (void);
3702 static bool is_ada (void);
3703 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3704 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3705 static void add_child_die (dw_die_ref, dw_die_ref);
3706 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3707 static dw_die_ref lookup_type_die (tree);
3708 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3709 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3710 static void equate_type_number_to_die (tree, dw_die_ref);
3711 static dw_die_ref lookup_decl_die (tree);
3712 static var_loc_list *lookup_decl_loc (const_tree);
3713 static void equate_decl_number_to_die (tree, dw_die_ref);
3714 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3715 static void print_spaces (FILE *);
3716 static void print_die (dw_die_ref, FILE *);
3717 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3718 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3719 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3720 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3721 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3722 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3723 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3724 struct md5_ctx *, int *);
3725 struct checksum_attributes;
3726 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3727 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3728 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3729 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3730 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3731 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3732 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3733 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3734 static int is_type_die (dw_die_ref);
3735 static inline bool is_template_instantiation (dw_die_ref);
3736 static int is_declaration_die (dw_die_ref);
3737 static int should_move_die_to_comdat (dw_die_ref);
3738 static dw_die_ref clone_as_declaration (dw_die_ref);
3739 static dw_die_ref clone_die (dw_die_ref);
3740 static dw_die_ref clone_tree (dw_die_ref);
3741 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3742 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3743 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3744 static dw_die_ref generate_skeleton (dw_die_ref);
3745 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3746 dw_die_ref,
3747 dw_die_ref);
3748 static void break_out_comdat_types (dw_die_ref);
3749 static void copy_decls_for_unworthy_types (dw_die_ref);
3750
3751 static void add_sibling_attributes (dw_die_ref);
3752 static void output_location_lists (dw_die_ref);
3753 static int constant_size (unsigned HOST_WIDE_INT);
3754 static unsigned long size_of_die (dw_die_ref);
3755 static void calc_die_sizes (dw_die_ref);
3756 static void calc_base_type_die_sizes (void);
3757 static void mark_dies (dw_die_ref);
3758 static void unmark_dies (dw_die_ref);
3759 static void unmark_all_dies (dw_die_ref);
3760 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3761 static unsigned long size_of_aranges (void);
3762 static enum dwarf_form value_format (dw_attr_node *);
3763 static void output_value_format (dw_attr_node *);
3764 static void output_abbrev_section (void);
3765 static void output_die_abbrevs (unsigned long, dw_die_ref);
3766 static void output_die (dw_die_ref);
3767 static void output_compilation_unit_header (enum dwarf_unit_type);
3768 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3769 static void output_comdat_type_unit (comdat_type_node *, bool);
3770 static const char *dwarf2_name (tree, int);
3771 static void add_pubname (tree, dw_die_ref);
3772 static void add_enumerator_pubname (const char *, dw_die_ref);
3773 static void add_pubname_string (const char *, dw_die_ref);
3774 static void add_pubtype (tree, dw_die_ref);
3775 static void output_pubnames (vec<pubname_entry, va_gc> *);
3776 static void output_aranges (void);
3777 static unsigned int add_ranges (const_tree, bool = false);
3778 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3779 bool *, bool);
3780 static void output_ranges (void);
3781 static dw_line_info_table *new_line_info_table (void);
3782 static void output_line_info (bool);
3783 static void output_file_names (void);
3784 static dw_die_ref base_type_die (tree, bool);
3785 static int is_base_type (tree);
3786 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3787 static int decl_quals (const_tree);
3788 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3789 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3790 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3791 static unsigned int dbx_reg_number (const_rtx);
3792 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3793 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3794 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3795 enum var_init_status);
3796 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3797 enum var_init_status);
3798 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3799 enum var_init_status);
3800 static int is_based_loc (const_rtx);
3801 static bool resolve_one_addr (rtx *);
3802 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3803 enum var_init_status);
3804 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3805 enum var_init_status);
3806 struct loc_descr_context;
3807 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3808 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3809 static dw_loc_list_ref loc_list_from_tree (tree, int,
3810 struct loc_descr_context *);
3811 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3812 struct loc_descr_context *);
3813 static tree field_type (const_tree);
3814 static unsigned int simple_type_align_in_bits (const_tree);
3815 static unsigned int simple_decl_align_in_bits (const_tree);
3816 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3817 struct vlr_context;
3818 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3819 HOST_WIDE_INT *);
3820 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3821 dw_loc_list_ref);
3822 static void add_data_member_location_attribute (dw_die_ref, tree,
3823 struct vlr_context *);
3824 static bool add_const_value_attribute (dw_die_ref, rtx);
3825 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3826 static void insert_wide_int (const wide_int &, unsigned char *, int);
3827 static void insert_float (const_rtx, unsigned char *);
3828 static rtx rtl_for_decl_location (tree);
3829 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3830 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3831 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3832 static void add_name_attribute (dw_die_ref, const char *);
3833 static void add_desc_attribute (dw_die_ref, tree);
3834 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3835 static void add_comp_dir_attribute (dw_die_ref);
3836 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3837 struct loc_descr_context *);
3838 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3839 struct loc_descr_context *);
3840 static void add_subscript_info (dw_die_ref, tree, bool);
3841 static void add_byte_size_attribute (dw_die_ref, tree);
3842 static void add_alignment_attribute (dw_die_ref, tree);
3843 static void add_bit_offset_attribute (dw_die_ref, tree);
3844 static void add_bit_size_attribute (dw_die_ref, tree);
3845 static void add_prototyped_attribute (dw_die_ref, tree);
3846 static void add_abstract_origin_attribute (dw_die_ref, tree);
3847 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3848 static void add_src_coords_attributes (dw_die_ref, tree);
3849 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3850 static void add_discr_value (dw_die_ref, dw_discr_value *);
3851 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3852 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3853 static dw_die_ref scope_die_for (tree, dw_die_ref);
3854 static inline int local_scope_p (dw_die_ref);
3855 static inline int class_scope_p (dw_die_ref);
3856 static inline int class_or_namespace_scope_p (dw_die_ref);
3857 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3858 static void add_calling_convention_attribute (dw_die_ref, tree);
3859 static const char *type_tag (const_tree);
3860 static tree member_declared_type (const_tree);
3861 #if 0
3862 static const char *decl_start_label (tree);
3863 #endif
3864 static void gen_array_type_die (tree, dw_die_ref);
3865 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3866 #if 0
3867 static void gen_entry_point_die (tree, dw_die_ref);
3868 #endif
3869 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3870 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3871 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3872 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3873 static void gen_formal_types_die (tree, dw_die_ref);
3874 static void gen_subprogram_die (tree, dw_die_ref);
3875 static void gen_variable_die (tree, tree, dw_die_ref);
3876 static void gen_const_die (tree, dw_die_ref);
3877 static void gen_label_die (tree, dw_die_ref);
3878 static void gen_lexical_block_die (tree, dw_die_ref);
3879 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3880 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3881 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3882 static dw_die_ref gen_compile_unit_die (const char *);
3883 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3884 static void gen_member_die (tree, dw_die_ref);
3885 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3886 enum debug_info_usage);
3887 static void gen_subroutine_type_die (tree, dw_die_ref);
3888 static void gen_typedef_die (tree, dw_die_ref);
3889 static void gen_type_die (tree, dw_die_ref);
3890 static void gen_block_die (tree, dw_die_ref);
3891 static void decls_for_scope (tree, dw_die_ref, bool = true);
3892 static bool is_naming_typedef_decl (const_tree);
3893 static inline dw_die_ref get_context_die (tree);
3894 static void gen_namespace_die (tree, dw_die_ref);
3895 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3896 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3897 static dw_die_ref force_decl_die (tree);
3898 static dw_die_ref force_type_die (tree);
3899 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3900 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3901 static struct dwarf_file_data * lookup_filename (const char *);
3902 static void retry_incomplete_types (void);
3903 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3904 static void gen_generic_params_dies (tree);
3905 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3906 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3907 static void splice_child_die (dw_die_ref, dw_die_ref);
3908 static int file_info_cmp (const void *, const void *);
3909 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3910 const char *, var_loc_view, const char *);
3911 static void output_loc_list (dw_loc_list_ref);
3912 static char *gen_internal_sym (const char *);
3913 static bool want_pubnames (void);
3914
3915 static void prune_unmark_dies (dw_die_ref);
3916 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3917 static void prune_unused_types_mark (dw_die_ref, int);
3918 static void prune_unused_types_walk (dw_die_ref);
3919 static void prune_unused_types_walk_attribs (dw_die_ref);
3920 static void prune_unused_types_prune (dw_die_ref);
3921 static void prune_unused_types (void);
3922 static int maybe_emit_file (struct dwarf_file_data *fd);
3923 static inline const char *AT_vms_delta1 (dw_attr_node *);
3924 static inline const char *AT_vms_delta2 (dw_attr_node *);
3925 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3926 const char *, const char *);
3927 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3928 static void gen_remaining_tmpl_value_param_die_attribute (void);
3929 static bool generic_type_p (tree);
3930 static void schedule_generic_params_dies_gen (tree t);
3931 static void gen_scheduled_generic_parms_dies (void);
3932 static void resolve_variable_values (void);
3933
3934 static const char *comp_dir_string (void);
3935
3936 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3937
3938 /* enum for tracking thread-local variables whose address is really an offset
3939 relative to the TLS pointer, which will need link-time relocation, but will
3940 not need relocation by the DWARF consumer. */
3941
3942 enum dtprel_bool
3943 {
3944 dtprel_false = 0,
3945 dtprel_true = 1
3946 };
3947
3948 /* Return the operator to use for an address of a variable. For dtprel_true, we
3949 use DW_OP_const*. For regular variables, which need both link-time
3950 relocation and consumer-level relocation (e.g., to account for shared objects
3951 loaded at a random address), we use DW_OP_addr*. */
3952
3953 static inline enum dwarf_location_atom
3954 dw_addr_op (enum dtprel_bool dtprel)
3955 {
3956 if (dtprel == dtprel_true)
3957 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3958 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3959 else
3960 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3961 }
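/* For illustration, on a target with DWARF2_ADDR_SIZE == 8:
   dw_addr_op (dtprel_true) yields DW_OP_const8u, or dwarf_OP (DW_OP_constx)
   under -gsplit-dwarf, while dw_addr_op (dtprel_false) yields DW_OP_addr
   resp. dwarf_OP (DW_OP_addrx).  */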
3962
3963 /* Return a pointer to a newly allocated address location description. If
3964 dwarf_split_debug_info is true, then record the address with the appropriate
3965 relocation. */
3966 static inline dw_loc_descr_ref
3967 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3968 {
3969 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3970
3971 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3972 ref->dw_loc_oprnd1.v.val_addr = addr;
3973 ref->dtprel = dtprel;
3974 if (dwarf_split_debug_info)
3975 ref->dw_loc_oprnd1.val_entry
3976 = add_addr_table_entry (addr,
3977 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3978 else
3979 ref->dw_loc_oprnd1.val_entry = NULL;
3980
3981 return ref;
3982 }
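/* Usage sketch (the decl below is hypothetical): for an ordinary global
   variable whose DECL_RTL is a MEM of a SYMBOL_REF,

     new_addr_loc_descr (XEXP (DECL_RTL (decl), 0), dtprel_false)

   yields the single DW_OP_addr (or DW_OP_addrx) operation that can serve as
   the variable's DW_AT_location expression.  */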
3983
3984 /* Section names used to hold DWARF debugging information. */
3985
3986 #ifndef DEBUG_INFO_SECTION
3987 #define DEBUG_INFO_SECTION ".debug_info"
3988 #endif
3989 #ifndef DEBUG_DWO_INFO_SECTION
3990 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3991 #endif
3992 #ifndef DEBUG_LTO_INFO_SECTION
3993 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3994 #endif
3995 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3996 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3997 #endif
3998 #ifndef DEBUG_ABBREV_SECTION
3999 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
4000 #endif
4001 #ifndef DEBUG_LTO_ABBREV_SECTION
4002 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
4003 #endif
4004 #ifndef DEBUG_DWO_ABBREV_SECTION
4005 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
4006 #endif
4007 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
4008 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
4009 #endif
4010 #ifndef DEBUG_ARANGES_SECTION
4011 #define DEBUG_ARANGES_SECTION ".debug_aranges"
4012 #endif
4013 #ifndef DEBUG_ADDR_SECTION
4014 #define DEBUG_ADDR_SECTION ".debug_addr"
4015 #endif
4016 #ifndef DEBUG_MACINFO_SECTION
4017 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4018 #endif
4019 #ifndef DEBUG_LTO_MACINFO_SECTION
4020 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4021 #endif
4022 #ifndef DEBUG_DWO_MACINFO_SECTION
4023 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4024 #endif
4025 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4026 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4027 #endif
4028 #ifndef DEBUG_MACRO_SECTION
4029 #define DEBUG_MACRO_SECTION ".debug_macro"
4030 #endif
4031 #ifndef DEBUG_LTO_MACRO_SECTION
4032 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4033 #endif
4034 #ifndef DEBUG_DWO_MACRO_SECTION
4035 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4036 #endif
4037 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4038 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4039 #endif
4040 #ifndef DEBUG_LINE_SECTION
4041 #define DEBUG_LINE_SECTION ".debug_line"
4042 #endif
4043 #ifndef DEBUG_LTO_LINE_SECTION
4044 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4045 #endif
4046 #ifndef DEBUG_DWO_LINE_SECTION
4047 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4048 #endif
4049 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4050 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4051 #endif
4052 #ifndef DEBUG_LOC_SECTION
4053 #define DEBUG_LOC_SECTION ".debug_loc"
4054 #endif
4055 #ifndef DEBUG_DWO_LOC_SECTION
4056 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4057 #endif
4058 #ifndef DEBUG_LOCLISTS_SECTION
4059 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4060 #endif
4061 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4062 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4063 #endif
4064 #ifndef DEBUG_PUBNAMES_SECTION
4065 #define DEBUG_PUBNAMES_SECTION \
4066 ((debug_generate_pub_sections == 2) \
4067 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4068 #endif
4069 #ifndef DEBUG_PUBTYPES_SECTION
4070 #define DEBUG_PUBTYPES_SECTION \
4071 ((debug_generate_pub_sections == 2) \
4072 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4073 #endif
4074 #ifndef DEBUG_STR_OFFSETS_SECTION
4075 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4076 #endif
4077 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4078 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4079 #endif
4080 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4081 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4082 #endif
4083 #ifndef DEBUG_STR_SECTION
4084 #define DEBUG_STR_SECTION ".debug_str"
4085 #endif
4086 #ifndef DEBUG_LTO_STR_SECTION
4087 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4088 #endif
4089 #ifndef DEBUG_STR_DWO_SECTION
4090 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4091 #endif
4092 #ifndef DEBUG_LTO_STR_DWO_SECTION
4093 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4094 #endif
4095 #ifndef DEBUG_RANGES_SECTION
4096 #define DEBUG_RANGES_SECTION ".debug_ranges"
4097 #endif
4098 #ifndef DEBUG_RNGLISTS_SECTION
4099 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4100 #endif
4101 #ifndef DEBUG_LINE_STR_SECTION
4102 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4103 #endif
4104 #ifndef DEBUG_LTO_LINE_STR_SECTION
4105 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4106 #endif
4107
4108 /* Standard ELF section names for compiled code and data. */
4109 #ifndef TEXT_SECTION_NAME
4110 #define TEXT_SECTION_NAME ".text"
4111 #endif
4112
4113 /* Section flags for .debug_str section. */
4114 #define DEBUG_STR_SECTION_FLAGS \
4115 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4116 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4117 : SECTION_DEBUG)
4118
4119 /* Section flags for .debug_str.dwo section. */
4120 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4121
4122 /* Attribute used to refer to the macro section. */
4123 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4124 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
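/* For example, -gdwarf-5 selects DW_AT_macros, -gdwarf-4 together with
   -gstrict-dwarf falls back to the standard DW_AT_macro_info, and otherwise
   the GNU extension DW_AT_GNU_macros is used.  */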
4125
4126 /* Labels we insert at the beginning of sections so that we can reference
4127 them instead of the section names themselves. */
4128
4129 #ifndef TEXT_SECTION_LABEL
4130 #define TEXT_SECTION_LABEL "Ltext"
4131 #endif
4132 #ifndef COLD_TEXT_SECTION_LABEL
4133 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4134 #endif
4135 #ifndef DEBUG_LINE_SECTION_LABEL
4136 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4137 #endif
4138 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4139 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4140 #endif
4141 #ifndef DEBUG_INFO_SECTION_LABEL
4142 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4143 #endif
4144 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4145 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4146 #endif
4147 #ifndef DEBUG_ABBREV_SECTION_LABEL
4148 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4149 #endif
4150 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4151 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4152 #endif
4153 #ifndef DEBUG_ADDR_SECTION_LABEL
4154 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4155 #endif
4156 #ifndef DEBUG_LOC_SECTION_LABEL
4157 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4158 #endif
4159 #ifndef DEBUG_RANGES_SECTION_LABEL
4160 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4161 #endif
4162 #ifndef DEBUG_MACINFO_SECTION_LABEL
4163 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4164 #endif
4165 #ifndef DEBUG_MACRO_SECTION_LABEL
4166 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4167 #endif
4168 #define SKELETON_COMP_DIE_ABBREV 1
4169 #define SKELETON_TYPE_DIE_ABBREV 2
4170
4171 /* Definitions of defaults for formats and names of various special
4172 (artificial) labels which may be generated within this file (when the -g
4173 option is used and DWARF2_DEBUGGING_INFO is in effect).
4174 If necessary, these may be overridden from within the tm.h file, but
4175 typically, overriding these defaults is unnecessary. */
4176
4177 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4178 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4179 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4180 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4181 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4182 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4183 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4184 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4185 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4186 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4187 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4188 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4189 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4190 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4191 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4192
4193 #ifndef TEXT_END_LABEL
4194 #define TEXT_END_LABEL "Letext"
4195 #endif
4196 #ifndef COLD_END_LABEL
4197 #define COLD_END_LABEL "Letext_cold"
4198 #endif
4199 #ifndef BLOCK_BEGIN_LABEL
4200 #define BLOCK_BEGIN_LABEL "LBB"
4201 #endif
4202 #ifndef BLOCK_INLINE_ENTRY_LABEL
4203 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4204 #endif
4205 #ifndef BLOCK_END_LABEL
4206 #define BLOCK_END_LABEL "LBE"
4207 #endif
4208 #ifndef LINE_CODE_LABEL
4209 #define LINE_CODE_LABEL "LM"
4210 #endif
4211
4212 \f
4213 /* Return the root of the DIE's built for the current compilation unit. */
4214 static dw_die_ref
4215 comp_unit_die (void)
4216 {
4217 if (!single_comp_unit_die)
4218 single_comp_unit_die = gen_compile_unit_die (NULL);
4219 return single_comp_unit_die;
4220 }
4221
4222 /* We allow a language front-end to designate a function that is to be
4223 called to "demangle" any name before it is put into a DIE. */
4224
4225 static const char *(*demangle_name_func) (const char *);
4226
4227 void
4228 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4229 {
4230 demangle_name_func = func;
4231 }
4232
4233 /* Test if rtl node points to a pseudo register. */
4234
4235 static inline int
4236 is_pseudo_reg (const_rtx rtl)
4237 {
4238 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4239 || (GET_CODE (rtl) == SUBREG
4240 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4241 }
4242
4243 /* Return a reference to a type, with its const and volatile qualifiers
4244 removed. */
4245
4246 static inline tree
4247 type_main_variant (tree type)
4248 {
4249 type = TYPE_MAIN_VARIANT (type);
4250
4251 /* ??? There really should be only one main variant among any group of
4252 variants of a given type (and all of the MAIN_VARIANT values for all
4253 members of the group should point to that one type) but sometimes the C
4254 front-end messes this up for array types, so we work around that bug
4255 here. */
4256 if (TREE_CODE (type) == ARRAY_TYPE)
4257 while (type != TYPE_MAIN_VARIANT (type))
4258 type = TYPE_MAIN_VARIANT (type);
4259
4260 return type;
4261 }
4262
4263 /* Return nonzero if the given type node represents a tagged type. */
4264
4265 static inline int
4266 is_tagged_type (const_tree type)
4267 {
4268 enum tree_code code = TREE_CODE (type);
4269
4270 return (code == RECORD_TYPE || code == UNION_TYPE
4271 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4272 }
4273
4274 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4275
4276 static void
4277 get_ref_die_offset_label (char *label, dw_die_ref ref)
4278 {
4279 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4280 }
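/* E.g. for a DIE whose die_offset is 45 this yields something like
   "Ldebug_info0+45" (the exact spelling of debug_info_section_label is
   target dependent).  */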
4281
4282 /* Return die_offset of a DIE reference to a base type. */
4283
4284 static unsigned long int
4285 get_base_type_offset (dw_die_ref ref)
4286 {
4287 if (ref->die_offset)
4288 return ref->die_offset;
4289 if (comp_unit_die ()->die_abbrev)
4290 {
4291 calc_base_type_die_sizes ();
4292 gcc_assert (ref->die_offset);
4293 }
4294 return ref->die_offset;
4295 }
4296
4297 /* Return die_offset of a DIE reference other than base type. */
4298
4299 static unsigned long int
4300 get_ref_die_offset (dw_die_ref ref)
4301 {
4302 gcc_assert (ref->die_offset);
4303 return ref->die_offset;
4304 }
4305
4306 /* Convert a DIE tag into its string name. */
4307
4308 static const char *
4309 dwarf_tag_name (unsigned int tag)
4310 {
4311 const char *name = get_DW_TAG_name (tag);
4312
4313 if (name != NULL)
4314 return name;
4315
4316 return "DW_TAG_<unknown>";
4317 }
4318
4319 /* Convert a DWARF attribute code into its string name. */
4320
4321 static const char *
4322 dwarf_attr_name (unsigned int attr)
4323 {
4324 const char *name;
4325
4326 switch (attr)
4327 {
4328 #if VMS_DEBUGGING_INFO
4329 case DW_AT_HP_prologue:
4330 return "DW_AT_HP_prologue";
4331 #else
4332 case DW_AT_MIPS_loop_unroll_factor:
4333 return "DW_AT_MIPS_loop_unroll_factor";
4334 #endif
4335
4336 #if VMS_DEBUGGING_INFO
4337 case DW_AT_HP_epilogue:
4338 return "DW_AT_HP_epilogue";
4339 #else
4340 case DW_AT_MIPS_stride:
4341 return "DW_AT_MIPS_stride";
4342 #endif
4343 }
4344
4345 name = get_DW_AT_name (attr);
4346
4347 if (name != NULL)
4348 return name;
4349
4350 return "DW_AT_<unknown>";
4351 }
4352
4353 /* Convert a DWARF value form code into its string name. */
4354
4355 static const char *
4356 dwarf_form_name (unsigned int form)
4357 {
4358 const char *name = get_DW_FORM_name (form);
4359
4360 if (name != NULL)
4361 return name;
4362
4363 return "DW_FORM_<unknown>";
4364 }
4365 \f
4366 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4367 instance of an inlined instance of a decl which is local to an inline
4368 function, so we have to trace all of the way back through the origin chain
4369 to find out what sort of node actually served as the original seed for the
4370 given block. */
4371
4372 static tree
4373 decl_ultimate_origin (const_tree decl)
4374 {
4375 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4376 return NULL_TREE;
4377
4378 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4379 we're trying to output the abstract instance of this function. */
4380 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4381 return NULL_TREE;
4382
4383 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4384 most distant ancestor, this should never happen. */
4385 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4386
4387 return DECL_ABSTRACT_ORIGIN (decl);
4388 }
4389
4390 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4391 of a virtual function may refer to a base class, so we check the 'this'
4392 parameter. */
4393
4394 static tree
4395 decl_class_context (tree decl)
4396 {
4397 tree context = NULL_TREE;
4398
4399 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4400 context = DECL_CONTEXT (decl);
4401 else
4402 context = TYPE_MAIN_VARIANT
4403 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4404
4405 if (context && !TYPE_P (context))
4406 context = NULL_TREE;
4407
4408 return context;
4409 }
4410 \f
4411 /* Add an attribute/value pair to a DIE. */
4412
4413 static inline void
4414 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4415 {
4416 /* Maybe this should be an assert? */
4417 if (die == NULL)
4418 return;
4419
4420 if (flag_checking)
4421 {
4422 /* Check we do not add duplicate attrs. Can't use get_AT here
4423 because that recurses to the specification/abstract origin DIE. */
4424 dw_attr_node *a;
4425 unsigned ix;
4426 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4427 gcc_assert (a->dw_attr != attr->dw_attr);
4428 }
4429
4430 vec_safe_reserve (die->die_attr, 1);
4431 vec_safe_push (die->die_attr, *attr);
4432 }
4433
4434 static inline enum dw_val_class
4435 AT_class (dw_attr_node *a)
4436 {
4437 return a->dw_attr_val.val_class;
4438 }
4439
4440 /* Return the index for any attribute that will be referenced with a
4441 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4442 indices are stored in dw_attr_val.v.val_str for reference counting
4443 pruning. */
4444
4445 static inline unsigned int
4446 AT_index (dw_attr_node *a)
4447 {
4448 if (AT_class (a) == dw_val_class_str)
4449 return a->dw_attr_val.v.val_str->index;
4450 else if (a->dw_attr_val.val_entry != NULL)
4451 return a->dw_attr_val.val_entry->index;
4452 return NOT_INDEXED;
4453 }
4454
4455 /* Add a flag value attribute to a DIE. */
4456
4457 static inline void
4458 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4459 {
4460 dw_attr_node attr;
4461
4462 attr.dw_attr = attr_kind;
4463 attr.dw_attr_val.val_class = dw_val_class_flag;
4464 attr.dw_attr_val.val_entry = NULL;
4465 attr.dw_attr_val.v.val_flag = flag;
4466 add_dwarf_attr (die, &attr);
4467 }
4468
4469 static inline unsigned
4470 AT_flag (dw_attr_node *a)
4471 {
4472 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4473 return a->dw_attr_val.v.val_flag;
4474 }
4475
4476 /* Add a signed integer attribute value to a DIE. */
4477
4478 static inline void
4479 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4480 {
4481 dw_attr_node attr;
4482
4483 attr.dw_attr = attr_kind;
4484 attr.dw_attr_val.val_class = dw_val_class_const;
4485 attr.dw_attr_val.val_entry = NULL;
4486 attr.dw_attr_val.v.val_int = int_val;
4487 add_dwarf_attr (die, &attr);
4488 }
4489
4490 static inline HOST_WIDE_INT
4491 AT_int (dw_attr_node *a)
4492 {
4493 gcc_assert (a && (AT_class (a) == dw_val_class_const
4494 || AT_class (a) == dw_val_class_const_implicit));
4495 return a->dw_attr_val.v.val_int;
4496 }
4497
4498 /* Add an unsigned integer attribute value to a DIE. */
4499
4500 static inline void
4501 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4502 unsigned HOST_WIDE_INT unsigned_val)
4503 {
4504 dw_attr_node attr;
4505
4506 attr.dw_attr = attr_kind;
4507 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4508 attr.dw_attr_val.val_entry = NULL;
4509 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4510 add_dwarf_attr (die, &attr);
4511 }
4512
4513 static inline unsigned HOST_WIDE_INT
4514 AT_unsigned (dw_attr_node *a)
4515 {
4516 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4517 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4518 return a->dw_attr_val.v.val_unsigned;
4519 }
4520
4521 /* Add an unsigned wide integer attribute value to a DIE. */
4522
4523 static inline void
4524 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4525 const wide_int& w)
4526 {
4527 dw_attr_node attr;
4528
4529 attr.dw_attr = attr_kind;
4530 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4531 attr.dw_attr_val.val_entry = NULL;
4532 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4533 *attr.dw_attr_val.v.val_wide = w;
4534 add_dwarf_attr (die, &attr);
4535 }
4536
4537 /* Add an unsigned double integer attribute value to a DIE. */
4538
4539 static inline void
4540 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4541 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4542 {
4543 dw_attr_node attr;
4544
4545 attr.dw_attr = attr_kind;
4546 attr.dw_attr_val.val_class = dw_val_class_const_double;
4547 attr.dw_attr_val.val_entry = NULL;
4548 attr.dw_attr_val.v.val_double.high = high;
4549 attr.dw_attr_val.v.val_double.low = low;
4550 add_dwarf_attr (die, &attr);
4551 }
4552
4553 /* Add a vector (array of bytes) attribute value to a DIE. */
4554
4555 static inline void
4556 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4557 unsigned int length, unsigned int elt_size, unsigned char *array)
4558 {
4559 dw_attr_node attr;
4560
4561 attr.dw_attr = attr_kind;
4562 attr.dw_attr_val.val_class = dw_val_class_vec;
4563 attr.dw_attr_val.val_entry = NULL;
4564 attr.dw_attr_val.v.val_vec.length = length;
4565 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4566 attr.dw_attr_val.v.val_vec.array = array;
4567 add_dwarf_attr (die, &attr);
4568 }
4569
4570 /* Add an 8-byte data attribute value to a DIE. */
4571
4572 static inline void
4573 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4574 unsigned char data8[8])
4575 {
4576 dw_attr_node attr;
4577
4578 attr.dw_attr = attr_kind;
4579 attr.dw_attr_val.val_class = dw_val_class_data8;
4580 attr.dw_attr_val.val_entry = NULL;
4581 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4582 add_dwarf_attr (die, &attr);
4583 }
4584
4585 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4586 dwarf_split_debug_info, address attributes in dies destined for the
4587 final executable have force_direct set to avoid using indexed
4588 references. */
4589
4590 static inline void
4591 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4592 bool force_direct)
4593 {
4594 dw_attr_node attr;
4595 char * lbl_id;
4596
4597 lbl_id = xstrdup (lbl_low);
4598 attr.dw_attr = DW_AT_low_pc;
4599 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4600 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4601 if (dwarf_split_debug_info && !force_direct)
4602 attr.dw_attr_val.val_entry
4603 = add_addr_table_entry (lbl_id, ate_kind_label);
4604 else
4605 attr.dw_attr_val.val_entry = NULL;
4606 add_dwarf_attr (die, &attr);
4607
4608 attr.dw_attr = DW_AT_high_pc;
4609 if (dwarf_version < 4)
4610 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4611 else
4612 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4613 lbl_id = xstrdup (lbl_high);
4614 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4615 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4616 && dwarf_split_debug_info && !force_direct)
4617 attr.dw_attr_val.val_entry
4618 = add_addr_table_entry (lbl_id, ate_kind_label);
4619 else
4620 attr.dw_attr_val.val_entry = NULL;
4621 add_dwarf_attr (die, &attr);
4622 }
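/* Illustrative call (the label names are hypothetical):

     add_AT_low_high_pc (subr_die, "LFB42", "LFE42", false);

   attaches DW_AT_low_pc for the start label and DW_AT_high_pc for the end
   label; with -gdwarf-4 or later the high-pc value gets class
   dw_val_class_high_pc and is later written as an offset from the low pc
   rather than as an address.  */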
4623
4624 /* Hash and equality functions for debug_str_hash. */
4625
4626 hashval_t
4627 indirect_string_hasher::hash (indirect_string_node *x)
4628 {
4629 return htab_hash_string (x->str);
4630 }
4631
4632 bool
4633 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4634 {
4635 return strcmp (x1->str, x2) == 0;
4636 }
4637
4638 /* Add STR to the given string hash table. */
4639
4640 static struct indirect_string_node *
4641 find_AT_string_in_table (const char *str,
4642 hash_table<indirect_string_hasher> *table,
4643 enum insert_option insert = INSERT)
4644 {
4645 struct indirect_string_node *node;
4646
4647 indirect_string_node **slot
4648 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4649 if (*slot == NULL)
4650 {
4651 node = ggc_cleared_alloc<indirect_string_node> ();
4652 node->str = ggc_strdup (str);
4653 *slot = node;
4654 }
4655 else
4656 node = *slot;
4657
4658 node->refcount++;
4659 return node;
4660 }
4661
4662 /* Add STR to the indirect string hash table. */
4663
4664 static struct indirect_string_node *
4665 find_AT_string (const char *str, enum insert_option insert = INSERT)
4666 {
4667 if (! debug_str_hash)
4668 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4669
4670 return find_AT_string_in_table (str, debug_str_hash, insert);
4671 }
4672
4673 /* Add a string attribute value to a DIE. */
4674
4675 static inline void
4676 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4677 {
4678 dw_attr_node attr;
4679 struct indirect_string_node *node;
4680
4681 node = find_AT_string (str);
4682
4683 attr.dw_attr = attr_kind;
4684 attr.dw_attr_val.val_class = dw_val_class_str;
4685 attr.dw_attr_val.val_entry = NULL;
4686 attr.dw_attr_val.v.val_str = node;
4687 add_dwarf_attr (die, &attr);
4688 }
4689
4690 static inline const char *
4691 AT_string (dw_attr_node *a)
4692 {
4693 gcc_assert (a && AT_class (a) == dw_val_class_str);
4694 return a->dw_attr_val.v.val_str->str;
4695 }
4696
4697 /* Call this function directly to bypass AT_string_form's logic to put
4698 the string inline in the die. */
4699
4700 static void
4701 set_indirect_string (struct indirect_string_node *node)
4702 {
4703 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4704 /* If the string is already indirect, this is a no-op. */
4705 if (node->form == DW_FORM_strp
4706 || node->form == DW_FORM_line_strp
4707 || node->form == dwarf_FORM (DW_FORM_strx))
4708 {
4709 gcc_assert (node->label);
4710 return;
4711 }
4712 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4713 ++dw2_string_counter;
4714 node->label = xstrdup (label);
4715
4716 if (!dwarf_split_debug_info)
4717 {
4718 node->form = DW_FORM_strp;
4719 node->index = NOT_INDEXED;
4720 }
4721 else
4722 {
4723 node->form = dwarf_FORM (DW_FORM_strx);
4724 node->index = NO_INDEX_ASSIGNED;
4725 }
4726 }
4727
4728 /* A helper function for dwarf2out_finish, called to reset indirect
4729 string decisions done for early LTO dwarf output before fat object
4730 dwarf output. */
4731
4732 int
4733 reset_indirect_string (indirect_string_node **h, void *)
4734 {
4735 struct indirect_string_node *node = *h;
4736 if (node->form == DW_FORM_strp
4737 || node->form == DW_FORM_line_strp
4738 || node->form == dwarf_FORM (DW_FORM_strx))
4739 {
4740 free (node->label);
4741 node->label = NULL;
4742 node->form = (dwarf_form) 0;
4743 node->index = 0;
4744 }
4745 return 1;
4746 }
4747
4748 /* Add a string representing a file or filepath attribute value to a DIE. */
4749
4750 static inline void
4751 add_filepath_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
4752 const char *str)
4753 {
4754 if (! asm_outputs_debug_line_str ())
4755 add_AT_string (die, attr_kind, str);
4756 else
4757 {
4758 dw_attr_node attr;
4759 struct indirect_string_node *node;
4760
4761 if (!debug_line_str_hash)
4762 debug_line_str_hash
4763 = hash_table<indirect_string_hasher>::create_ggc (10);
4764
4765 node = find_AT_string_in_table (str, debug_line_str_hash);
4766 set_indirect_string (node);
4767 node->form = DW_FORM_line_strp;
4768
4769 attr.dw_attr = attr_kind;
4770 attr.dw_attr_val.val_class = dw_val_class_str;
4771 attr.dw_attr_val.val_entry = NULL;
4772 attr.dw_attr_val.v.val_str = node;
4773 add_dwarf_attr (die, &attr);
4774 }
4775 }
4776
4777 /* Find out whether a string should be output inline in DIE
4778 or out-of-line in .debug_str section. */
4779
4780 static enum dwarf_form
4781 find_string_form (struct indirect_string_node *node)
4782 {
4783 unsigned int len;
4784
4785 if (node->form)
4786 return node->form;
4787
4788 len = strlen (node->str) + 1;
4789
4790 /* If the string is shorter or equal to the size of the reference, it is
4791 always better to put it inline. */
4792 if (len <= (unsigned) dwarf_offset_size || node->refcount == 0)
4793 return node->form = DW_FORM_string;
4794
4795 /* If we cannot expect the linker to merge strings in the .debug_str
4796 section, only put it into .debug_str if it is worthwhile even within
4797 this single module. */
4798 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4799 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4800 && (len - dwarf_offset_size) * node->refcount <= len))
4801 return node->form = DW_FORM_string;
4802
4803 set_indirect_string (node);
4804
4805 return node->form;
4806 }
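/* Worked example (illustrative, assuming dwarf_offset_size == 4 and a target
   where .debug_str is not mergeable): a 3-character string has len == 4 <= 4
   and is always emitted inline as DW_FORM_string.  A 20-character string
   (len == 21) referenced once also stays inline, since (21 - 4) * 1 <= 21,
   while the same string referenced three times fails that test and is moved
   to .debug_str via set_indirect_string.  */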
4807
4808 /* Find out whether the string referenced from the attribute should be
4809 output inline in DIE or out-of-line in .debug_str section. */
4810
4811 static enum dwarf_form
4812 AT_string_form (dw_attr_node *a)
4813 {
4814 gcc_assert (a && AT_class (a) == dw_val_class_str);
4815 return find_string_form (a->dw_attr_val.v.val_str);
4816 }
4817
4818 /* Add a DIE reference attribute value to a DIE. */
4819
4820 static inline void
4821 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4822 {
4823 dw_attr_node attr;
4824 gcc_checking_assert (targ_die != NULL);
4825
4826 /* With LTO we can end up trying to reference something we didn't create
4827 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4828 if (targ_die == NULL)
4829 return;
4830
4831 attr.dw_attr = attr_kind;
4832 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4833 attr.dw_attr_val.val_entry = NULL;
4834 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4835 attr.dw_attr_val.v.val_die_ref.external = 0;
4836 add_dwarf_attr (die, &attr);
4837 }
4838
4839 /* Change DIE reference REF to point to NEW_DIE instead. */
4840
4841 static inline void
4842 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4843 {
4844 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4845 ref->dw_attr_val.v.val_die_ref.die = new_die;
4846 ref->dw_attr_val.v.val_die_ref.external = 0;
4847 }
4848
4849 /* Add an AT_specification attribute to a DIE, and also make the back
4850 pointer from the specification to the definition. */
4851
4852 static inline void
4853 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4854 {
4855 add_AT_die_ref (die, DW_AT_specification, targ_die);
4856 gcc_assert (!targ_die->die_definition);
4857 targ_die->die_definition = die;
4858 }
4859
4860 static inline dw_die_ref
4861 AT_ref (dw_attr_node *a)
4862 {
4863 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4864 return a->dw_attr_val.v.val_die_ref.die;
4865 }
4866
4867 static inline int
4868 AT_ref_external (dw_attr_node *a)
4869 {
4870 if (a && AT_class (a) == dw_val_class_die_ref)
4871 return a->dw_attr_val.v.val_die_ref.external;
4872
4873 return 0;
4874 }
4875
4876 static inline void
4877 set_AT_ref_external (dw_attr_node *a, int i)
4878 {
4879 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4880 a->dw_attr_val.v.val_die_ref.external = i;
4881 }
4882
4883 /* Add a location description attribute value to a DIE. */
4884
4885 static inline void
4886 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4887 {
4888 dw_attr_node attr;
4889
4890 attr.dw_attr = attr_kind;
4891 attr.dw_attr_val.val_class = dw_val_class_loc;
4892 attr.dw_attr_val.val_entry = NULL;
4893 attr.dw_attr_val.v.val_loc = loc;
4894 add_dwarf_attr (die, &attr);
4895 }
4896
4897 static inline dw_loc_descr_ref
4898 AT_loc (dw_attr_node *a)
4899 {
4900 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4901 return a->dw_attr_val.v.val_loc;
4902 }
4903
4904 static inline void
4905 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4906 {
4907 dw_attr_node attr;
4908
4909 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4910 return;
4911
4912 attr.dw_attr = attr_kind;
4913 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4914 attr.dw_attr_val.val_entry = NULL;
4915 attr.dw_attr_val.v.val_loc_list = loc_list;
4916 add_dwarf_attr (die, &attr);
4917 have_location_lists = true;
4918 }
4919
4920 static inline dw_loc_list_ref
4921 AT_loc_list (dw_attr_node *a)
4922 {
4923 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4924 return a->dw_attr_val.v.val_loc_list;
4925 }
4926
4927 /* Add a view list attribute to DIE. It must have a DW_AT_location
4928 attribute, because the view list complements the location list. */
4929
4930 static inline void
4931 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4932 {
4933 dw_attr_node attr;
4934
4935 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4936 return;
4937
4938 attr.dw_attr = attr_kind;
4939 attr.dw_attr_val.val_class = dw_val_class_view_list;
4940 attr.dw_attr_val.val_entry = NULL;
4941 attr.dw_attr_val.v.val_view_list = die;
4942 add_dwarf_attr (die, &attr);
4943 gcc_checking_assert (get_AT (die, DW_AT_location));
4944 gcc_assert (have_location_lists);
4945 }
4946
4947 /* Return a pointer to the location list referenced by the attribute.
4948 If the named attribute is a view list, look up the corresponding
4949 DW_AT_location attribute and return its location list. */
4950
4951 static inline dw_loc_list_ref *
4952 AT_loc_list_ptr (dw_attr_node *a)
4953 {
4954 gcc_assert (a);
4955 switch (AT_class (a))
4956 {
4957 case dw_val_class_loc_list:
4958 return &a->dw_attr_val.v.val_loc_list;
4959 case dw_val_class_view_list:
4960 {
4961 dw_attr_node *l;
4962 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4963 if (!l)
4964 return NULL;
4965 gcc_checking_assert (l + 1 == a);
4966 return AT_loc_list_ptr (l);
4967 }
4968 default:
4969 gcc_unreachable ();
4970 }
4971 }
4972
4973 /* Return the location attribute value associated with a view list
4974 attribute value. */
4975
4976 static inline dw_val_node *
4977 view_list_to_loc_list_val_node (dw_val_node *val)
4978 {
4979 gcc_assert (val->val_class == dw_val_class_view_list);
4980 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4981 if (!loc)
4982 return NULL;
4983 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4984 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4985 return &loc->dw_attr_val;
4986 }
4987
4988 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4989 {
4990 static hashval_t hash (addr_table_entry *);
4991 static bool equal (addr_table_entry *, addr_table_entry *);
4992 };
4993
4994 /* Table of entries into the .debug_addr section. */
4995
4996 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4997
4998 /* Hash an address_table_entry. */
4999
5000 hashval_t
5001 addr_hasher::hash (addr_table_entry *a)
5002 {
5003 inchash::hash hstate;
5004 switch (a->kind)
5005 {
5006 case ate_kind_rtx:
5007 hstate.add_int (0);
5008 break;
5009 case ate_kind_rtx_dtprel:
5010 hstate.add_int (1);
5011 break;
5012 case ate_kind_label:
5013 return htab_hash_string (a->addr.label);
5014 default:
5015 gcc_unreachable ();
5016 }
5017 inchash::add_rtx (a->addr.rtl, hstate);
5018 return hstate.end ();
5019 }
5020
5021 /* Determine equality for two address_table_entries. */
5022
5023 bool
5024 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
5025 {
5026 if (a1->kind != a2->kind)
5027 return 0;
5028 switch (a1->kind)
5029 {
5030 case ate_kind_rtx:
5031 case ate_kind_rtx_dtprel:
5032 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
5033 case ate_kind_label:
5034 return strcmp (a1->addr.label, a2->addr.label) == 0;
5035 default:
5036 gcc_unreachable ();
5037 }
5038 }
5039
5040 /* Initialize an addr_table_entry. */
5041
5042 void
5043 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5044 {
5045 e->kind = kind;
5046 switch (kind)
5047 {
5048 case ate_kind_rtx:
5049 case ate_kind_rtx_dtprel:
5050 e->addr.rtl = (rtx) addr;
5051 break;
5052 case ate_kind_label:
5053 e->addr.label = (char *) addr;
5054 break;
5055 }
5056 e->refcount = 0;
5057 e->index = NO_INDEX_ASSIGNED;
5058 }
5059
5060 /* Add an address table entry for ADDR of kind KIND to the table, creating
5061 it if necessary. Defer setting an index until output time. */
5062
5063 static addr_table_entry *
5064 add_addr_table_entry (void *addr, enum ate_kind kind)
5065 {
5066 addr_table_entry *node;
5067 addr_table_entry finder;
5068
5069 gcc_assert (dwarf_split_debug_info);
5070 if (! addr_index_table)
5071 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5072 init_addr_table_entry (&finder, kind, addr);
5073 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5074
5075 if (*slot == HTAB_EMPTY_ENTRY)
5076 {
5077 node = ggc_cleared_alloc<addr_table_entry> ();
5078 init_addr_table_entry (node, kind, addr);
5079 *slot = node;
5080 }
5081 else
5082 node = *slot;
5083
5084 node->refcount++;
5085 return node;
5086 }
5087
5088 /* Remove an entry from the addr table by decrementing its refcount.
5089 Strictly, decrementing the refcount would be enough, but the
5090 assertion that the entry is actually in the table has found
5091 bugs. */
5092
5093 static void
5094 remove_addr_table_entry (addr_table_entry *entry)
5095 {
5096 gcc_assert (dwarf_split_debug_info && addr_index_table);
5097 /* After an index is assigned, the table is frozen. */
5098 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5099 entry->refcount--;
5100 }
5101
5102 /* Given a location list, remove all addresses it refers to from the
5103 address_table. */
5104
5105 static void
5106 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5107 {
5108 for (; descr; descr = descr->dw_loc_next)
5109 if (descr->dw_loc_oprnd1.val_entry != NULL)
5110 {
5111 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5112 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5113 }
5114 }
5115
5116 /* A helper function for dwarf2out_finish called through
5117 htab_traverse. Assign an addr_table_entry its index. All entries
5118 must be collected into the table when this function is called,
5119 because the indexing code relies on htab_traverse to traverse nodes
5120 in the same order for each run. */
5121
5122 int
5123 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5124 {
5125 addr_table_entry *node = *h;
5126
5127 /* Don't index unreferenced nodes. */
5128 if (node->refcount == 0)
5129 return 1;
5130
5131 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5132 node->index = *index;
5133 *index += 1;
5134
5135 return 1;
5136 }
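/* Rough sketch of how the routines above cooperate when -gsplit-dwarf
   is enabled (illustrative only; the real call sites are scattered
   through this file, and the traversal shown mirrors what
   dwarf2out_finish is expected to do at output time):

     addr_table_entry *e = add_addr_table_entry (rtl, ate_kind_rtx);
     ...
     remove_addr_table_entry (e);    (if the referencing location dies)
     ...
     unsigned int index = 0;
     addr_index_table->traverse_noresize
       <unsigned int *, index_addr_table_entry> (&index);

   Entries whose refcount has dropped back to zero are skipped by
   index_addr_table_entry and never receive a .debug_addr slot.  */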
5137
5138 /* Add an address constant attribute value to a DIE. When using
5139 dwarf_split_debug_info, address attributes in dies destined for the
5140 final executable should be direct references--setting the parameter
5141 force_direct ensures this behavior. */
5142
5143 static inline void
5144 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5145 bool force_direct)
5146 {
5147 dw_attr_node attr;
5148
5149 attr.dw_attr = attr_kind;
5150 attr.dw_attr_val.val_class = dw_val_class_addr;
5151 attr.dw_attr_val.v.val_addr = addr;
5152 if (dwarf_split_debug_info && !force_direct)
5153 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5154 else
5155 attr.dw_attr_val.val_entry = NULL;
5156 add_dwarf_attr (die, &attr);
5157 }
5158
5159 /* Get the RTX from an address DIE attribute. */
5160
5161 static inline rtx
5162 AT_addr (dw_attr_node *a)
5163 {
5164 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5165 return a->dw_attr_val.v.val_addr;
5166 }
5167
5168 /* Add a file attribute value to a DIE. */
5169
5170 static inline void
5171 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5172 struct dwarf_file_data *fd)
5173 {
5174 dw_attr_node attr;
5175
5176 attr.dw_attr = attr_kind;
5177 attr.dw_attr_val.val_class = dw_val_class_file;
5178 attr.dw_attr_val.val_entry = NULL;
5179 attr.dw_attr_val.v.val_file = fd;
5180 add_dwarf_attr (die, &attr);
5181 }
5182
5183 /* Get the dwarf_file_data from a file DIE attribute. */
5184
5185 static inline struct dwarf_file_data *
5186 AT_file (dw_attr_node *a)
5187 {
5188 gcc_assert (a && (AT_class (a) == dw_val_class_file
5189 || AT_class (a) == dw_val_class_file_implicit));
5190 return a->dw_attr_val.v.val_file;
5191 }
5192
5193 /* Add a vms delta attribute value to a DIE. */
5194
5195 static inline void
5196 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5197 const char *lbl1, const char *lbl2)
5198 {
5199 dw_attr_node attr;
5200
5201 attr.dw_attr = attr_kind;
5202 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5203 attr.dw_attr_val.val_entry = NULL;
5204 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5205 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5206 add_dwarf_attr (die, &attr);
5207 }
5208
5209 /* Add a symbolic view identifier attribute value to a DIE. */
5210
5211 static inline void
5212 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5213 const char *view_label)
5214 {
5215 dw_attr_node attr;
5216
5217 attr.dw_attr = attr_kind;
5218 attr.dw_attr_val.val_class = dw_val_class_symview;
5219 attr.dw_attr_val.val_entry = NULL;
5220 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5221 add_dwarf_attr (die, &attr);
5222 }
5223
5224 /* Add a label identifier attribute value to a DIE. */
5225
5226 static inline void
5227 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5228 const char *lbl_id)
5229 {
5230 dw_attr_node attr;
5231
5232 attr.dw_attr = attr_kind;
5233 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5234 attr.dw_attr_val.val_entry = NULL;
5235 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5236 if (dwarf_split_debug_info)
5237 attr.dw_attr_val.val_entry
5238 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5239 ate_kind_label);
5240 add_dwarf_attr (die, &attr);
5241 }
5242
5243 /* Add a section offset attribute value to a DIE, an offset into the
5244 debug_line section. */
5245
5246 static inline void
5247 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5248 const char *label)
5249 {
5250 dw_attr_node attr;
5251
5252 attr.dw_attr = attr_kind;
5253 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5254 attr.dw_attr_val.val_entry = NULL;
5255 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5256 add_dwarf_attr (die, &attr);
5257 }
5258
5259 /* Add a section offset attribute value to a DIE, an offset into the
5260 debug_macinfo section. */
5261
5262 static inline void
5263 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5264 const char *label)
5265 {
5266 dw_attr_node attr;
5267
5268 attr.dw_attr = attr_kind;
5269 attr.dw_attr_val.val_class = dw_val_class_macptr;
5270 attr.dw_attr_val.val_entry = NULL;
5271 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5272 add_dwarf_attr (die, &attr);
5273 }
5274
5275 /* Add a range_list attribute value to a DIE. When using
5276 dwarf_split_debug_info, address attributes in dies destined for the
5277 final executable should be direct references--setting the parameter
5278 force_direct ensures this behavior. */
5279
5280 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5281 #define RELOCATED_OFFSET (NULL)
5282
5283 static void
5284 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5285 long unsigned int offset, bool force_direct)
5286 {
5287 dw_attr_node attr;
5288
5289 attr.dw_attr = attr_kind;
5290 attr.dw_attr_val.val_class = dw_val_class_range_list;
5291 /* For the range_list attribute, use val_entry to store whether the
5292 offset should follow split-debug-info or normal semantics. This
5293 value is read in output_range_list_offset. */
5294 if (dwarf_split_debug_info && !force_direct)
5295 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5296 else
5297 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5298 attr.dw_attr_val.v.val_offset = offset;
5299 add_dwarf_attr (die, &attr);
5300 }
5301
5302 /* Return the start label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta1 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl1;
5309 }
5310
5311 /* Return the end label of a delta attribute. */
5312
5313 static inline const char *
5314 AT_vms_delta2 (dw_attr_node *a)
5315 {
5316 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5317 return a->dw_attr_val.v.val_vms_delta.lbl2;
5318 }
5319
5320 static inline const char *
5321 AT_lbl (dw_attr_node *a)
5322 {
5323 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5324 || AT_class (a) == dw_val_class_lineptr
5325 || AT_class (a) == dw_val_class_macptr
5326 || AT_class (a) == dw_val_class_loclistsptr
5327 || AT_class (a) == dw_val_class_high_pc));
5328 return a->dw_attr_val.v.val_lbl_id;
5329 }
5330
5331 /* Get the attribute of type attr_kind. */
5332
5333 static dw_attr_node *
5334 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5335 {
5336 dw_attr_node *a;
5337 unsigned ix;
5338 dw_die_ref spec = NULL;
5339
5340 if (! die)
5341 return NULL;
5342
5343 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5344 if (a->dw_attr == attr_kind)
5345 return a;
5346 else if (a->dw_attr == DW_AT_specification
5347 || a->dw_attr == DW_AT_abstract_origin)
5348 spec = AT_ref (a);
5349
5350 if (spec)
5351 return get_AT (spec, attr_kind);
5352
5353 return NULL;
5354 }
5355
5356 /* Returns the parent of the declaration of DIE. */
5357
5358 static dw_die_ref
5359 get_die_parent (dw_die_ref die)
5360 {
5361 dw_die_ref t;
5362
5363 if (!die)
5364 return NULL;
5365
5366 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5367 || (t = get_AT_ref (die, DW_AT_specification)))
5368 die = t;
5369
5370 return die->die_parent;
5371 }
5372
5373 /* Return the "low pc" attribute value, typically associated with a subprogram
5374 DIE. Return null if the "low pc" attribute is either not present, or if it
5375 cannot be represented as an assembler label identifier. */
5376
5377 static inline const char *
5378 get_AT_low_pc (dw_die_ref die)
5379 {
5380 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5381
5382 return a ? AT_lbl (a) : NULL;
5383 }
5384
5385 /* Return the value of the string attribute designated by ATTR_KIND, or
5386 NULL if it is not present. */
5387
5388 static inline const char *
5389 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5390 {
5391 dw_attr_node *a = get_AT (die, attr_kind);
5392
5393 return a ? AT_string (a) : NULL;
5394 }
5395
5396 /* Return the value of the flag attribute designated by ATTR_KIND, or -1
5397 if it is not present. */
5398
5399 static inline int
5400 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5401 {
5402 dw_attr_node *a = get_AT (die, attr_kind);
5403
5404 return a ? AT_flag (a) : 0;
5405 }
5406
5407 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5408 if it is not present. */
5409
5410 static inline unsigned
5411 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5412 {
5413 dw_attr_node *a = get_AT (die, attr_kind);
5414
5415 return a ? AT_unsigned (a) : 0;
5416 }
5417
5418 static inline dw_die_ref
5419 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5420 {
5421 dw_attr_node *a = get_AT (die, attr_kind);
5422
5423 return a ? AT_ref (a) : NULL;
5424 }
5425
5426 static inline struct dwarf_file_data *
5427 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5428 {
5429 dw_attr_node *a = get_AT (die, attr_kind);
5430
5431 return a ? AT_file (a) : NULL;
5432 }
5433
5434 /* Return TRUE if the language is C. */
5435
5436 static inline bool
5437 is_c (void)
5438 {
5439 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5440
5441 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5442 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5445 }
5446
5447 /* Return TRUE if the language is C++. */
5448
5449 static inline bool
5450 is_cxx (void)
5451 {
5452 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5453
5454 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5455 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5456 }
5457
5458 /* Return TRUE if DECL was created by the C++ frontend. */
5459
5460 static bool
5461 is_cxx (const_tree decl)
5462 {
5463 if (in_lto_p)
5464 {
5465 const_tree context = get_ultimate_context (decl);
5466 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5467 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5468 }
5469 return is_cxx ();
5470 }
5471
5472 /* Return TRUE if the language is Fortran. */
5473
5474 static inline bool
5475 is_fortran (void)
5476 {
5477 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5478
5479 return (lang == DW_LANG_Fortran77
5480 || lang == DW_LANG_Fortran90
5481 || lang == DW_LANG_Fortran95
5482 || lang == DW_LANG_Fortran03
5483 || lang == DW_LANG_Fortran08);
5484 }
5485
5486 static inline bool
5487 is_fortran (const_tree decl)
5488 {
5489 if (in_lto_p)
5490 {
5491 const_tree context = get_ultimate_context (decl);
5492 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5493 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5494 "GNU Fortran", 11) == 0
5495 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5496 "GNU F77") == 0);
5497 }
5498 return is_fortran ();
5499 }
5500
5501 /* Return TRUE if the language is Ada. */
5502
5503 static inline bool
5504 is_ada (void)
5505 {
5506 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5507
5508 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5509 }
5510
5511 /* Return TRUE if the language is D. */
5512
5513 static inline bool
5514 is_dlang (void)
5515 {
5516 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5517
5518 return lang == DW_LANG_D;
5519 }
5520
5521 /* Remove the specified attribute if present. Return TRUE if removal
5522 was successful. */
5523
5524 static bool
5525 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5526 {
5527 dw_attr_node *a;
5528 unsigned ix;
5529
5530 if (! die)
5531 return false;
5532
5533 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5534 if (a->dw_attr == attr_kind)
5535 {
5536 if (AT_class (a) == dw_val_class_str)
5537 if (a->dw_attr_val.v.val_str->refcount)
5538 a->dw_attr_val.v.val_str->refcount--;
5539
5540 /* vec::ordered_remove should help reduce the number of abbrevs
5541 that are needed. */
5542 die->die_attr->ordered_remove (ix);
5543 return true;
5544 }
5545 return false;
5546 }
5547
5548 /* Remove CHILD from its parent. PREV must have the property that
5549 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5550
5551 static void
5552 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5553 {
5554 gcc_assert (child->die_parent == prev->die_parent);
5555 gcc_assert (prev->die_sib == child);
5556 if (prev == child)
5557 {
5558 gcc_assert (child->die_parent->die_child == child);
5559 prev = NULL;
5560 }
5561 else
5562 prev->die_sib = child->die_sib;
5563 if (child->die_parent->die_child == child)
5564 child->die_parent->die_child = prev;
5565 child->die_sib = NULL;
5566 }
5567
5568 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5569 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5570
5571 static void
5572 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5573 {
5574 dw_die_ref parent = old_child->die_parent;
5575
5576 gcc_assert (parent == prev->die_parent);
5577 gcc_assert (prev->die_sib == old_child);
5578
5579 new_child->die_parent = parent;
5580 if (prev == old_child)
5581 {
5582 gcc_assert (parent->die_child == old_child);
5583 new_child->die_sib = new_child;
5584 }
5585 else
5586 {
5587 prev->die_sib = new_child;
5588 new_child->die_sib = old_child->die_sib;
5589 }
5590 if (old_child->die_parent->die_child == old_child)
5591 old_child->die_parent->die_child = new_child;
5592 old_child->die_sib = NULL;
5593 }
5594
5595 /* Move all children from OLD_PARENT to NEW_PARENT. */
5596
5597 static void
5598 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5599 {
5600 dw_die_ref c;
5601 new_parent->die_child = old_parent->die_child;
5602 old_parent->die_child = NULL;
5603 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5604 }
5605
5606 /* Remove any child DIEs whose die_tag is TAG. Do nothing if no child
5607 matches TAG. */
5608
5609 static void
5610 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5611 {
5612 dw_die_ref c;
5613
5614 c = die->die_child;
5615 if (c) do {
5616 dw_die_ref prev = c;
5617 c = c->die_sib;
5618 while (c->die_tag == tag)
5619 {
5620 remove_child_with_prev (c, prev);
5621 c->die_parent = NULL;
5622 /* Might have removed every child. */
5623 if (die->die_child == NULL)
5624 return;
5625 c = prev->die_sib;
5626 }
5627 } while (c != die->die_child);
5628 }
5629
5630 /* Add a CHILD_DIE as the last child of DIE. */
5631
5632 static void
5633 add_child_die (dw_die_ref die, dw_die_ref child_die)
5634 {
5635 /* FIXME this should probably be an assert. */
5636 if (! die || ! child_die)
5637 return;
5638 gcc_assert (die != child_die);
5639
5640 child_die->die_parent = die;
5641 if (die->die_child)
5642 {
5643 child_die->die_sib = die->die_child->die_sib;
5644 die->die_child->die_sib = child_die;
5645 }
5646 else
5647 child_die->die_sib = child_die;
5648 die->die_child = child_die;
5649 }
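/* Shape of the child list built by add_child_die (illustration derived
   from the code above): a parent's die_child field points at its LAST
   child, and the children form a singly linked ring through die_sib,
   with the last child pointing back at the first.  After adding A, B
   and C in that order:

     die->die_child --> C
     A->die_sib == B,  B->die_sib == C,  C->die_sib == A

   Iteration therefore starts at die_child->die_sib (the first child)
   and stops once it wraps around to die_child again, which is the
   pattern remove_child_TAG spells out by hand.  */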
5650
5651 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5652
5653 static void
5654 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5655 dw_die_ref after_die)
5656 {
5657 gcc_assert (die
5658 && child_die
5659 && after_die
5660 && die->die_child
5661 && die != child_die);
5662
5663 child_die->die_parent = die;
5664 child_die->die_sib = after_die->die_sib;
5665 after_die->die_sib = child_die;
5666 if (die->die_child == after_die)
5667 die->die_child = child_die;
5668 }
5669
5670 /* Unassociate CHILD from its parent, and make its parent be
5671 NEW_PARENT. */
5672
5673 static void
5674 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5675 {
5676 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5677 if (p->die_sib == child)
5678 {
5679 remove_child_with_prev (child, p);
5680 break;
5681 }
5682 add_child_die (new_parent, child);
5683 }
5684
5685 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5686 is the specification, to the end of PARENT's list of children.
5687 This is done by removing and re-adding it. */
5688
5689 static void
5690 splice_child_die (dw_die_ref parent, dw_die_ref child)
5691 {
5692 /* We want the declaration DIE from inside the class, not the
5693 specification DIE at toplevel. */
5694 if (child->die_parent != parent)
5695 {
5696 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5697
5698 if (tmp)
5699 child = tmp;
5700 }
5701
5702 gcc_assert (child->die_parent == parent
5703 || (child->die_parent
5704 == get_AT_ref (parent, DW_AT_specification)));
5705
5706 reparent_child (child, parent);
5707 }
5708
5709 /* Create and return a new die with TAG_VALUE as tag. */
5710
5711 static inline dw_die_ref
5712 new_die_raw (enum dwarf_tag tag_value)
5713 {
5714 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5715 die->die_tag = tag_value;
5716 return die;
5717 }
5718
5719 /* Create and return a new die with a parent of PARENT_DIE. If
5720 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5721 associated tree T must be supplied to determine parenthood
5722 later. */
5723
5724 static inline dw_die_ref
5725 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5726 {
5727 dw_die_ref die = new_die_raw (tag_value);
5728
5729 if (parent_die != NULL)
5730 add_child_die (parent_die, die);
5731 else
5732 {
5733 limbo_die_node *limbo_node;
5734
5735 /* No DIEs created after early dwarf should end up in limbo,
5736 because the limbo list should not persist past LTO
5737 streaming. */
5738 if (tag_value != DW_TAG_compile_unit
5739 /* These are allowed because they're generated while
5740 breaking out COMDAT units late. */
5741 && tag_value != DW_TAG_type_unit
5742 && tag_value != DW_TAG_skeleton_unit
5743 && !early_dwarf
5744 /* Allow nested functions to live in limbo because they will
5745 only temporarily live there, as decls_for_scope will fix
5746 them up. */
5747 && (TREE_CODE (t) != FUNCTION_DECL
5748 || !decl_function_context (t))
5749 /* Same as nested functions above but for types. Types that
5750 are local to a function will be fixed in
5751 decls_for_scope. */
5752 && (!RECORD_OR_UNION_TYPE_P (t)
5753 || !TYPE_CONTEXT (t)
5754 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5755 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5756 especially in the ltrans stage, but once we implement LTO
5757 dwarf streaming, we should remove this exception. */
5758 && !in_lto_p)
5759 {
5760 fprintf (stderr, "symbol ended up in limbo too late:");
5761 debug_generic_stmt (t);
5762 gcc_unreachable ();
5763 }
5764
5765 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5766 limbo_node->die = die;
5767 limbo_node->created_for = t;
5768 limbo_node->next = limbo_die_list;
5769 limbo_die_list = limbo_node;
5770 }
5771
5772 return die;
5773 }
5774
5775 /* Return the DIE associated with the given type specifier. */
5776
5777 static inline dw_die_ref
5778 lookup_type_die (tree type)
5779 {
5780 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5781 if (die && die->removed)
5782 {
5783 TYPE_SYMTAB_DIE (type) = NULL;
5784 return NULL;
5785 }
5786 return die;
5787 }
5788
5789 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5790 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5791 anonymous type instead of the one of the naming typedef. */
5792
5793 static inline dw_die_ref
5794 strip_naming_typedef (tree type, dw_die_ref type_die)
5795 {
5796 if (type
5797 && TREE_CODE (type) == RECORD_TYPE
5798 && type_die
5799 && type_die->die_tag == DW_TAG_typedef
5800 && is_naming_typedef_decl (TYPE_NAME (type)))
5801 type_die = get_AT_ref (type_die, DW_AT_type);
5802 return type_die;
5803 }
5804
5805 /* Like lookup_type_die, but if type is an anonymous type named by a
5806 typedef[1], return the DIE of the anonymous type instead of the one
5807 of the naming typedef. This is because in gen_typedef_die, we
5808 equated the anonymous struct named by the typedef with the DIE of
5809 the naming typedef. So by default, lookup_type_die on an anonymous
5810 struct yields the DIE of the naming typedef.
5811
5812 [1]: Read the comment of is_naming_typedef_decl to learn about what
5813 a naming typedef is. */
5814
5815 static inline dw_die_ref
5816 lookup_type_die_strip_naming_typedef (tree type)
5817 {
5818 dw_die_ref die = lookup_type_die (type);
5819 return strip_naming_typedef (type, die);
5820 }
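/* For illustration, the typical naming typedef in C or C++ looks like

     typedef struct { int x; } point;

   ("point" is just a made-up name here).  The struct itself is
   anonymous, so gen_typedef_die equates it with the typedef's DIE;
   strip_naming_typedef and lookup_type_die_strip_naming_typedef undo
   that, so callers asking for the struct get the structure-type DIE
   referenced by the typedef's DW_AT_type rather than the
   DW_TAG_typedef DIE itself.  */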
5821
5822 /* Equate a DIE to a given type specifier. */
5823
5824 static inline void
5825 equate_type_number_to_die (tree type, dw_die_ref type_die)
5826 {
5827 TYPE_SYMTAB_DIE (type) = type_die;
5828 }
5829
5830 static dw_die_ref maybe_create_die_with_external_ref (tree);
5831 struct GTY(()) sym_off_pair
5832 {
5833 const char * GTY((skip)) sym;
5834 unsigned HOST_WIDE_INT off;
5835 };
5836 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5837
5838 /* Returns a hash value for X (which really is a die_struct). */
5839
5840 inline hashval_t
5841 decl_die_hasher::hash (die_node *x)
5842 {
5843 return (hashval_t) x->decl_id;
5844 }
5845
5846 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5847
5848 inline bool
5849 decl_die_hasher::equal (die_node *x, tree y)
5850 {
5851 return (x->decl_id == DECL_UID (y));
5852 }
5853
5854 /* Return the DIE associated with a given declaration. */
5855
5856 static inline dw_die_ref
5857 lookup_decl_die (tree decl)
5858 {
5859 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5860 NO_INSERT);
5861 if (!die)
5862 {
5863 if (in_lto_p)
5864 return maybe_create_die_with_external_ref (decl);
5865 return NULL;
5866 }
5867 if ((*die)->removed)
5868 {
5869 decl_die_table->clear_slot (die);
5870 return NULL;
5871 }
5872 return *die;
5873 }
5874
5875
5876 /* Return the DIE associated with BLOCK. */
5877
5878 static inline dw_die_ref
5879 lookup_block_die (tree block)
5880 {
5881 dw_die_ref die = BLOCK_DIE (block);
5882 if (!die && in_lto_p)
5883 return maybe_create_die_with_external_ref (block);
5884 return die;
5885 }
5886
5887 /* Associate DIE with BLOCK. */
5888
5889 static inline void
5890 equate_block_to_die (tree block, dw_die_ref die)
5891 {
5892 BLOCK_DIE (block) = die;
5893 }
5894 #undef BLOCK_DIE
5895
5896
5897 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5898 style reference. Return true if we found one referring to a DIE for
5899 DECL, otherwise return false. */
5900
5901 static bool
5902 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5903 unsigned HOST_WIDE_INT *off)
5904 {
5905 dw_die_ref die;
5906
5907 if (in_lto_p)
5908 {
5909 /* During WPA stage and incremental linking we use a hash-map
5910 to store the decl <-> label + offset map. */
5911 if (!external_die_map)
5912 return false;
5913 sym_off_pair *desc = external_die_map->get (decl);
5914 if (!desc)
5915 return false;
5916 *sym = desc->sym;
5917 *off = desc->off;
5918 return true;
5919 }
5920
5921 if (TREE_CODE (decl) == BLOCK)
5922 die = lookup_block_die (decl);
5923 else
5924 die = lookup_decl_die (decl);
5925 if (!die)
5926 return false;
5927
5928 /* Similar to get_ref_die_offset_label, but using the "correct"
5929 label. */
5930 *off = die->die_offset;
5931 while (die->die_parent)
5932 die = die->die_parent;
5933 /* For the containing CU DIE we compute a die_symbol in
5934 compute_comp_unit_symbol. */
5935 gcc_assert (die->die_tag == DW_TAG_compile_unit
5936 && die->die_id.die_symbol != NULL);
5937 *sym = die->die_id.die_symbol;
5938 return true;
5939 }
5940
5941 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5942
5943 static void
5944 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5945 const char *symbol, HOST_WIDE_INT offset)
5946 {
5947 /* Create a fake DIE that contains the reference. Don't use
5948 new_die because we don't want to end up in the limbo list. */
5949 /* ??? We probably want to share these, thus put a ref to the DIE
5950 we create here to the external_die_map entry. */
5951 dw_die_ref ref = new_die_raw (die->die_tag);
5952 ref->die_id.die_symbol = symbol;
5953 ref->die_offset = offset;
5954 ref->with_offset = 1;
5955 add_AT_die_ref (die, attr_kind, ref);
5956 }
5957
5958 /* Create a DIE for DECL if required and add a reference to a DIE
5959 at SYMBOL + OFFSET which contains attributes dumped early. */
5960
5961 static void
5962 dwarf2out_register_external_die (tree decl, const char *sym,
5963 unsigned HOST_WIDE_INT off)
5964 {
5965 if (debug_info_level == DINFO_LEVEL_NONE)
5966 return;
5967
5968 if (!external_die_map)
5969 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5970 gcc_checking_assert (!external_die_map->get (decl));
5971 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5972 external_die_map->put (decl, p);
5973 }
5974
5975 /* If we have a registered external DIE for DECL return a new DIE for
5976 the concrete instance with an appropriate abstract origin. */
5977
5978 static dw_die_ref
5979 maybe_create_die_with_external_ref (tree decl)
5980 {
5981 if (!external_die_map)
5982 return NULL;
5983 sym_off_pair *desc = external_die_map->get (decl);
5984 if (!desc)
5985 return NULL;
5986
5987 const char *sym = desc->sym;
5988 unsigned HOST_WIDE_INT off = desc->off;
5989 external_die_map->remove (decl);
5990
5991 in_lto_p = false;
5992 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5993 ? lookup_block_die (decl) : lookup_decl_die (decl));
5994 gcc_assert (!die);
5995 in_lto_p = true;
5996
5997 tree ctx;
5998 dw_die_ref parent = NULL;
5999 /* Need to look up a DIE for the decl's context - the containing
6000 function or translation unit. */
6001 if (TREE_CODE (decl) == BLOCK)
6002 {
6003 ctx = BLOCK_SUPERCONTEXT (decl);
6004 /* ??? We do not output DIEs for all scopes thus skip as
6005 many DIEs as needed. */
6006 while (TREE_CODE (ctx) == BLOCK
6007 && !lookup_block_die (ctx))
6008 ctx = BLOCK_SUPERCONTEXT (ctx);
6009 }
6010 else
6011 ctx = DECL_CONTEXT (decl);
6012 /* Peel types in the context stack. */
6013 while (ctx && TYPE_P (ctx))
6014 ctx = TYPE_CONTEXT (ctx);
6015 /* Likewise namespaces in case we do not want to emit DIEs for them. */
6016 if (debug_info_level <= DINFO_LEVEL_TERSE)
6017 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
6018 ctx = DECL_CONTEXT (ctx);
6019 if (ctx)
6020 {
6021 if (TREE_CODE (ctx) == BLOCK)
6022 parent = lookup_block_die (ctx);
6023 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
6024 /* Keep the 1:1 association during WPA. */
6025 && !flag_wpa
6026 && flag_incremental_link != INCREMENTAL_LINK_LTO)
6027 /* Otherwise all late annotations go to the main CU which
6028 imports the original CUs. */
6029 parent = comp_unit_die ();
6030 else if (TREE_CODE (ctx) == FUNCTION_DECL
6031 && TREE_CODE (decl) != FUNCTION_DECL
6032 && TREE_CODE (decl) != PARM_DECL
6033 && TREE_CODE (decl) != RESULT_DECL
6034 && TREE_CODE (decl) != BLOCK)
6035 /* Leave function local entities parent determination to when
6036 we process scope vars. */
6037 ;
6038 else
6039 parent = lookup_decl_die (ctx);
6040 }
6041 else
6042 /* In some cases the FEs fail to set DECL_CONTEXT properly.
6043 Handle this case gracefully by globalizing stuff. */
6044 parent = comp_unit_die ();
6045 /* Create a DIE "stub". */
6046 switch (TREE_CODE (decl))
6047 {
6048 case TRANSLATION_UNIT_DECL:
6049 {
6050 die = comp_unit_die ();
6051 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6052 to create a DIE for the original CUs. */
6053 return die;
6054 }
6055 case NAMESPACE_DECL:
6056 if (is_fortran (decl))
6057 die = new_die (DW_TAG_module, parent, decl);
6058 else
6059 die = new_die (DW_TAG_namespace, parent, decl);
6060 break;
6061 case FUNCTION_DECL:
6062 die = new_die (DW_TAG_subprogram, parent, decl);
6063 break;
6064 case VAR_DECL:
6065 die = new_die (DW_TAG_variable, parent, decl);
6066 break;
6067 case RESULT_DECL:
6068 die = new_die (DW_TAG_variable, parent, decl);
6069 break;
6070 case PARM_DECL:
6071 die = new_die (DW_TAG_formal_parameter, parent, decl);
6072 break;
6073 case CONST_DECL:
6074 die = new_die (DW_TAG_constant, parent, decl);
6075 break;
6076 case LABEL_DECL:
6077 die = new_die (DW_TAG_label, parent, decl);
6078 break;
6079 case BLOCK:
6080 die = new_die (DW_TAG_lexical_block, parent, decl);
6081 break;
6082 default:
6083 gcc_unreachable ();
6084 }
6085 if (TREE_CODE (decl) == BLOCK)
6086 equate_block_to_die (decl, die);
6087 else
6088 equate_decl_number_to_die (decl, die);
6089
6090 add_desc_attribute (die, decl);
6091
6092 /* Add a reference to the DIE providing early debug at $sym + off. */
6093 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6094
6095 return die;
6096 }
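/* Rough summary of the early-debug hand-off implemented above (for
   orientation; the registration side lives in the LTO streaming code
   outside this file): early dwarf gives every interesting decl a full
   DIE whose containing CU gets a symbol, so a DIE can be named as
   SYMBOL + OFFSET.  At LTRANS time those pairs are re-registered
   through dwarf2out_register_external_die, and the first late
   lookup_decl_die or lookup_block_die for such a decl ends up in
   maybe_create_die_with_external_ref, which builds a stub DIE in the
   appropriate scope and points its DW_AT_abstract_origin at the early
   SYMBOL + OFFSET via add_AT_external_die_ref.  */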
6097
6098 /* Returns a hash value for X (which really is a var_loc_list). */
6099
6100 inline hashval_t
6101 decl_loc_hasher::hash (var_loc_list *x)
6102 {
6103 return (hashval_t) x->decl_id;
6104 }
6105
6106 /* Return nonzero if decl_id of var_loc_list X is the same as
6107 UID of decl *Y. */
6108
6109 inline bool
6110 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6111 {
6112 return (x->decl_id == DECL_UID (y));
6113 }
6114
6115 /* Return the var_loc list associated with a given declaration. */
6116
6117 static inline var_loc_list *
6118 lookup_decl_loc (const_tree decl)
6119 {
6120 if (!decl_loc_table)
6121 return NULL;
6122 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6123 }
6124
6125 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6126
6127 inline hashval_t
6128 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6129 {
6130 return (hashval_t) x->decl_id;
6131 }
6132
6133 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6134 UID of decl *Y. */
6135
6136 inline bool
6137 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6138 {
6139 return (x->decl_id == DECL_UID (y));
6140 }
6141
6142 /* Equate a DIE to a particular declaration. */
6143
6144 static void
6145 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6146 {
6147 unsigned int decl_id = DECL_UID (decl);
6148
6149 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6150 decl_die->decl_id = decl_id;
6151 }
6152
6153 /* Return how many bits the PIECE EXPR_LIST covers. */
6154
6155 static HOST_WIDE_INT
6156 decl_piece_bitsize (rtx piece)
6157 {
6158 int ret = (int) GET_MODE (piece);
6159 if (ret)
6160 return ret;
6161 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6162 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6163 return INTVAL (XEXP (XEXP (piece, 0), 0));
6164 }
6165
6166 /* Return a pointer to the slot holding the location note in PIECE EXPR_LIST. */
6167
6168 static rtx *
6169 decl_piece_varloc_ptr (rtx piece)
6170 {
6171 if ((int) GET_MODE (piece))
6172 return &XEXP (piece, 0);
6173 else
6174 return &XEXP (XEXP (piece, 0), 1);
6175 }
6176
6177 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6178 NEXT is the chain of following piece nodes. */
6179
6180 static rtx_expr_list *
6181 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6182 {
6183 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6184 return alloc_EXPR_LIST (bitsize, loc_note, next);
6185 else
6186 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6187 GEN_INT (bitsize),
6188 loc_note), next);
6189 }
6190
6191 /* Return rtx that should be stored into loc field for
6192 LOC_NOTE and BITPOS/BITSIZE. */
6193
6194 static rtx
6195 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6196 HOST_WIDE_INT bitsize)
6197 {
6198 if (bitsize != -1)
6199 {
6200 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6201 if (bitpos != 0)
6202 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6203 }
6204 return loc_note;
6205 }
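/* Worked example of the piece encoding above (hypothetical numbers):
   a location note that describes only bits [32, 64) of a variable is
   turned by construct_piece_list (note, 32, 32) into

     EXPR_LIST  mode 32  value NULL_RTX      32 bits of padding
       -> EXPR_LIST  mode 32  value NOTE     the piece being described

   The bit size of a piece is stored in the EXPR_LIST's machine mode
   field when it fits (0 < bitsize <= MAX_MACHINE_MODE); larger sizes
   are wrapped in a CONCAT of (CONST_INT bitsize, note) with mode 0,
   which is exactly what decl_piece_bitsize and decl_piece_varloc_ptr
   decode.  */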
6206
6207 /* This function either modifies location piece list *DEST in
6208 place (if SRC and INNER are NULL), or copies location piece list
6209 *SRC to *DEST while modifying it. The location at BITPOS is changed
6210 to contain LOC_NOTE; any pieces overlapping it are removed (or, in
6211 the copying case, not copied) and some padding around it is added
6212 if needed. When modifying in place, DEST should point to the
6213 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
6214 copying, SRC points to the start of the whole list and INNER points
6215 to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
6216
6217 static void
6218 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6219 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6220 HOST_WIDE_INT bitsize, rtx loc_note)
6221 {
6222 HOST_WIDE_INT diff;
6223 bool copy = inner != NULL;
6224
6225 if (copy)
6226 {
6227 /* First copy all nodes preceding the current bitpos. */
6228 while (src != inner)
6229 {
6230 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6231 decl_piece_bitsize (*src), NULL_RTX);
6232 dest = &XEXP (*dest, 1);
6233 src = &XEXP (*src, 1);
6234 }
6235 }
6236 /* Add padding if needed. */
6237 if (bitpos != piece_bitpos)
6238 {
6239 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6240 copy ? NULL_RTX : *dest);
6241 dest = &XEXP (*dest, 1);
6242 }
6243 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6244 {
6245 gcc_assert (!copy);
6246 /* A piece with the correct bitpos and bitsize already exists;
6247 just update the location for it and return. */
6248 *decl_piece_varloc_ptr (*dest) = loc_note;
6249 return;
6250 }
6251 /* Add the piece that changed. */
6252 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6253 dest = &XEXP (*dest, 1);
6254 /* Skip over pieces that overlap it. */
6255 diff = bitpos - piece_bitpos + bitsize;
6256 if (!copy)
6257 src = dest;
6258 while (diff > 0 && *src)
6259 {
6260 rtx piece = *src;
6261 diff -= decl_piece_bitsize (piece);
6262 if (copy)
6263 src = &XEXP (piece, 1);
6264 else
6265 {
6266 *src = XEXP (piece, 1);
6267 free_EXPR_LIST_node (piece);
6268 }
6269 }
6270 /* Add padding if needed. */
6271 if (diff < 0 && *src)
6272 {
6273 if (!copy)
6274 dest = src;
6275 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6276 dest = &XEXP (*dest, 1);
6277 }
6278 if (!copy)
6279 return;
6280 /* Finally copy all nodes following it. */
6281 while (*src)
6282 {
6283 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6284 decl_piece_bitsize (*src), NULL_RTX);
6285 dest = &XEXP (*dest, 1);
6286 src = &XEXP (*src, 1);
6287 }
6288 }
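/* Worked example for the in-place case of adjust_piece_list
   (hypothetical sizes): suppose the last note's piece list is P0
   covering bits [0, 32) followed by P1 covering [32, 64), and a new
   note arrives for bits [32, 64) under the same label.  The caller
   walks DEST to P1 with PIECE_BITPOS == 32; because the bit position
   and size match an existing piece, only P1's location note is
   replaced and the list shape is untouched.  If the new note instead
   covered bits [32, 96), a fresh 64-bit piece would be inserted in
   P1's place, every old piece it overlaps would be dropped, and
   padding would be added if the last dropped piece extended past bit
   96, so that any remaining pieces keep their original offsets.  */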
6289
6290 /* Add a variable location node to the linked list for DECL. */
6291
6292 static struct var_loc_node *
6293 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6294 {
6295 unsigned int decl_id;
6296 var_loc_list *temp;
6297 struct var_loc_node *loc = NULL;
6298 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6299
6300 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6301 {
6302 tree realdecl = DECL_DEBUG_EXPR (decl);
6303 if (handled_component_p (realdecl)
6304 || (TREE_CODE (realdecl) == MEM_REF
6305 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6306 {
6307 bool reverse;
6308 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6309 &bitsize, &reverse);
6310 if (!innerdecl
6311 || !DECL_P (innerdecl)
6312 || DECL_IGNORED_P (innerdecl)
6313 || TREE_STATIC (innerdecl)
6314 || bitsize == 0
6315 || bitpos + bitsize > 256)
6316 return NULL;
6317 decl = innerdecl;
6318 }
6319 }
6320
6321 decl_id = DECL_UID (decl);
6322 var_loc_list **slot
6323 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6324 if (*slot == NULL)
6325 {
6326 temp = ggc_cleared_alloc<var_loc_list> ();
6327 temp->decl_id = decl_id;
6328 *slot = temp;
6329 }
6330 else
6331 temp = *slot;
6332
6333 /* For PARM_DECLs try to keep around the original incoming value,
6334 even if that means we'll emit a zero-range .debug_loc entry. */
6335 if (temp->last
6336 && temp->first == temp->last
6337 && TREE_CODE (decl) == PARM_DECL
6338 && NOTE_P (temp->first->loc)
6339 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6340 && DECL_INCOMING_RTL (decl)
6341 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6342 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6343 == GET_CODE (DECL_INCOMING_RTL (decl))
6344 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6345 && (bitsize != -1
6346 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6347 NOTE_VAR_LOCATION_LOC (loc_note))
6348 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6349 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6350 {
6351 loc = ggc_cleared_alloc<var_loc_node> ();
6352 temp->first->next = loc;
6353 temp->last = loc;
6354 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6355 }
6356 else if (temp->last)
6357 {
6358 struct var_loc_node *last = temp->last, *unused = NULL;
6359 rtx *piece_loc = NULL, last_loc_note;
6360 HOST_WIDE_INT piece_bitpos = 0;
6361 if (last->next)
6362 {
6363 last = last->next;
6364 gcc_assert (last->next == NULL);
6365 }
6366 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6367 {
6368 piece_loc = &last->loc;
6369 do
6370 {
6371 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6372 if (piece_bitpos + cur_bitsize > bitpos)
6373 break;
6374 piece_bitpos += cur_bitsize;
6375 piece_loc = &XEXP (*piece_loc, 1);
6376 }
6377 while (*piece_loc);
6378 }
6379 /* TEMP->LAST here is a pointer to either the last-but-one or the
6380 last element in the chained list; LAST is a pointer to the
6381 last element. */
6382 if (label && strcmp (last->label, label) == 0 && last->view == view)
6383 {
6384 /* For SRA-optimized variables, if there weren't any real
6385 insns since the last note, just modify the last node. */
6386 if (piece_loc != NULL)
6387 {
6388 adjust_piece_list (piece_loc, NULL, NULL,
6389 bitpos, piece_bitpos, bitsize, loc_note);
6390 return NULL;
6391 }
6392 /* If the last note doesn't cover any instructions, remove it. */
6393 if (temp->last != last)
6394 {
6395 temp->last->next = NULL;
6396 unused = last;
6397 last = temp->last;
6398 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6399 }
6400 else
6401 {
6402 gcc_assert (temp->first == temp->last
6403 || (temp->first->next == temp->last
6404 && TREE_CODE (decl) == PARM_DECL));
6405 memset (temp->last, '\0', sizeof (*temp->last));
6406 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6407 return temp->last;
6408 }
6409 }
6410 if (bitsize == -1 && NOTE_P (last->loc))
6411 last_loc_note = last->loc;
6412 else if (piece_loc != NULL
6413 && *piece_loc != NULL_RTX
6414 && piece_bitpos == bitpos
6415 && decl_piece_bitsize (*piece_loc) == bitsize)
6416 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6417 else
6418 last_loc_note = NULL_RTX;
6419 /* If the current location is the same as the end of the list,
6420 and either both or neither of the locations is uninitialized,
6421 we have nothing to do. */
6422 if (last_loc_note == NULL_RTX
6423 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6424 NOTE_VAR_LOCATION_LOC (loc_note)))
6425 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6426 != NOTE_VAR_LOCATION_STATUS (loc_note))
6427 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6428 == VAR_INIT_STATUS_UNINITIALIZED)
6429 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6430 == VAR_INIT_STATUS_UNINITIALIZED))))
6431 {
6432 /* Add LOC to the end of the list and update LAST. If the last
6433 element of the list has been removed above, reuse its
6434 memory for the new node, otherwise allocate a new one. */
6435 if (unused)
6436 {
6437 loc = unused;
6438 memset (loc, '\0', sizeof (*loc));
6439 }
6440 else
6441 loc = ggc_cleared_alloc<var_loc_node> ();
6442 if (bitsize == -1 || piece_loc == NULL)
6443 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6444 else
6445 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6446 bitpos, piece_bitpos, bitsize, loc_note);
6447 last->next = loc;
6448 /* Ensure TEMP->LAST will point either to the new last but one
6449 element of the chain, or to the last element in it. */
6450 if (last != temp->last)
6451 temp->last = last;
6452 }
6453 else if (unused)
6454 ggc_free (unused);
6455 }
6456 else
6457 {
6458 loc = ggc_cleared_alloc<var_loc_node> ();
6459 temp->first = loc;
6460 temp->last = loc;
6461 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6462 }
6463 return loc;
6464 }
6465 \f
6466 /* Keep track of the number of spaces used to indent the
6467 output of the debugging routines that print the structure of
6468 the DIE internal representation. */
6469 static int print_indent;
6470
6471 /* Indent the line the number of spaces given by print_indent. */
6472
6473 static inline void
6474 print_spaces (FILE *outfile)
6475 {
6476 fprintf (outfile, "%*s", print_indent, "");
6477 }
6478
6479 /* Print a type signature in hex. */
6480
6481 static inline void
6482 print_signature (FILE *outfile, char *sig)
6483 {
6484 int i;
6485
6486 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6487 fprintf (outfile, "%02x", sig[i] & 0xff);
6488 }
6489
6490 static inline void
6491 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6492 {
6493 if (discr_value->pos)
6494 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6495 else
6496 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6497 }
6498
6499 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6500
6501 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6502 RECURSE, output location descriptor operations. */
6503
6504 static void
6505 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6506 {
6507 switch (val->val_class)
6508 {
6509 case dw_val_class_addr:
6510 fprintf (outfile, "address");
6511 break;
6512 case dw_val_class_offset:
6513 fprintf (outfile, "offset");
6514 break;
6515 case dw_val_class_loc:
6516 fprintf (outfile, "location descriptor");
6517 if (val->v.val_loc == NULL)
6518 fprintf (outfile, " -> <null>\n");
6519 else if (recurse)
6520 {
6521 fprintf (outfile, ":\n");
6522 print_indent += 4;
6523 print_loc_descr (val->v.val_loc, outfile);
6524 print_indent -= 4;
6525 }
6526 else
6527 {
6528 if (flag_dump_noaddr || flag_dump_unnumbered)
6529 fprintf (outfile, " #\n");
6530 else
6531 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6532 }
6533 break;
6534 case dw_val_class_loc_list:
6535 fprintf (outfile, "location list -> label:%s",
6536 val->v.val_loc_list->ll_symbol);
6537 break;
6538 case dw_val_class_view_list:
6539 val = view_list_to_loc_list_val_node (val);
6540 fprintf (outfile, "location list with views -> labels:%s and %s",
6541 val->v.val_loc_list->ll_symbol,
6542 val->v.val_loc_list->vl_symbol);
6543 break;
6544 case dw_val_class_range_list:
6545 fprintf (outfile, "range list");
6546 break;
6547 case dw_val_class_const:
6548 case dw_val_class_const_implicit:
6549 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6550 break;
6551 case dw_val_class_unsigned_const:
6552 case dw_val_class_unsigned_const_implicit:
6553 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6554 break;
6555 case dw_val_class_const_double:
6556 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6557 HOST_WIDE_INT_PRINT_UNSIGNED")",
6558 val->v.val_double.high,
6559 val->v.val_double.low);
6560 break;
6561 case dw_val_class_wide_int:
6562 {
6563 int i = val->v.val_wide->get_len ();
6564 fprintf (outfile, "constant (");
6565 gcc_assert (i > 0);
6566 if (val->v.val_wide->elt (i - 1) == 0)
6567 fprintf (outfile, "0x");
6568 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6569 val->v.val_wide->elt (--i));
6570 while (--i >= 0)
6571 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6572 val->v.val_wide->elt (i));
6573 fprintf (outfile, ")");
6574 break;
6575 }
6576 case dw_val_class_vec:
6577 fprintf (outfile, "floating-point or vector constant");
6578 break;
6579 case dw_val_class_flag:
6580 fprintf (outfile, "%u", val->v.val_flag);
6581 break;
6582 case dw_val_class_die_ref:
6583 if (val->v.val_die_ref.die != NULL)
6584 {
6585 dw_die_ref die = val->v.val_die_ref.die;
6586
6587 if (die->comdat_type_p)
6588 {
6589 fprintf (outfile, "die -> signature: ");
6590 print_signature (outfile,
6591 die->die_id.die_type_node->signature);
6592 }
6593 else if (die->die_id.die_symbol)
6594 {
6595 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6596 if (die->with_offset)
6597 fprintf (outfile, " + %ld", die->die_offset);
6598 }
6599 else
6600 fprintf (outfile, "die -> %ld", die->die_offset);
6601 if (flag_dump_noaddr || flag_dump_unnumbered)
6602 fprintf (outfile, " #");
6603 else
6604 fprintf (outfile, " (%p)", (void *) die);
6605 }
6606 else
6607 fprintf (outfile, "die -> <null>");
6608 break;
6609 case dw_val_class_vms_delta:
6610 fprintf (outfile, "delta: @slotcount(%s-%s)",
6611 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6612 break;
6613 case dw_val_class_symview:
6614 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6615 break;
6616 case dw_val_class_lbl_id:
6617 case dw_val_class_lineptr:
6618 case dw_val_class_macptr:
6619 case dw_val_class_loclistsptr:
6620 case dw_val_class_high_pc:
6621 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6622 break;
6623 case dw_val_class_str:
6624 if (val->v.val_str->str != NULL)
6625 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6626 else
6627 fprintf (outfile, "<null>");
6628 break;
6629 case dw_val_class_file:
6630 case dw_val_class_file_implicit:
6631 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6632 val->v.val_file->emitted_number);
6633 break;
6634 case dw_val_class_data8:
6635 {
6636 int i;
6637
6638 for (i = 0; i < 8; i++)
6639 fprintf (outfile, "%02x", val->v.val_data8[i]);
6640 break;
6641 }
6642 case dw_val_class_discr_value:
6643 print_discr_value (outfile, &val->v.val_discr_value);
6644 break;
6645 case dw_val_class_discr_list:
6646 for (dw_discr_list_ref node = val->v.val_discr_list;
6647 node != NULL;
6648 node = node->dw_discr_next)
6649 {
6650 if (node->dw_discr_range)
6651 {
6652 print_discr_value (outfile, &node->dw_discr_lower_bound);
6653 fprintf (outfile, " .. ");
6654 print_discr_value (outfile, &node->dw_discr_upper_bound);
6655 }
6656 else
6657 print_discr_value (outfile, &node->dw_discr_lower_bound);
6658
6659 if (node->dw_discr_next != NULL)
6660 fprintf (outfile, " | ");
6661 }
break;
6662 default:
6663 break;
6664 }
6665 }
6666
6667 /* Likewise, for a DIE attribute. */
6668
6669 static void
6670 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6671 {
6672 print_dw_val (&a->dw_attr_val, recurse, outfile);
6673 }
6674
6675
6676 /* Print the list of operands in the LOC location description to OUTFILE. This
6677 routine is a debugging aid only. */
6678
6679 static void
6680 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6681 {
6682 dw_loc_descr_ref l = loc;
6683
6684 if (loc == NULL)
6685 {
6686 print_spaces (outfile);
6687 fprintf (outfile, "<null>\n");
6688 return;
6689 }
6690
6691 for (l = loc; l != NULL; l = l->dw_loc_next)
6692 {
6693 print_spaces (outfile);
6694 if (flag_dump_noaddr || flag_dump_unnumbered)
6695 fprintf (outfile, "#");
6696 else
6697 fprintf (outfile, "(%p)", (void *) l);
6698 fprintf (outfile, " %s",
6699 dwarf_stack_op_name (l->dw_loc_opc));
6700 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6701 {
6702 fprintf (outfile, " ");
6703 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6704 }
6705 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6706 {
6707 fprintf (outfile, ", ");
6708 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6709 }
6710 fprintf (outfile, "\n");
6711 }
6712 }
6713
6714 /* Print the information associated with a given DIE, and its children.
6715 This routine is a debugging aid only. */
6716
6717 static void
6718 print_die (dw_die_ref die, FILE *outfile)
6719 {
6720 dw_attr_node *a;
6721 dw_die_ref c;
6722 unsigned ix;
6723
6724 print_spaces (outfile);
6725 fprintf (outfile, "DIE %4ld: %s ",
6726 die->die_offset, dwarf_tag_name (die->die_tag));
6727 if (flag_dump_noaddr || flag_dump_unnumbered)
6728 fprintf (outfile, "#\n");
6729 else
6730 fprintf (outfile, "(%p)\n", (void*) die);
6731 print_spaces (outfile);
6732 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6733 fprintf (outfile, " offset: %ld", die->die_offset);
6734 fprintf (outfile, " mark: %d\n", die->die_mark);
6735
6736 if (die->comdat_type_p)
6737 {
6738 print_spaces (outfile);
6739 fprintf (outfile, " signature: ");
6740 print_signature (outfile, die->die_id.die_type_node->signature);
6741 fprintf (outfile, "\n");
6742 }
6743
6744 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6745 {
6746 print_spaces (outfile);
6747 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6748
6749 print_attribute (a, true, outfile);
6750 fprintf (outfile, "\n");
6751 }
6752
6753 if (die->die_child != NULL)
6754 {
6755 print_indent += 4;
6756 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6757 print_indent -= 4;
6758 }
6759 if (print_indent == 0)
6760 fprintf (outfile, "\n");
6761 }
6762
6763 /* Print the list of operations in the LOC location description. */
6764
6765 DEBUG_FUNCTION void
6766 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6767 {
6768 print_loc_descr (loc, stderr);
6769 }
6770
6771 /* Print the information collected for a given DIE. */
6772
6773 DEBUG_FUNCTION void
6774 debug_dwarf_die (dw_die_ref die)
6775 {
6776 print_die (die, stderr);
6777 }
6778
6779 DEBUG_FUNCTION void
6780 debug (die_struct &ref)
6781 {
6782 print_die (&ref, stderr);
6783 }
6784
6785 DEBUG_FUNCTION void
6786 debug (die_struct *ptr)
6787 {
6788 if (ptr)
6789 debug (*ptr);
6790 else
6791 fprintf (stderr, "<nil>\n");
6792 }
6793
6794
6795 /* Print all DWARF information collected for the compilation unit.
6796 This routine is a debugging aid only. */
6797
6798 DEBUG_FUNCTION void
6799 debug_dwarf (void)
6800 {
6801 print_indent = 0;
6802 print_die (comp_unit_die (), stderr);
6803 }
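/* These DEBUG_FUNCTION helpers are intended to be called by hand from
   a debugger while cc1 (or cc1plus) is stopped somewhere inside
   dwarf2out; for example, from gdb:

     (gdb) call debug_dwarf ()
     (gdb) call debug_dwarf_die (die)
     (gdb) call debug_dwarf_loc_descr (loc)

   where "die" and "loc" stand for whatever dw_die_ref or
   dw_loc_descr_ref happens to be in scope.  All output goes to stderr
   through print_die and print_loc_descr.  */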
6804
6805 /* Verify the DIE tree structure. */
6806
6807 DEBUG_FUNCTION void
6808 verify_die (dw_die_ref die)
6809 {
6810 gcc_assert (!die->die_mark);
6811 if (die->die_parent == NULL
6812 && die->die_sib == NULL)
6813 return;
6814 /* Verify the die_sib list is cyclic. */
6815 dw_die_ref x = die;
6816 do
6817 {
6818 x->die_mark = 1;
6819 x = x->die_sib;
6820 }
6821 while (x && !x->die_mark);
6822 gcc_assert (x == die);
6823 x = die;
6824 do
6825 {
6826 /* Verify all dies have the same parent. */
6827 gcc_assert (x->die_parent == die->die_parent);
6828 if (x->die_child)
6829 {
6830 /* Verify the child has the proper parent and recurse. */
6831 gcc_assert (x->die_child->die_parent == x);
6832 verify_die (x->die_child);
6833 }
6834 x->die_mark = 0;
6835 x = x->die_sib;
6836 }
6837 while (x && x->die_mark);
6838 }
6839
6840 /* Sanity checks on DIEs. */
6841
6842 static void
6843 check_die (dw_die_ref die)
6844 {
6845 unsigned ix;
6846 dw_attr_node *a;
6847 bool inline_found = false;
6848 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6849 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6850 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6851 {
6852 switch (a->dw_attr)
6853 {
6854 case DW_AT_inline:
6855 if (a->dw_attr_val.v.val_unsigned)
6856 inline_found = true;
6857 break;
6858 case DW_AT_location:
6859 ++n_location;
6860 break;
6861 case DW_AT_low_pc:
6862 ++n_low_pc;
6863 break;
6864 case DW_AT_high_pc:
6865 ++n_high_pc;
6866 break;
6867 case DW_AT_artificial:
6868 ++n_artificial;
6869 break;
6870 case DW_AT_decl_column:
6871 ++n_decl_column;
6872 break;
6873 case DW_AT_decl_line:
6874 ++n_decl_line;
6875 break;
6876 case DW_AT_decl_file:
6877 ++n_decl_file;
6878 break;
6879 default:
6880 break;
6881 }
6882 }
6883 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6884 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6885 {
6886 fprintf (stderr, "Duplicate attributes in DIE:\n");
6887 debug_dwarf_die (die);
6888 gcc_unreachable ();
6889 }
6890 if (inline_found)
6891 {
6892 /* A debugging information entry that is a member of an abstract
6893 instance tree [that has DW_AT_inline] should not contain any
6894 attributes which describe aspects of the subroutine which vary
6895 between distinct inlined expansions or distinct out-of-line
6896 expansions. */
6897 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6898 gcc_assert (a->dw_attr != DW_AT_low_pc
6899 && a->dw_attr != DW_AT_high_pc
6900 && a->dw_attr != DW_AT_location
6901 && a->dw_attr != DW_AT_frame_base
6902 && a->dw_attr != DW_AT_call_all_calls
6903 && a->dw_attr != DW_AT_GNU_all_call_sites);
6904 }
6905 }
6906 \f
6907 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6908 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6909 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6910
6911 /* Calculate the checksum of a location expression. */
6912
6913 static inline void
6914 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6915 {
6916 int tem;
6917 inchash::hash hstate;
6918 hashval_t hash;
6919
6920 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6921 CHECKSUM (tem);
6922 hash_loc_operands (loc, hstate);
6923 hash = hstate.end();
6924 CHECKSUM (hash);
6925 }
6926
6927 /* Calculate the checksum of an attribute. */
6928
6929 static void
6930 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6931 {
6932 dw_loc_descr_ref loc;
6933 rtx r;
6934
6935 CHECKSUM (at->dw_attr);
6936
6937 /* We don't care that this was compiled with a different compiler
6938 snapshot; if the output is the same, that's what matters. */
6939 if (at->dw_attr == DW_AT_producer)
6940 return;
6941
6942 switch (AT_class (at))
6943 {
6944 case dw_val_class_const:
6945 case dw_val_class_const_implicit:
6946 CHECKSUM (at->dw_attr_val.v.val_int);
6947 break;
6948 case dw_val_class_unsigned_const:
6949 case dw_val_class_unsigned_const_implicit:
6950 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6951 break;
6952 case dw_val_class_const_double:
6953 CHECKSUM (at->dw_attr_val.v.val_double);
6954 break;
6955 case dw_val_class_wide_int:
6956 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6957 get_full_len (*at->dw_attr_val.v.val_wide)
6958 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6959 break;
6960 case dw_val_class_vec:
6961 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6962 (at->dw_attr_val.v.val_vec.length
6963 * at->dw_attr_val.v.val_vec.elt_size));
6964 break;
6965 case dw_val_class_flag:
6966 CHECKSUM (at->dw_attr_val.v.val_flag);
6967 break;
6968 case dw_val_class_str:
6969 CHECKSUM_STRING (AT_string (at));
6970 break;
6971
6972 case dw_val_class_addr:
6973 r = AT_addr (at);
6974 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6975 CHECKSUM_STRING (XSTR (r, 0));
6976 break;
6977
6978 case dw_val_class_offset:
6979 CHECKSUM (at->dw_attr_val.v.val_offset);
6980 break;
6981
6982 case dw_val_class_loc:
6983 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6984 loc_checksum (loc, ctx);
6985 break;
6986
6987 case dw_val_class_die_ref:
6988 die_checksum (AT_ref (at), ctx, mark);
6989 break;
6990
6991 case dw_val_class_fde_ref:
6992 case dw_val_class_vms_delta:
6993 case dw_val_class_symview:
6994 case dw_val_class_lbl_id:
6995 case dw_val_class_lineptr:
6996 case dw_val_class_macptr:
6997 case dw_val_class_loclistsptr:
6998 case dw_val_class_high_pc:
6999 break;
7000
7001 case dw_val_class_file:
7002 case dw_val_class_file_implicit:
7003 CHECKSUM_STRING (AT_file (at)->filename);
7004 break;
7005
7006 case dw_val_class_data8:
7007 CHECKSUM (at->dw_attr_val.v.val_data8);
7008 break;
7009
7010 default:
7011 break;
7012 }
7013 }
7014
7015 /* Calculate the checksum of a DIE. */
7016
7017 static void
7018 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7019 {
7020 dw_die_ref c;
7021 dw_attr_node *a;
7022 unsigned ix;
7023
7024 /* To avoid infinite recursion. */
7025 if (die->die_mark)
7026 {
7027 CHECKSUM (die->die_mark);
7028 return;
7029 }
7030 die->die_mark = ++(*mark);
7031
7032 CHECKSUM (die->die_tag);
7033
7034 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7035 attr_checksum (a, ctx, mark);
7036
7037 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
7038 }
7039
7040 #undef CHECKSUM
7041 #undef CHECKSUM_BLOCK
7042 #undef CHECKSUM_STRING
7043
7044 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
7045 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
7046 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7047 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7048 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7049 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
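/* Note that CHECKSUM_ATTR expands to code referencing the locals DIE, CTX
   and MARK, so it is only usable inside die_checksum_ordered below. */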
7050 #define CHECKSUM_ATTR(FOO) \
7051 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
7052
7053 /* Calculate the checksum of a number in signed LEB128 format. */
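/* For example, -2 encodes as the single byte 0x7e, and 300 encodes as
   0xac 0x02: each byte carries seven value bits, the high bit marks
   continuation, and bit 0x40 of the last byte acts as the sign. */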
7054
7055 static void
7056 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7057 {
7058 unsigned char byte;
7059 bool more;
7060
7061 while (1)
7062 {
7063 byte = (value & 0x7f);
7064 value >>= 7;
7065 more = !((value == 0 && (byte & 0x40) == 0)
7066 || (value == -1 && (byte & 0x40) != 0));
7067 if (more)
7068 byte |= 0x80;
7069 CHECKSUM (byte);
7070 if (!more)
7071 break;
7072 }
7073 }
7074
7075 /* Calculate the checksum of a number in unsigned LEB128 format. */
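/* For example, 129 encodes as 0x81 0x01 and 624485 as 0xe5 0x8e 0x26;
   encoding stops once the remaining value is zero. */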
7076
7077 static void
7078 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7079 {
7080 while (1)
7081 {
7082 unsigned char byte = (value & 0x7f);
7083 value >>= 7;
7084 if (value != 0)
7085 /* More bytes to follow. */
7086 byte |= 0x80;
7087 CHECKSUM (byte);
7088 if (value == 0)
7089 break;
7090 }
7091 }
7092
7093 /* Checksum the context of the DIE. This adds the names of any
7094 surrounding namespaces or structures to the checksum. */
7095
7096 static void
7097 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7098 {
7099 const char *name;
7100 dw_die_ref spec;
7101 int tag = die->die_tag;
7102
7103 if (tag != DW_TAG_namespace
7104 && tag != DW_TAG_structure_type
7105 && tag != DW_TAG_class_type)
7106 return;
7107
7108 name = get_AT_string (die, DW_AT_name);
7109
7110 spec = get_AT_ref (die, DW_AT_specification);
7111 if (spec != NULL)
7112 die = spec;
7113
7114 if (die->die_parent != NULL)
7115 checksum_die_context (die->die_parent, ctx);
7116
7117 CHECKSUM_ULEB128 ('C');
7118 CHECKSUM_ULEB128 (tag);
7119 if (name != NULL)
7120 CHECKSUM_STRING (name);
7121 }
7122
7123 /* Calculate the checksum of a location expression. */
7124
7125 static inline void
7126 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7127 {
7128 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7129 were emitted as a DW_FORM_sdata instead of a location expression. */
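/* This mirrors the DWARF 4 type-signature rules: a member offset emitted
   as a plain constant and one emitted as a DW_OP_plus_uconst expression
   thus produce the same hash. */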
7130 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7131 {
7132 CHECKSUM_ULEB128 (DW_FORM_sdata);
7133 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7134 return;
7135 }
7136
7137 /* Otherwise, just checksum the raw location expression. */
7138 while (loc != NULL)
7139 {
7140 inchash::hash hstate;
7141 hashval_t hash;
7142
7143 CHECKSUM_ULEB128 (loc->dtprel);
7144 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7145 hash_loc_operands (loc, hstate);
7146 hash = hstate.end ();
7147 CHECKSUM (hash);
7148 loc = loc->dw_loc_next;
7149 }
7150 }
7151
7152 /* Calculate the checksum of an attribute. */
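/* The single-letter codes ('A', 'C', 'D', 'E', 'N', 'R', 'S', 'T') mixed
   into the hash below follow the type-signature computation algorithm
   described in the DWARF 4 standard. */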
7153
7154 static void
7155 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7156 struct md5_ctx *ctx, int *mark)
7157 {
7158 dw_loc_descr_ref loc;
7159 rtx r;
7160
7161 if (AT_class (at) == dw_val_class_die_ref)
7162 {
7163 dw_die_ref target_die = AT_ref (at);
7164
7165 /* For pointer and reference types, we checksum only the (qualified)
7166 name of the target type (if there is a name). For friend entries,
7167 we checksum only the (qualified) name of the target type or function.
7168 This allows the checksum to remain the same whether the target type
7169 is complete or not. */
7170 if ((at->dw_attr == DW_AT_type
7171 && (tag == DW_TAG_pointer_type
7172 || tag == DW_TAG_reference_type
7173 || tag == DW_TAG_rvalue_reference_type
7174 || tag == DW_TAG_ptr_to_member_type))
7175 || (at->dw_attr == DW_AT_friend
7176 && tag == DW_TAG_friend))
7177 {
7178 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7179
7180 if (name_attr != NULL)
7181 {
7182 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7183
7184 if (decl == NULL)
7185 decl = target_die;
7186 CHECKSUM_ULEB128 ('N');
7187 CHECKSUM_ULEB128 (at->dw_attr);
7188 if (decl->die_parent != NULL)
7189 checksum_die_context (decl->die_parent, ctx);
7190 CHECKSUM_ULEB128 ('E');
7191 CHECKSUM_STRING (AT_string (name_attr));
7192 return;
7193 }
7194 }
7195
7196 /* For all other references to another DIE, we check to see if the
7197 target DIE has already been visited. If it has, we emit a
7198 backward reference; if not, we descend recursively. */
7199 if (target_die->die_mark > 0)
7200 {
7201 CHECKSUM_ULEB128 ('R');
7202 CHECKSUM_ULEB128 (at->dw_attr);
7203 CHECKSUM_ULEB128 (target_die->die_mark);
7204 }
7205 else
7206 {
7207 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7208
7209 if (decl == NULL)
7210 decl = target_die;
7211 target_die->die_mark = ++(*mark);
7212 CHECKSUM_ULEB128 ('T');
7213 CHECKSUM_ULEB128 (at->dw_attr);
7214 if (decl->die_parent != NULL)
7215 checksum_die_context (decl->die_parent, ctx);
7216 die_checksum_ordered (target_die, ctx, mark);
7217 }
7218 return;
7219 }
7220
7221 CHECKSUM_ULEB128 ('A');
7222 CHECKSUM_ULEB128 (at->dw_attr);
7223
7224 switch (AT_class (at))
7225 {
7226 case dw_val_class_const:
7227 case dw_val_class_const_implicit:
7228 CHECKSUM_ULEB128 (DW_FORM_sdata);
7229 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7230 break;
7231
7232 case dw_val_class_unsigned_const:
7233 case dw_val_class_unsigned_const_implicit:
7234 CHECKSUM_ULEB128 (DW_FORM_sdata);
7235 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7236 break;
7237
7238 case dw_val_class_const_double:
7239 CHECKSUM_ULEB128 (DW_FORM_block);
7240 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7241 CHECKSUM (at->dw_attr_val.v.val_double);
7242 break;
7243
7244 case dw_val_class_wide_int:
7245 CHECKSUM_ULEB128 (DW_FORM_block);
7246 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7247 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7248 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7249 get_full_len (*at->dw_attr_val.v.val_wide)
7250 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7251 break;
7252
7253 case dw_val_class_vec:
7254 CHECKSUM_ULEB128 (DW_FORM_block);
7255 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7256 * at->dw_attr_val.v.val_vec.elt_size);
7257 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7258 (at->dw_attr_val.v.val_vec.length
7259 * at->dw_attr_val.v.val_vec.elt_size));
7260 break;
7261
7262 case dw_val_class_flag:
7263 CHECKSUM_ULEB128 (DW_FORM_flag);
7264 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7265 break;
7266
7267 case dw_val_class_str:
7268 CHECKSUM_ULEB128 (DW_FORM_string);
7269 CHECKSUM_STRING (AT_string (at));
7270 break;
7271
7272 case dw_val_class_addr:
7273 r = AT_addr (at);
7274 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7275 CHECKSUM_ULEB128 (DW_FORM_string);
7276 CHECKSUM_STRING (XSTR (r, 0));
7277 break;
7278
7279 case dw_val_class_offset:
7280 CHECKSUM_ULEB128 (DW_FORM_sdata);
7281 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7282 break;
7283
7284 case dw_val_class_loc:
7285 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7286 loc_checksum_ordered (loc, ctx);
7287 break;
7288
7289 case dw_val_class_fde_ref:
7290 case dw_val_class_symview:
7291 case dw_val_class_lbl_id:
7292 case dw_val_class_lineptr:
7293 case dw_val_class_macptr:
7294 case dw_val_class_loclistsptr:
7295 case dw_val_class_high_pc:
7296 break;
7297
7298 case dw_val_class_file:
7299 case dw_val_class_file_implicit:
7300 CHECKSUM_ULEB128 (DW_FORM_string);
7301 CHECKSUM_STRING (AT_file (at)->filename);
7302 break;
7303
7304 case dw_val_class_data8:
7305 CHECKSUM (at->dw_attr_val.v.val_data8);
7306 break;
7307
7308 default:
7309 break;
7310 }
7311 }
7312
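/* The subset of attributes, gathered from a DIE and its specification,
   that participate in the ordered checksum; filled in by
   collect_checksum_attributes and consumed by die_checksum_ordered. */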
7313 struct checksum_attributes
7314 {
7315 dw_attr_node *at_name;
7316 dw_attr_node *at_type;
7317 dw_attr_node *at_friend;
7318 dw_attr_node *at_accessibility;
7319 dw_attr_node *at_address_class;
7320 dw_attr_node *at_alignment;
7321 dw_attr_node *at_allocated;
7322 dw_attr_node *at_artificial;
7323 dw_attr_node *at_associated;
7324 dw_attr_node *at_binary_scale;
7325 dw_attr_node *at_bit_offset;
7326 dw_attr_node *at_bit_size;
7327 dw_attr_node *at_bit_stride;
7328 dw_attr_node *at_byte_size;
7329 dw_attr_node *at_byte_stride;
7330 dw_attr_node *at_const_value;
7331 dw_attr_node *at_containing_type;
7332 dw_attr_node *at_count;
7333 dw_attr_node *at_data_location;
7334 dw_attr_node *at_data_member_location;
7335 dw_attr_node *at_decimal_scale;
7336 dw_attr_node *at_decimal_sign;
7337 dw_attr_node *at_default_value;
7338 dw_attr_node *at_digit_count;
7339 dw_attr_node *at_discr;
7340 dw_attr_node *at_discr_list;
7341 dw_attr_node *at_discr_value;
7342 dw_attr_node *at_encoding;
7343 dw_attr_node *at_endianity;
7344 dw_attr_node *at_explicit;
7345 dw_attr_node *at_is_optional;
7346 dw_attr_node *at_location;
7347 dw_attr_node *at_lower_bound;
7348 dw_attr_node *at_mutable;
7349 dw_attr_node *at_ordering;
7350 dw_attr_node *at_picture_string;
7351 dw_attr_node *at_prototyped;
7352 dw_attr_node *at_small;
7353 dw_attr_node *at_segment;
7354 dw_attr_node *at_string_length;
7355 dw_attr_node *at_string_length_bit_size;
7356 dw_attr_node *at_string_length_byte_size;
7357 dw_attr_node *at_threads_scaled;
7358 dw_attr_node *at_upper_bound;
7359 dw_attr_node *at_use_location;
7360 dw_attr_node *at_use_UTF8;
7361 dw_attr_node *at_variable_parameter;
7362 dw_attr_node *at_virtuality;
7363 dw_attr_node *at_visibility;
7364 dw_attr_node *at_vtable_elem_location;
7365 };
7366
7367 /* Collect the attributes that we will want to use for the checksum. */
7368
7369 static void
7370 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7371 {
7372 dw_attr_node *a;
7373 unsigned ix;
7374
7375 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7376 {
7377 switch (a->dw_attr)
7378 {
7379 case DW_AT_name:
7380 attrs->at_name = a;
7381 break;
7382 case DW_AT_type:
7383 attrs->at_type = a;
7384 break;
7385 case DW_AT_friend:
7386 attrs->at_friend = a;
7387 break;
7388 case DW_AT_accessibility:
7389 attrs->at_accessibility = a;
7390 break;
7391 case DW_AT_address_class:
7392 attrs->at_address_class = a;
7393 break;
7394 case DW_AT_alignment:
7395 attrs->at_alignment = a;
7396 break;
7397 case DW_AT_allocated:
7398 attrs->at_allocated = a;
7399 break;
7400 case DW_AT_artificial:
7401 attrs->at_artificial = a;
7402 break;
7403 case DW_AT_associated:
7404 attrs->at_associated = a;
7405 break;
7406 case DW_AT_binary_scale:
7407 attrs->at_binary_scale = a;
7408 break;
7409 case DW_AT_bit_offset:
7410 attrs->at_bit_offset = a;
7411 break;
7412 case DW_AT_bit_size:
7413 attrs->at_bit_size = a;
7414 break;
7415 case DW_AT_bit_stride:
7416 attrs->at_bit_stride = a;
7417 break;
7418 case DW_AT_byte_size:
7419 attrs->at_byte_size = a;
7420 break;
7421 case DW_AT_byte_stride:
7422 attrs->at_byte_stride = a;
7423 break;
7424 case DW_AT_const_value:
7425 attrs->at_const_value = a;
7426 break;
7427 case DW_AT_containing_type:
7428 attrs->at_containing_type = a;
7429 break;
7430 case DW_AT_count:
7431 attrs->at_count = a;
7432 break;
7433 case DW_AT_data_location:
7434 attrs->at_data_location = a;
7435 break;
7436 case DW_AT_data_member_location:
7437 attrs->at_data_member_location = a;
7438 break;
7439 case DW_AT_decimal_scale:
7440 attrs->at_decimal_scale = a;
7441 break;
7442 case DW_AT_decimal_sign:
7443 attrs->at_decimal_sign = a;
7444 break;
7445 case DW_AT_default_value:
7446 attrs->at_default_value = a;
7447 break;
7448 case DW_AT_digit_count:
7449 attrs->at_digit_count = a;
7450 break;
7451 case DW_AT_discr:
7452 attrs->at_discr = a;
7453 break;
7454 case DW_AT_discr_list:
7455 attrs->at_discr_list = a;
7456 break;
7457 case DW_AT_discr_value:
7458 attrs->at_discr_value = a;
7459 break;
7460 case DW_AT_encoding:
7461 attrs->at_encoding = a;
7462 break;
7463 case DW_AT_endianity:
7464 attrs->at_endianity = a;
7465 break;
7466 case DW_AT_explicit:
7467 attrs->at_explicit = a;
7468 break;
7469 case DW_AT_is_optional:
7470 attrs->at_is_optional = a;
7471 break;
7472 case DW_AT_location:
7473 attrs->at_location = a;
7474 break;
7475 case DW_AT_lower_bound:
7476 attrs->at_lower_bound = a;
7477 break;
7478 case DW_AT_mutable:
7479 attrs->at_mutable = a;
7480 break;
7481 case DW_AT_ordering:
7482 attrs->at_ordering = a;
7483 break;
7484 case DW_AT_picture_string:
7485 attrs->at_picture_string = a;
7486 break;
7487 case DW_AT_prototyped:
7488 attrs->at_prototyped = a;
7489 break;
7490 case DW_AT_small:
7491 attrs->at_small = a;
7492 break;
7493 case DW_AT_segment:
7494 attrs->at_segment = a;
7495 break;
7496 case DW_AT_string_length:
7497 attrs->at_string_length = a;
7498 break;
7499 case DW_AT_string_length_bit_size:
7500 attrs->at_string_length_bit_size = a;
7501 break;
7502 case DW_AT_string_length_byte_size:
7503 attrs->at_string_length_byte_size = a;
7504 break;
7505 case DW_AT_threads_scaled:
7506 attrs->at_threads_scaled = a;
7507 break;
7508 case DW_AT_upper_bound:
7509 attrs->at_upper_bound = a;
7510 break;
7511 case DW_AT_use_location:
7512 attrs->at_use_location = a;
7513 break;
7514 case DW_AT_use_UTF8:
7515 attrs->at_use_UTF8 = a;
7516 break;
7517 case DW_AT_variable_parameter:
7518 attrs->at_variable_parameter = a;
7519 break;
7520 case DW_AT_virtuality:
7521 attrs->at_virtuality = a;
7522 break;
7523 case DW_AT_visibility:
7524 attrs->at_visibility = a;
7525 break;
7526 case DW_AT_vtable_elem_location:
7527 attrs->at_vtable_elem_location = a;
7528 break;
7529 default:
7530 break;
7531 }
7532 }
7533 }
7534
7535 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7536
7537 static void
7538 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7539 {
7540 dw_die_ref c;
7541 dw_die_ref decl;
7542 struct checksum_attributes attrs;
7543
7544 CHECKSUM_ULEB128 ('D');
7545 CHECKSUM_ULEB128 (die->die_tag);
7546
7547 memset (&attrs, 0, sizeof (attrs));
7548
7549 decl = get_AT_ref (die, DW_AT_specification);
7550 if (decl != NULL)
7551 collect_checksum_attributes (&attrs, decl);
7552 collect_checksum_attributes (&attrs, die);
7553
7554 CHECKSUM_ATTR (attrs.at_name);
7555 CHECKSUM_ATTR (attrs.at_accessibility);
7556 CHECKSUM_ATTR (attrs.at_address_class);
7557 CHECKSUM_ATTR (attrs.at_allocated);
7558 CHECKSUM_ATTR (attrs.at_artificial);
7559 CHECKSUM_ATTR (attrs.at_associated);
7560 CHECKSUM_ATTR (attrs.at_binary_scale);
7561 CHECKSUM_ATTR (attrs.at_bit_offset);
7562 CHECKSUM_ATTR (attrs.at_bit_size);
7563 CHECKSUM_ATTR (attrs.at_bit_stride);
7564 CHECKSUM_ATTR (attrs.at_byte_size);
7565 CHECKSUM_ATTR (attrs.at_byte_stride);
7566 CHECKSUM_ATTR (attrs.at_const_value);
7567 CHECKSUM_ATTR (attrs.at_containing_type);
7568 CHECKSUM_ATTR (attrs.at_count);
7569 CHECKSUM_ATTR (attrs.at_data_location);
7570 CHECKSUM_ATTR (attrs.at_data_member_location);
7571 CHECKSUM_ATTR (attrs.at_decimal_scale);
7572 CHECKSUM_ATTR (attrs.at_decimal_sign);
7573 CHECKSUM_ATTR (attrs.at_default_value);
7574 CHECKSUM_ATTR (attrs.at_digit_count);
7575 CHECKSUM_ATTR (attrs.at_discr);
7576 CHECKSUM_ATTR (attrs.at_discr_list);
7577 CHECKSUM_ATTR (attrs.at_discr_value);
7578 CHECKSUM_ATTR (attrs.at_encoding);
7579 CHECKSUM_ATTR (attrs.at_endianity);
7580 CHECKSUM_ATTR (attrs.at_explicit);
7581 CHECKSUM_ATTR (attrs.at_is_optional);
7582 CHECKSUM_ATTR (attrs.at_location);
7583 CHECKSUM_ATTR (attrs.at_lower_bound);
7584 CHECKSUM_ATTR (attrs.at_mutable);
7585 CHECKSUM_ATTR (attrs.at_ordering);
7586 CHECKSUM_ATTR (attrs.at_picture_string);
7587 CHECKSUM_ATTR (attrs.at_prototyped);
7588 CHECKSUM_ATTR (attrs.at_small);
7589 CHECKSUM_ATTR (attrs.at_segment);
7590 CHECKSUM_ATTR (attrs.at_string_length);
7591 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7592 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7593 CHECKSUM_ATTR (attrs.at_threads_scaled);
7594 CHECKSUM_ATTR (attrs.at_upper_bound);
7595 CHECKSUM_ATTR (attrs.at_use_location);
7596 CHECKSUM_ATTR (attrs.at_use_UTF8);
7597 CHECKSUM_ATTR (attrs.at_variable_parameter);
7598 CHECKSUM_ATTR (attrs.at_virtuality);
7599 CHECKSUM_ATTR (attrs.at_visibility);
7600 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7601 CHECKSUM_ATTR (attrs.at_type);
7602 CHECKSUM_ATTR (attrs.at_friend);
7603 CHECKSUM_ATTR (attrs.at_alignment);
7604
7605 /* Checksum the child DIEs. */
7606 c = die->die_child;
7607 if (c) do {
7608 dw_attr_node *name_attr;
7609
7610 c = c->die_sib;
7611 name_attr = get_AT (c, DW_AT_name);
7612 if (is_template_instantiation (c))
7613 {
7614 /* Ignore instantiations of member type and function templates. */
7615 }
7616 else if (name_attr != NULL
7617 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7618 {
7619 /* Use a shallow checksum for named nested types and member
7620 functions. */
7621 CHECKSUM_ULEB128 ('S');
7622 CHECKSUM_ULEB128 (c->die_tag);
7623 CHECKSUM_STRING (AT_string (name_attr));
7624 }
7625 else
7626 {
7627 /* Use a deep checksum for other children. */
7628 /* Mark this DIE so it gets processed when unmarking. */
7629 if (c->die_mark == 0)
7630 c->die_mark = -1;
7631 die_checksum_ordered (c, ctx, mark);
7632 }
7633 } while (c != die->die_child);
7634
7635 CHECKSUM_ULEB128 (0);
7636 }
7637
7638 /* Add a type name and tag to a hash. */
7639 static void
7640 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7641 {
7642 CHECKSUM_ULEB128 (tag);
7643 CHECKSUM_STRING (name);
7644 }
7645
7646 #undef CHECKSUM
7647 #undef CHECKSUM_STRING
7648 #undef CHECKSUM_ATTR
7649 #undef CHECKSUM_SLEB128
7650 #undef CHECKSUM_ULEB128
7651
7652 /* Generate the type signature for DIE. This is computed by generating an
7653 MD5 checksum over the DIE's tag, its relevant attributes, and its
7654 children. Attributes that are references to other DIEs are processed
7655 by recursion, using the MARK field to prevent infinite recursion.
7656 If the DIE is nested inside a namespace or another type, we also
7657 need to include that context in the signature. The lower 64 bits
7658 of the resulting MD5 checksum comprise the signature. */
7659
7660 static void
7661 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7662 {
7663 int mark;
7664 const char *name;
7665 unsigned char checksum[16];
7666 struct md5_ctx ctx;
7667 dw_die_ref decl;
7668 dw_die_ref parent;
7669
7670 name = get_AT_string (die, DW_AT_name);
7671 decl = get_AT_ref (die, DW_AT_specification);
7672 parent = get_die_parent (die);
7673
7674 /* First, compute a signature for just the type name (and its surrounding
7675 context, if any). This is stored in the type unit DIE for link-time
7676 ODR (one-definition rule) checking. */
7677
7678 if (is_cxx () && name != NULL)
7679 {
7680 md5_init_ctx (&ctx);
7681
7682 /* Checksum the names of surrounding namespaces and structures. */
7683 if (parent != NULL)
7684 checksum_die_context (parent, &ctx);
7685
7686 /* Checksum the current DIE. */
7687 die_odr_checksum (die->die_tag, name, &ctx);
7688 md5_finish_ctx (&ctx, checksum);
7689
7690 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7691 }
7692
7693 /* Next, compute the complete type signature. */
7694
7695 md5_init_ctx (&ctx);
7696 mark = 1;
7697 die->die_mark = mark;
7698
7699 /* Checksum the names of surrounding namespaces and structures. */
7700 if (parent != NULL)
7701 checksum_die_context (parent, &ctx);
7702
7703 /* Checksum the DIE and its children. */
7704 die_checksum_ordered (die, &ctx, &mark);
7705 unmark_all_dies (die);
7706 md5_finish_ctx (&ctx, checksum);
7707
7708 /* Store the signature in the type node and link the type DIE and the
7709 type node together. */
7710 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7711 DWARF_TYPE_SIGNATURE_SIZE);
7712 die->comdat_type_p = true;
7713 die->die_id.die_type_node = type_node;
7714 type_node->type_die = die;
7715
7716 /* If the DIE is a specification, link its declaration to the type node
7717 as well. */
7718 if (decl != NULL)
7719 {
7720 decl->comdat_type_p = true;
7721 decl->die_id.die_type_node = type_node;
7722 }
7723 }
7724
7725 /* Do the location expressions look the same? */
7726 static inline int
7727 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7728 {
7729 return loc1->dw_loc_opc == loc2->dw_loc_opc
7730 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7731 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7732 }
7733
7734 /* Do the values look the same? */
7735 static int
7736 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7737 {
7738 dw_loc_descr_ref loc1, loc2;
7739 rtx r1, r2;
7740
7741 if (v1->val_class != v2->val_class)
7742 return 0;
7743
7744 switch (v1->val_class)
7745 {
7746 case dw_val_class_const:
7747 case dw_val_class_const_implicit:
7748 return v1->v.val_int == v2->v.val_int;
7749 case dw_val_class_unsigned_const:
7750 case dw_val_class_unsigned_const_implicit:
7751 return v1->v.val_unsigned == v2->v.val_unsigned;
7752 case dw_val_class_const_double:
7753 return v1->v.val_double.high == v2->v.val_double.high
7754 && v1->v.val_double.low == v2->v.val_double.low;
7755 case dw_val_class_wide_int:
7756 return *v1->v.val_wide == *v2->v.val_wide;
7757 case dw_val_class_vec:
7758 if (v1->v.val_vec.length != v2->v.val_vec.length
7759 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7760 return 0;
7761 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7762 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7763 return 0;
7764 return 1;
7765 case dw_val_class_flag:
7766 return v1->v.val_flag == v2->v.val_flag;
7767 case dw_val_class_str:
7768 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7769
7770 case dw_val_class_addr:
7771 r1 = v1->v.val_addr;
7772 r2 = v2->v.val_addr;
7773 if (GET_CODE (r1) != GET_CODE (r2))
7774 return 0;
7775 return rtx_equal_p (r1, r2);
7776
7777 case dw_val_class_offset:
7778 return v1->v.val_offset == v2->v.val_offset;
7779
7780 case dw_val_class_loc:
7781 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7782 loc1 && loc2;
7783 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7784 if (!same_loc_p (loc1, loc2, mark))
7785 return 0;
7786 return !loc1 && !loc2;
7787
7788 case dw_val_class_die_ref:
7789 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7790
7791 case dw_val_class_symview:
7792 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7793
7794 case dw_val_class_fde_ref:
7795 case dw_val_class_vms_delta:
7796 case dw_val_class_lbl_id:
7797 case dw_val_class_lineptr:
7798 case dw_val_class_macptr:
7799 case dw_val_class_loclistsptr:
7800 case dw_val_class_high_pc:
7801 return 1;
7802
7803 case dw_val_class_file:
7804 case dw_val_class_file_implicit:
7805 return v1->v.val_file == v2->v.val_file;
7806
7807 case dw_val_class_data8:
7808 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7809
7810 default:
7811 return 1;
7812 }
7813 }
7814
7815 /* Do the attributes look the same? */
7816
7817 static int
7818 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7819 {
7820 if (at1->dw_attr != at2->dw_attr)
7821 return 0;
7822
7823 /* We don't care that this was compiled with a different compiler
7824 snapshot; if the output is the same, that's what matters. */
7825 if (at1->dw_attr == DW_AT_producer)
7826 return 1;
7827
7828 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7829 }
7830
7831 /* Do the dies look the same? */
7832
7833 static int
7834 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7835 {
7836 dw_die_ref c1, c2;
7837 dw_attr_node *a1;
7838 unsigned ix;
7839
7840 /* To avoid infinite recursion. */
7841 if (die1->die_mark)
7842 return die1->die_mark == die2->die_mark;
7843 die1->die_mark = die2->die_mark = ++(*mark);
7844
7845 if (die1->die_tag != die2->die_tag)
7846 return 0;
7847
7848 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7849 return 0;
7850
7851 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7852 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7853 return 0;
7854
7855 c1 = die1->die_child;
7856 c2 = die2->die_child;
7857 if (! c1)
7858 {
7859 if (c2)
7860 return 0;
7861 }
7862 else
7863 for (;;)
7864 {
7865 if (!same_die_p (c1, c2, mark))
7866 return 0;
7867 c1 = c1->die_sib;
7868 c2 = c2->die_sib;
7869 if (c1 == die1->die_child)
7870 {
7871 if (c2 == die2->die_child)
7872 break;
7873 else
7874 return 0;
7875 }
7876 }
7877
7878 return 1;
7879 }
7880
7881 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7882 children, and set die_symbol. */
7883
7884 static void
7885 compute_comp_unit_symbol (dw_die_ref unit_die)
7886 {
7887 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7888 const char *base = die_name ? lbasename (die_name) : "anonymous";
7889 char *name = XALLOCAVEC (char, strlen (base) + 64);
7890 char *p;
7891 int i, mark;
7892 unsigned char checksum[16];
7893 struct md5_ctx ctx;
7894
7895 /* Compute the checksum of the DIE, then append part of it as hex digits to
7896 the name of the unit, which is derived from its filename. */
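/* For example, a unit named foo.c typically ends up with a symbol of the
   form foo_c_XXXXXXXX, where the X's are the first four checksum bytes in
   hex (assuming clean_symbol_name maps other characters to '_'). */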
7897
7898 md5_init_ctx (&ctx);
7899 mark = 0;
7900 die_checksum (unit_die, &ctx, &mark);
7901 unmark_all_dies (unit_die);
7902 md5_finish_ctx (&ctx, checksum);
7903
7904 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7905 not start with a letter but with anything valid for filenames, and
7906 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7907 character is not a letter. */
7908 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7909 clean_symbol_name (name);
7910
7911 p = name + strlen (name);
7912 for (i = 0; i < 4; i++)
7913 {
7914 sprintf (p, "%.2x", checksum[i]);
7915 p += 2;
7916 }
7917
7918 unit_die->die_id.die_symbol = xstrdup (name);
7919 }
7920
7921 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7922
7923 static int
7924 is_type_die (dw_die_ref die)
7925 {
7926 switch (die->die_tag)
7927 {
7928 case DW_TAG_array_type:
7929 case DW_TAG_class_type:
7930 case DW_TAG_interface_type:
7931 case DW_TAG_enumeration_type:
7932 case DW_TAG_pointer_type:
7933 case DW_TAG_reference_type:
7934 case DW_TAG_rvalue_reference_type:
7935 case DW_TAG_string_type:
7936 case DW_TAG_structure_type:
7937 case DW_TAG_subroutine_type:
7938 case DW_TAG_union_type:
7939 case DW_TAG_ptr_to_member_type:
7940 case DW_TAG_set_type:
7941 case DW_TAG_subrange_type:
7942 case DW_TAG_base_type:
7943 case DW_TAG_const_type:
7944 case DW_TAG_file_type:
7945 case DW_TAG_packed_type:
7946 case DW_TAG_volatile_type:
7947 case DW_TAG_typedef:
7948 return 1;
7949 default:
7950 return 0;
7951 }
7952 }
7953
7954 /* Returns true iff C is a compile-unit DIE. */
7955
7956 static inline bool
7957 is_cu_die (dw_die_ref c)
7958 {
7959 return c && (c->die_tag == DW_TAG_compile_unit
7960 || c->die_tag == DW_TAG_skeleton_unit);
7961 }
7962
7963 /* Returns true iff C is a unit DIE of some sort. */
7964
7965 static inline bool
7966 is_unit_die (dw_die_ref c)
7967 {
7968 return c && (c->die_tag == DW_TAG_compile_unit
7969 || c->die_tag == DW_TAG_partial_unit
7970 || c->die_tag == DW_TAG_type_unit
7971 || c->die_tag == DW_TAG_skeleton_unit);
7972 }
7973
7974 /* Returns true iff C is a namespace DIE. */
7975
7976 static inline bool
7977 is_namespace_die (dw_die_ref c)
7978 {
7979 return c && c->die_tag == DW_TAG_namespace;
7980 }
7981
7982 /* Return non-zero if this DIE is a template parameter. */
7983
7984 static inline bool
7985 is_template_parameter (dw_die_ref die)
7986 {
7987 switch (die->die_tag)
7988 {
7989 case DW_TAG_template_type_param:
7990 case DW_TAG_template_value_param:
7991 case DW_TAG_GNU_template_template_param:
7992 case DW_TAG_GNU_template_parameter_pack:
7993 return true;
7994 default:
7995 return false;
7996 }
7997 }
7998
7999 /* Return non-zero if this DIE represents a template instantiation. */
8000
8001 static inline bool
8002 is_template_instantiation (dw_die_ref die)
8003 {
8004 dw_die_ref c;
8005
8006 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
8007 return false;
8008 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
8009 return false;
8010 }
8011
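/* Generate a fresh internal label name built from PREFIX and a counter,
   and return it in newly allocated memory. */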
8012 static char *
8013 gen_internal_sym (const char *prefix)
8014 {
8015 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
8016
8017 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
8018 return xstrdup (buf);
8019 }
8020
8021 /* Return non-zero if this DIE is a declaration. */
8022
8023 static int
8024 is_declaration_die (dw_die_ref die)
8025 {
8026 dw_attr_node *a;
8027 unsigned ix;
8028
8029 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8030 if (a->dw_attr == DW_AT_declaration)
8031 return 1;
8032
8033 return 0;
8034 }
8035
8036 /* Return non-zero if this DIE is nested inside a subprogram. */
8037
8038 static int
8039 is_nested_in_subprogram (dw_die_ref die)
8040 {
8041 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8042
8043 if (decl == NULL)
8044 decl = die;
8045 return local_scope_p (decl);
8046 }
8047
8048 /* Return non-zero if this DIE contains a defining declaration of a
8049 subprogram. */
8050
8051 static int
8052 contains_subprogram_definition (dw_die_ref die)
8053 {
8054 dw_die_ref c;
8055
8056 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8057 return 1;
8058 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8059 return 0;
8060 }
8061
8062 /* Return non-zero if this is a type DIE that should be moved to a
8063 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8064 unit type. */
8065
8066 static int
8067 should_move_die_to_comdat (dw_die_ref die)
8068 {
8069 switch (die->die_tag)
8070 {
8071 case DW_TAG_class_type:
8072 case DW_TAG_structure_type:
8073 case DW_TAG_enumeration_type:
8074 case DW_TAG_union_type:
8075 /* Don't move declarations, inlined instances, types nested in a
8076 subprogram, or types that contain subprogram definitions. */
8077 if (is_declaration_die (die)
8078 || get_AT (die, DW_AT_abstract_origin)
8079 || is_nested_in_subprogram (die)
8080 || contains_subprogram_definition (die))
8081 return 0;
8082 return 1;
8083 case DW_TAG_array_type:
8084 case DW_TAG_interface_type:
8085 case DW_TAG_pointer_type:
8086 case DW_TAG_reference_type:
8087 case DW_TAG_rvalue_reference_type:
8088 case DW_TAG_string_type:
8089 case DW_TAG_subroutine_type:
8090 case DW_TAG_ptr_to_member_type:
8091 case DW_TAG_set_type:
8092 case DW_TAG_subrange_type:
8093 case DW_TAG_base_type:
8094 case DW_TAG_const_type:
8095 case DW_TAG_file_type:
8096 case DW_TAG_packed_type:
8097 case DW_TAG_volatile_type:
8098 case DW_TAG_typedef:
8099 default:
8100 return 0;
8101 }
8102 }
8103
8104 /* Make a clone of DIE. */
8105
8106 static dw_die_ref
8107 clone_die (dw_die_ref die)
8108 {
8109 dw_die_ref clone = new_die_raw (die->die_tag);
8110 dw_attr_node *a;
8111 unsigned ix;
8112
8113 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8114 add_dwarf_attr (clone, a);
8115
8116 return clone;
8117 }
8118
8119 /* Make a clone of the tree rooted at DIE. */
8120
8121 static dw_die_ref
8122 clone_tree (dw_die_ref die)
8123 {
8124 dw_die_ref c;
8125 dw_die_ref clone = clone_die (die);
8126
8127 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8128
8129 return clone;
8130 }
8131
8132 /* Make a clone of DIE as a declaration. */
8133
8134 static dw_die_ref
8135 clone_as_declaration (dw_die_ref die)
8136 {
8137 dw_die_ref clone;
8138 dw_die_ref decl;
8139 dw_attr_node *a;
8140 unsigned ix;
8141
8142 /* If the DIE is already a declaration, just clone it. */
8143 if (is_declaration_die (die))
8144 return clone_die (die);
8145
8146 /* If the DIE is a specification, just clone its declaration DIE. */
8147 decl = get_AT_ref (die, DW_AT_specification);
8148 if (decl != NULL)
8149 {
8150 clone = clone_die (decl);
8151 if (die->comdat_type_p)
8152 add_AT_die_ref (clone, DW_AT_signature, die);
8153 return clone;
8154 }
8155
8156 clone = new_die_raw (die->die_tag);
8157
8158 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8159 {
8160 /* We don't want to copy over all attributes.
8161 For example we don't want DW_AT_byte_size because otherwise we will no
8162 longer have a declaration and GDB will treat it as a definition. */
8163
8164 switch (a->dw_attr)
8165 {
8166 case DW_AT_abstract_origin:
8167 case DW_AT_artificial:
8168 case DW_AT_containing_type:
8169 case DW_AT_external:
8170 case DW_AT_name:
8171 case DW_AT_type:
8172 case DW_AT_virtuality:
8173 case DW_AT_linkage_name:
8174 case DW_AT_MIPS_linkage_name:
8175 add_dwarf_attr (clone, a);
8176 break;
8177 case DW_AT_byte_size:
8178 case DW_AT_alignment:
8179 default:
8180 break;
8181 }
8182 }
8183
8184 if (die->comdat_type_p)
8185 add_AT_die_ref (clone, DW_AT_signature, die);
8186
8187 add_AT_flag (clone, DW_AT_declaration, 1);
8188 return clone;
8189 }
8190
8191
8192 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8193
8194 struct decl_table_entry
8195 {
8196 dw_die_ref orig;
8197 dw_die_ref copy;
8198 };
8199
8200 /* Helpers to manipulate hash table of copied declarations. */
8201
8202 /* Hashtable helpers. */
8203
8204 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8205 {
8206 typedef die_struct *compare_type;
8207 static inline hashval_t hash (const decl_table_entry *);
8208 static inline bool equal (const decl_table_entry *, const die_struct *);
8209 };
8210
8211 inline hashval_t
8212 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8213 {
8214 return htab_hash_pointer (entry->orig);
8215 }
8216
8217 inline bool
8218 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8219 const die_struct *entry2)
8220 {
8221 return entry1->orig == entry2;
8222 }
8223
8224 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8225
8226 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8227 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8228 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8229 to check if the ancestor has already been copied into UNIT. */
8230
8231 static dw_die_ref
8232 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8233 decl_hash_type *decl_table)
8234 {
8235 dw_die_ref parent = die->die_parent;
8236 dw_die_ref new_parent = unit;
8237 dw_die_ref copy;
8238 decl_table_entry **slot = NULL;
8239 struct decl_table_entry *entry = NULL;
8240
8241 /* If DIE refers to a stub, unfold that so we get the appropriate
8242 DIE registered as orig in decl_table. */
8243 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8244 die = c;
8245
8246 if (decl_table)
8247 {
8248 /* Check if the entry has already been copied to UNIT. */
8249 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8250 INSERT);
8251 if (*slot != HTAB_EMPTY_ENTRY)
8252 {
8253 entry = *slot;
8254 return entry->copy;
8255 }
8256
8257 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8258 entry = XCNEW (struct decl_table_entry);
8259 entry->orig = die;
8260 entry->copy = NULL;
8261 *slot = entry;
8262 }
8263
8264 if (parent != NULL)
8265 {
8266 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8267 if (spec != NULL)
8268 parent = spec;
8269 if (!is_unit_die (parent))
8270 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8271 }
8272
8273 copy = clone_as_declaration (die);
8274 add_child_die (new_parent, copy);
8275
8276 if (decl_table)
8277 {
8278 /* Record the pointer to the copy. */
8279 entry->copy = copy;
8280 }
8281
8282 return copy;
8283 }
8284 /* Copy the declaration context to the new type unit DIE. This includes
8285 any surrounding namespace or type declarations. If the DIE has a
8286 DW_AT_specification attribute, the attributes and children attached to
8287 the specification are copied as well, and a pointer to the original
8288 parent of the declaration DIE is returned. Returns NULL otherwise. */
8289
8290 static dw_die_ref
8291 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8292 {
8293 dw_die_ref decl;
8294 dw_die_ref new_decl;
8295 dw_die_ref orig_parent = NULL;
8296
8297 decl = get_AT_ref (die, DW_AT_specification);
8298 if (decl == NULL)
8299 decl = die;
8300 else
8301 {
8302 unsigned ix;
8303 dw_die_ref c;
8304 dw_attr_node *a;
8305
8306 /* The original DIE will be changed to a declaration, and must
8307 be moved to be a child of the original declaration DIE. */
8308 orig_parent = decl->die_parent;
8309
8310 /* Copy the type node pointer from the new DIE to the original
8311 declaration DIE so we can forward references later. */
8312 decl->comdat_type_p = true;
8313 decl->die_id.die_type_node = die->die_id.die_type_node;
8314
8315 remove_AT (die, DW_AT_specification);
8316
8317 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8318 {
8319 if (a->dw_attr != DW_AT_name
8320 && a->dw_attr != DW_AT_declaration
8321 && a->dw_attr != DW_AT_external)
8322 add_dwarf_attr (die, a);
8323 }
8324
8325 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8326 }
8327
8328 if (decl->die_parent != NULL
8329 && !is_unit_die (decl->die_parent))
8330 {
8331 new_decl = copy_ancestor_tree (unit, decl, NULL);
8332 if (new_decl != NULL)
8333 {
8334 remove_AT (new_decl, DW_AT_signature);
8335 add_AT_specification (die, new_decl);
8336 }
8337 }
8338
8339 return orig_parent;
8340 }
8341
8342 /* Generate the skeleton ancestor tree for the given NODE, then clone
8343 the DIE and add the clone into the tree. */
8344
8345 static void
8346 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8347 {
8348 if (node->new_die != NULL)
8349 return;
8350
8351 node->new_die = clone_as_declaration (node->old_die);
8352
8353 if (node->parent != NULL)
8354 {
8355 generate_skeleton_ancestor_tree (node->parent);
8356 add_child_die (node->parent->new_die, node->new_die);
8357 }
8358 }
8359
8360 /* Generate a skeleton tree of DIEs containing any declarations that are
8361 found in the original tree. We traverse the tree looking for declaration
8362 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8363
8364 static void
8365 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8366 {
8367 skeleton_chain_node node;
8368 dw_die_ref c;
8369 dw_die_ref first;
8370 dw_die_ref prev = NULL;
8371 dw_die_ref next = NULL;
8372
8373 node.parent = parent;
8374
8375 first = c = parent->old_die->die_child;
8376 if (c)
8377 next = c->die_sib;
8378 if (c) do {
8379 if (prev == NULL || prev->die_sib == c)
8380 prev = c;
8381 c = next;
8382 next = (c == first ? NULL : c->die_sib);
8383 node.old_die = c;
8384 node.new_die = NULL;
8385 if (is_declaration_die (c))
8386 {
8387 if (is_template_instantiation (c))
8388 {
8389 /* Instantiated templates do not need to be cloned into the
8390 type unit. Just move the DIE and its children back to
8391 the skeleton tree (in the main CU). */
8392 remove_child_with_prev (c, prev);
8393 add_child_die (parent->new_die, c);
8394 c = prev;
8395 }
8396 else if (c->comdat_type_p)
8397 {
8398 /* This is the skeleton of earlier break_out_comdat_types
8399 type. Clone the existing DIE, but keep the children
8400 under the original (which is in the main CU). */
8401 dw_die_ref clone = clone_die (c);
8402
8403 replace_child (c, clone, prev);
8404 generate_skeleton_ancestor_tree (parent);
8405 add_child_die (parent->new_die, c);
8406 c = clone;
8407 continue;
8408 }
8409 else
8410 {
8411 /* Clone the existing DIE, move the original to the skeleton
8412 tree (which is in the main CU), and put the clone, with
8413 all the original's children, where the original came from
8414 (which is about to be moved to the type unit). */
8415 dw_die_ref clone = clone_die (c);
8416 move_all_children (c, clone);
8417
8418 /* If the original has a DW_AT_object_pointer attribute,
8419 it would now point to a child DIE just moved to the
8420 cloned tree, so we need to remove that attribute from
8421 the original. */
8422 remove_AT (c, DW_AT_object_pointer);
8423
8424 replace_child (c, clone, prev);
8425 generate_skeleton_ancestor_tree (parent);
8426 add_child_die (parent->new_die, c);
8427 node.old_die = clone;
8428 node.new_die = c;
8429 c = clone;
8430 }
8431 }
8432 generate_skeleton_bottom_up (&node);
8433 } while (next != NULL);
8434 }
8435
8436 /* Wrapper function for generate_skeleton_bottom_up. */
8437
8438 static dw_die_ref
8439 generate_skeleton (dw_die_ref die)
8440 {
8441 skeleton_chain_node node;
8442
8443 node.old_die = die;
8444 node.new_die = NULL;
8445 node.parent = NULL;
8446
8447 /* If this type definition is nested inside another type,
8448 and is not an instantiation of a template, always leave
8449 at least a declaration in its place. */
8450 if (die->die_parent != NULL
8451 && is_type_die (die->die_parent)
8452 && !is_template_instantiation (die))
8453 node.new_die = clone_as_declaration (die);
8454
8455 generate_skeleton_bottom_up (&node);
8456 return node.new_die;
8457 }
8458
8459 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8460 declaration. The original DIE is moved to a new compile unit so that
8461 existing references to it follow it to the new location. If any of the
8462 original DIE's descendants is a declaration, we need to replace the
8463 original DIE with a skeleton tree and move the declarations back into the
8464 skeleton tree. */
8465
8466 static dw_die_ref
8467 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8468 dw_die_ref prev)
8469 {
8470 dw_die_ref skeleton, orig_parent;
8471
8472 /* Copy the declaration context to the type unit DIE. If the returned
8473 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8474 that DIE. */
8475 orig_parent = copy_declaration_context (unit, child);
8476
8477 skeleton = generate_skeleton (child);
8478 if (skeleton == NULL)
8479 remove_child_with_prev (child, prev);
8480 else
8481 {
8482 skeleton->comdat_type_p = true;
8483 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8484
8485 /* If the original DIE was a specification, we need to put
8486 the skeleton under the parent DIE of the declaration.
8487 This leaves the original declaration in the tree, but
8488 it will be pruned later since there are no longer any
8489 references to it. */
8490 if (orig_parent != NULL)
8491 {
8492 remove_child_with_prev (child, prev);
8493 add_child_die (orig_parent, skeleton);
8494 }
8495 else
8496 replace_child (child, skeleton, prev);
8497 }
8498
8499 return skeleton;
8500 }
8501
8502 static void
8503 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8504 comdat_type_node *type_node,
8505 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8506
8507 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8508 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8509 DWARF procedure references in the DW_AT_location attribute. */
8510
8511 static dw_die_ref
8512 copy_dwarf_procedure (dw_die_ref die,
8513 comdat_type_node *type_node,
8514 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8515 {
8516 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8517
8518 /* DWARF procedures are not supposed to have children... */
8519 gcc_assert (die->die_child == NULL);
8520
8521 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8522 gcc_assert (vec_safe_length (die->die_attr) == 1
8523 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8524
8525 /* Do not copy DWARF procedures more than once. */
8526 bool existed;
8527 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8528 if (existed)
8529 return die_copy;
8530
8531 die_copy = clone_die (die);
8532 add_child_die (type_node->root_die, die_copy);
8533 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8534 return die_copy;
8535 }
8536
8537 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8538 procedures in DIE's attributes. */
8539
8540 static void
8541 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8542 comdat_type_node *type_node,
8543 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8544 {
8545 dw_attr_node *a;
8546 unsigned i;
8547
8548 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8549 {
8550 dw_loc_descr_ref loc;
8551
8552 if (a->dw_attr_val.val_class != dw_val_class_loc)
8553 continue;
8554
8555 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8556 {
8557 switch (loc->dw_loc_opc)
8558 {
8559 case DW_OP_call2:
8560 case DW_OP_call4:
8561 case DW_OP_call_ref:
8562 gcc_assert (loc->dw_loc_oprnd1.val_class
8563 == dw_val_class_die_ref);
8564 loc->dw_loc_oprnd1.v.val_die_ref.die
8565 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8566 type_node,
8567 copied_dwarf_procs);
8568
8569 default:
8570 break;
8571 }
8572 }
8573 }
8574 }
8575
8576 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8577 rewrite references to point to the copies.
8578
8579 References are looked for in the location description attributes of DIE
8580 and, recursively, of all its children. COPIED_DWARF_PROCS is a mapping
8581 from old DWARF procedures to their copies; it is used to avoid copying
8582 the same DWARF procedure twice under TYPE_NODE. */
8583
8584 static void
8585 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8586 comdat_type_node *type_node,
8587 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8588 {
8589 dw_die_ref c;
8590
8591 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8592 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8593 type_node,
8594 copied_dwarf_procs));
8595 }
8596
8597 /* Traverse the DIE and set up additional .debug_types or .debug_info
8598 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8599 section. */
8600
8601 static void
8602 break_out_comdat_types (dw_die_ref die)
8603 {
8604 dw_die_ref c;
8605 dw_die_ref first;
8606 dw_die_ref prev = NULL;
8607 dw_die_ref next = NULL;
8608 dw_die_ref unit = NULL;
8609
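/* Walk the children with explicit PREV/NEXT pointers so that C can be
   removed from, or replaced in, the list while we iterate. */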
8610 first = c = die->die_child;
8611 if (c)
8612 next = c->die_sib;
8613 if (c) do {
8614 if (prev == NULL || prev->die_sib == c)
8615 prev = c;
8616 c = next;
8617 next = (c == first ? NULL : c->die_sib);
8618 if (should_move_die_to_comdat (c))
8619 {
8620 dw_die_ref replacement;
8621 comdat_type_node *type_node;
8622
8623 /* Break out nested types into their own type units. */
8624 break_out_comdat_types (c);
8625
8626 /* Create a new type unit DIE as the root for the new tree. */
8627 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8628 add_AT_unsigned (unit, DW_AT_language,
8629 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8630
8631 /* Add the new unit's type DIE into the comdat type list. */
8632 type_node = ggc_cleared_alloc<comdat_type_node> ();
8633 type_node->root_die = unit;
8634 type_node->next = comdat_type_list;
8635 comdat_type_list = type_node;
8636
8637 /* Generate the type signature. */
8638 generate_type_signature (c, type_node);
8639
8640 /* Copy the declaration context, attributes, and children of the
8641 declaration into the new type unit DIE, then remove this DIE
8642 from the main CU (or replace it with a skeleton if necessary). */
8643 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8644 type_node->skeleton_die = replacement;
8645
8646 /* Add the DIE to the new compunit. */
8647 add_child_die (unit, c);
8648
8649 /* Types can reference DWARF procedures for type size or data location
8650 expressions. Calls in DWARF expressions cannot target procedures
8651 that are not in the same section. So we must copy DWARF procedures
8652 along with this type and then rewrite references to them. */
8653 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8654 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8655
8656 if (replacement != NULL)
8657 c = replacement;
8658 }
8659 else if (c->die_tag == DW_TAG_namespace
8660 || c->die_tag == DW_TAG_class_type
8661 || c->die_tag == DW_TAG_structure_type
8662 || c->die_tag == DW_TAG_union_type)
8663 {
8664 /* Look for nested types that can be broken out. */
8665 break_out_comdat_types (c);
8666 }
8667 } while (next != NULL);
8668 }
8669
8670 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8671 Enter all the cloned children into the hash table decl_table. */
8672
8673 static dw_die_ref
8674 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8675 {
8676 dw_die_ref c;
8677 dw_die_ref clone;
8678 struct decl_table_entry *entry;
8679 decl_table_entry **slot;
8680
8681 if (die->die_tag == DW_TAG_subprogram)
8682 clone = clone_as_declaration (die);
8683 else
8684 clone = clone_die (die);
8685
8686 slot = decl_table->find_slot_with_hash (die,
8687 htab_hash_pointer (die), INSERT);
8688
8689 /* Assert that DIE isn't in the hash table yet. If it were already there,
8690 its ancestors would necessarily be there as well, and therefore
8691 clone_tree_partial wouldn't have been called. */
8692 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8693
8694 entry = XCNEW (struct decl_table_entry);
8695 entry->orig = die;
8696 entry->copy = clone;
8697 *slot = entry;
8698
8699 if (die->die_tag != DW_TAG_subprogram)
8700 FOR_EACH_CHILD (die, c,
8701 add_child_die (clone, clone_tree_partial (c, decl_table)));
8702
8703 return clone;
8704 }
8705
8706 /* Walk the DIE and its children, looking for references to incomplete
8707 or trivial types that are unmarked (i.e., that are not in the current
8708 type_unit). */
8709
8710 static void
8711 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8712 {
8713 dw_die_ref c;
8714 dw_attr_node *a;
8715 unsigned ix;
8716
8717 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8718 {
8719 if (AT_class (a) == dw_val_class_die_ref)
8720 {
8721 dw_die_ref targ = AT_ref (a);
8722 decl_table_entry **slot;
8723 struct decl_table_entry *entry;
8724
8725 if (targ->die_mark != 0 || targ->comdat_type_p)
8726 continue;
8727
8728 slot = decl_table->find_slot_with_hash (targ,
8729 htab_hash_pointer (targ),
8730 INSERT);
8731
8732 if (*slot != HTAB_EMPTY_ENTRY)
8733 {
8734 /* TARG has already been copied, so we just need to
8735 modify the reference to point to the copy. */
8736 entry = *slot;
8737 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8738 }
8739 else
8740 {
8741 dw_die_ref parent = unit;
8742 dw_die_ref copy = clone_die (targ);
8743
8744 /* Record in DECL_TABLE that TARG has been copied.
8745 Need to do this now, before the recursive call,
8746 because DECL_TABLE may be expanded and SLOT
8747 would no longer be a valid pointer. */
8748 entry = XCNEW (struct decl_table_entry);
8749 entry->orig = targ;
8750 entry->copy = copy;
8751 *slot = entry;
8752
8753 /* If TARG is not a declaration DIE, we need to copy its
8754 children. */
8755 if (!is_declaration_die (targ))
8756 {
8757 FOR_EACH_CHILD (
8758 targ, c,
8759 add_child_die (copy,
8760 clone_tree_partial (c, decl_table)));
8761 }
8762
8763 /* Make sure the cloned tree is marked as part of the
8764 type unit. */
8765 mark_dies (copy);
8766
8767 /* If TARG has surrounding context, copy its ancestor tree
8768 into the new type unit. */
8769 if (targ->die_parent != NULL
8770 && !is_unit_die (targ->die_parent))
8771 parent = copy_ancestor_tree (unit, targ->die_parent,
8772 decl_table);
8773
8774 add_child_die (parent, copy);
8775 a->dw_attr_val.v.val_die_ref.die = copy;
8776
8777 /* Make sure the newly-copied DIE is walked. If it was
8778 installed in a previously-added context, it won't
8779 get visited otherwise. */
8780 if (parent != unit)
8781 {
8782 /* Find the highest point of the newly-added tree,
8783 mark each node along the way, and walk from there. */
8784 parent->die_mark = 1;
8785 while (parent->die_parent
8786 && parent->die_parent->die_mark == 0)
8787 {
8788 parent = parent->die_parent;
8789 parent->die_mark = 1;
8790 }
8791 copy_decls_walk (unit, parent, decl_table);
8792 }
8793 }
8794 }
8795 }
8796
8797 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8798 }
8799
8800 /* Collect the skeleton DIEs that break_out_comdat_types has already created
8801 in DIE, and record them in DECL_TABLE. */
8802
8803 static void
8804 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8805 {
8806 dw_die_ref c;
8807
8808 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8809 {
8810 dw_die_ref targ = AT_ref (a);
8811 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8812 decl_table_entry **slot
8813 = decl_table->find_slot_with_hash (targ,
8814 htab_hash_pointer (targ),
8815 INSERT);
8816 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8817 /* Record in DECL_TABLE that TARG has been already copied
8818 by remove_child_or_replace_with_skeleton. */
8819 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8820 entry->orig = targ;
8821 entry->copy = die;
8822 *slot = entry;
8823 }
8824 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8825 }
8826
8827 /* Copy declarations for "unworthy" types into the new comdat section.
8828 Incomplete types, modified types, and certain other types aren't broken
8829 out into comdat sections of their own, so they don't have a signature,
8830 and we need to copy the declaration into the same section so that we
8831 don't have an external reference. */
8832
8833 static void
8834 copy_decls_for_unworthy_types (dw_die_ref unit)
8835 {
8836 mark_dies (unit);
8837 decl_hash_type decl_table (10);
8838 collect_skeleton_dies (unit, &decl_table);
8839 copy_decls_walk (unit, unit, &decl_table);
8840 unmark_dies (unit);
8841 }
8842
8843 /* Traverse the DIE and add a sibling attribute if it may have the
8844 effect of speeding up access to siblings. To save some space,
8845 avoid generating sibling attributes for DIEs without children. */
8846
8847 static void
8848 add_sibling_attributes (dw_die_ref die)
8849 {
8850 dw_die_ref c;
8851
8852 if (! die->die_child)
8853 return;
8854
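/* The children of a DIE form a circular list through die_sib, with the
   parent's die_child pointing at the last child, so every child except
   that last one has a following sibling to refer to. */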
8855 if (die->die_parent && die != die->die_parent->die_child)
8856 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8857
8858 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8859 }
8860
8861 /* Output all location lists for the DIE and its children. */
8862
8863 static void
8864 output_location_lists (dw_die_ref die)
8865 {
8866 dw_die_ref c;
8867 dw_attr_node *a;
8868 unsigned ix;
8869
8870 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8871 if (AT_class (a) == dw_val_class_loc_list)
8872 output_loc_list (AT_loc_list (a));
8873
8874 FOR_EACH_CHILD (die, c, output_location_lists (c));
8875 }
8876
8877 /* During assign_location_list_indexes and output_loclists_offsets this is
8878 the current index; afterwards it holds the number of assigned indexes
8879 (i.e. how large the .debug_loclists* offset table should be). */
8880 static unsigned int loc_list_idx;
8881
8882 /* Output all location list offsets for the DIE and its children. */
8883
8884 static void
8885 output_loclists_offsets (dw_die_ref die)
8886 {
8887 dw_die_ref c;
8888 dw_attr_node *a;
8889 unsigned ix;
8890
8891 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8892 if (AT_class (a) == dw_val_class_loc_list)
8893 {
8894 dw_loc_list_ref l = AT_loc_list (a);
8895 if (l->offset_emitted)
8896 continue;
8897 dw2_asm_output_delta (dwarf_offset_size, l->ll_symbol,
8898 loc_section_label, NULL);
8899 gcc_assert (l->hash == loc_list_idx);
8900 loc_list_idx++;
8901 l->offset_emitted = true;
8902 }
8903
8904 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8905 }
8906
8907 /* Recursively set indexes of location lists. */
8908
8909 static void
8910 assign_location_list_indexes (dw_die_ref die)
8911 {
8912 dw_die_ref c;
8913 dw_attr_node *a;
8914 unsigned ix;
8915
8916 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8917 if (AT_class (a) == dw_val_class_loc_list)
8918 {
8919 dw_loc_list_ref list = AT_loc_list (a);
8920 if (!list->num_assigned)
8921 {
8922 list->num_assigned = true;
8923 list->hash = loc_list_idx++;
8924 }
8925 }
8926
8927 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8928 }
8929
8930 /* We want to limit the number of external references, because they are
8931 larger than local references: a relocation takes multiple words, and
8932 even a sig8 reference is always eight bytes, whereas a local reference
8933 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8934 So if we encounter multiple external references to the same type DIE, we
8935 make a local typedef stub for it and redirect all references there.
8936
8937 This is the element of the hash table for keeping track of these
8938 references. */
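/* For example, several DW_FORM_ref_addr (or DW_FORM_ref_sig8) references to
   a type DIE in another unit can all be redirected to one unnamed
   DW_TAG_typedef (or signature-only) stub emitted in this unit, so each
   reference becomes a small local one; see dwarf2_build_local_stub below.  */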
8939
8940 struct external_ref
8941 {
8942 dw_die_ref type;
8943 dw_die_ref stub;
8944 unsigned n_refs;
8945 };
8946
8947 /* Hashtable helpers. */
8948
8949 struct external_ref_hasher : free_ptr_hash <external_ref>
8950 {
8951 static inline hashval_t hash (const external_ref *);
8952 static inline bool equal (const external_ref *, const external_ref *);
8953 };
8954
8955 inline hashval_t
8956 external_ref_hasher::hash (const external_ref *r)
8957 {
8958 dw_die_ref die = r->type;
8959 hashval_t h = 0;
8960
8961 /* We can't use the address of the DIE for hashing, because
8962 that will make the order of the stub DIEs non-deterministic. */
8963 if (! die->comdat_type_p)
8964 /* We have a symbol; use it to compute a hash. */
8965 h = htab_hash_string (die->die_id.die_symbol);
8966 else
8967 {
8968 /* We have a type signature; use a subset of the bits as the hash.
8969 The 8-byte signature is at least as large as hashval_t. */
8970 comdat_type_node *type_node = die->die_id.die_type_node;
8971 memcpy (&h, type_node->signature, sizeof (h));
8972 }
8973 return h;
8974 }
8975
8976 inline bool
8977 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8978 {
8979 return r1->type == r2->type;
8980 }
8981
8982 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8983
8984 /* Return a pointer to the external_ref for references to DIE. */
8985
8986 static struct external_ref *
8987 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8988 {
8989 struct external_ref ref, *ref_p;
8990 external_ref **slot;
8991
8992 ref.type = die;
8993 slot = map->find_slot (&ref, INSERT);
8994 if (*slot != HTAB_EMPTY_ENTRY)
8995 return *slot;
8996
8997 ref_p = XCNEW (struct external_ref);
8998 ref_p->type = die;
8999 *slot = ref_p;
9000 return ref_p;
9001 }
9002
9003 /* Subroutine of optimize_external_refs, below.
9004
9005 If we see a type skeleton, record it as our stub. If we see external
9006 references, remember how many we've seen. */
9007
9008 static void
9009 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
9010 {
9011 dw_die_ref c;
9012 dw_attr_node *a;
9013 unsigned ix;
9014 struct external_ref *ref_p;
9015
9016 if (is_type_die (die)
9017 && (c = get_AT_ref (die, DW_AT_signature)))
9018 {
9019 /* This is a local skeleton; use it for local references. */
9020 ref_p = lookup_external_ref (map, c);
9021 ref_p->stub = die;
9022 }
9023
9024 /* Scan the DIE references, and remember any that refer to DIEs from
9025 other CUs (i.e. those which are not marked). */
9026 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9027 if (AT_class (a) == dw_val_class_die_ref
9028 && (c = AT_ref (a))->die_mark == 0
9029 && is_type_die (c))
9030 {
9031 ref_p = lookup_external_ref (map, c);
9032 ref_p->n_refs++;
9033 }
9034
9035 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
9036 }
9037
9038 /* htab_traverse callback function for optimize_external_refs, below. SLOT
9039 points to an external_ref, DATA is the CU we're processing. If we don't
9040 already have a local stub, and we have multiple refs, build a stub. */
9041
9042 int
9043 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
9044 {
9045 struct external_ref *ref_p = *slot;
9046
9047 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9048 {
9049 /* We have multiple references to this type, so build a small stub.
9050 Both of these forms are a bit dodgy from the perspective of the
9051 DWARF standard, since technically they should have names. */
9052 dw_die_ref cu = data;
9053 dw_die_ref type = ref_p->type;
9054 dw_die_ref stub = NULL;
9055
9056 if (type->comdat_type_p)
9057 {
9058 /* If we refer to this type via sig8, use AT_signature. */
9059 stub = new_die (type->die_tag, cu, NULL_TREE);
9060 add_AT_die_ref (stub, DW_AT_signature, type);
9061 }
9062 else
9063 {
9064 /* Otherwise, use a typedef with no name. */
9065 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9066 add_AT_die_ref (stub, DW_AT_type, type);
9067 }
9068
9069 stub->die_mark++;
9070 ref_p->stub = stub;
9071 }
9072 return 1;
9073 }
9074
9075 /* DIE is a unit; look through all the DIE references to see if there are
9076 any external references to types, and if so, create local stubs for
9077 them which will be applied in build_abbrev_table. This is useful because
9078 references to local DIEs are smaller. */
9079
9080 static external_ref_hash_type *
9081 optimize_external_refs (dw_die_ref die)
9082 {
9083 external_ref_hash_type *map = new external_ref_hash_type (10);
9084 optimize_external_refs_1 (die, map);
9085 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9086 return map;
9087 }
9088
9089 /* The following 4 variables are temporaries that are computed only during the
9090 build_abbrev_table call and used and released during the following
9091 optimize_abbrev_table call. */
9092
9093 /* First abbrev_id that can be optimized based on usage. */
9094 static unsigned int abbrev_opt_start;
9095
9096 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9097 abbrev_id smaller than this, because they must be already sized
9098 during build_abbrev_table). */
9099 static unsigned int abbrev_opt_base_type_end;
9100
9101 /* Vector of usage counts during build_abbrev_table. Indexed by
9102 abbrev_id - abbrev_opt_start. */
9103 static vec<unsigned int> abbrev_usage_count;
9104
9105 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9106 static vec<dw_die_ref> sorted_abbrev_dies;
9107
9108 /* The format of each DIE (and its attribute value pairs) is encoded in an
9109 abbreviation table. This routine builds the abbreviation table and assigns
9110 a unique abbreviation id for each abbreviation entry. The children of each
9111 die are visited recursively. */
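/* Two DIEs can share an abbreviation entry only if they have the same tag,
   the same presence of children, and attribute lists whose attribute codes
   and forms match pairwise - that is what the matching loop below checks.  */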
9112
9113 static void
9114 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9115 {
9116 unsigned int abbrev_id = 0;
9117 dw_die_ref c;
9118 dw_attr_node *a;
9119 unsigned ix;
9120 dw_die_ref abbrev;
9121
9122 /* Scan the DIE references, and replace any that refer to
9123 DIEs from other CUs (i.e. those which are not marked) with
9124 the local stubs we built in optimize_external_refs. */
9125 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9126 if (AT_class (a) == dw_val_class_die_ref
9127 && (c = AT_ref (a))->die_mark == 0)
9128 {
9129 struct external_ref *ref_p;
9130 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9131
9132 if (is_type_die (c)
9133 && (ref_p = lookup_external_ref (extern_map, c))
9134 && ref_p->stub && ref_p->stub != die)
9135 {
9136 gcc_assert (a->dw_attr != DW_AT_signature);
9137 change_AT_die_ref (a, ref_p->stub);
9138 }
9139 else
9140 /* We aren't changing this reference, so mark it external. */
9141 set_AT_ref_external (a, 1);
9142 }
9143
9144 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9145 {
9146 dw_attr_node *die_a, *abbrev_a;
9147 unsigned ix;
9148 bool ok = true;
9149
9150 if (abbrev_id == 0)
9151 continue;
9152 if (abbrev->die_tag != die->die_tag)
9153 continue;
9154 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9155 continue;
9156
9157 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9158 continue;
9159
9160 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9161 {
9162 abbrev_a = &(*abbrev->die_attr)[ix];
9163 if ((abbrev_a->dw_attr != die_a->dw_attr)
9164 || (value_format (abbrev_a) != value_format (die_a)))
9165 {
9166 ok = false;
9167 break;
9168 }
9169 }
9170 if (ok)
9171 break;
9172 }
9173
9174 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9175 {
9176 vec_safe_push (abbrev_die_table, die);
9177 if (abbrev_opt_start)
9178 abbrev_usage_count.safe_push (0);
9179 }
9180 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9181 {
9182 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9183 sorted_abbrev_dies.safe_push (die);
9184 }
9185
9186 die->die_abbrev = abbrev_id;
9187 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9188 }
9189
9190 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9191 by die_abbrev's usage count, from the most commonly used
9192 abbreviation to the least. */
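/* Giving the most frequently used abbreviations the smallest ids pays off
   because each DIE encodes its abbreviation id as a uleb128, so ids up to
   127 cost only a single byte per DIE.  */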
9193
9194 static int
9195 die_abbrev_cmp (const void *p1, const void *p2)
9196 {
9197 dw_die_ref die1 = *(const dw_die_ref *) p1;
9198 dw_die_ref die2 = *(const dw_die_ref *) p2;
9199
9200 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9201 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9202
9203 if (die1->die_abbrev >= abbrev_opt_base_type_end
9204 && die2->die_abbrev >= abbrev_opt_base_type_end)
9205 {
9206 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9207 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9208 return -1;
9209 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9210 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9211 return 1;
9212 }
9213
9214 /* Stabilize the sort. */
9215 if (die1->die_abbrev < die2->die_abbrev)
9216 return -1;
9217 if (die1->die_abbrev > die2->die_abbrev)
9218 return 1;
9219
9220 return 0;
9221 }
9222
9223 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9224 of DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9225 into dw_val_class_const_implicit or
9226 dw_val_class_unsigned_const_implicit. */
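/* For instance, if every DIE sharing an abbreviation has DW_AT_decl_file 1,
   that value can move into the abbreviation as DW_FORM_implicit_const, and
   the DIEs themselves then carry no bytes for the attribute.  */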
9227
9228 static void
9229 optimize_implicit_const (unsigned int first_id, unsigned int end,
9230 vec<bool> &implicit_consts)
9231 {
9232 /* It never makes sense if there is just one DIE using the abbreviation. */
9233 if (end < first_id + 2)
9234 return;
9235
9236 dw_attr_node *a;
9237 unsigned ix, i;
9238 dw_die_ref die = sorted_abbrev_dies[first_id];
9239 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9240 if (implicit_consts[ix])
9241 {
9242 enum dw_val_class new_class = dw_val_class_none;
9243 switch (AT_class (a))
9244 {
9245 case dw_val_class_unsigned_const:
9246 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9247 continue;
9248
9249 /* The .debug_abbrev section will grow by
9250 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9251 in all the DIEs using that abbreviation. */
9252 if (constant_size (AT_unsigned (a)) * (end - first_id)
9253 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9254 continue;
9255
9256 new_class = dw_val_class_unsigned_const_implicit;
9257 break;
9258
9259 case dw_val_class_const:
9260 new_class = dw_val_class_const_implicit;
9261 break;
9262
9263 case dw_val_class_file:
9264 new_class = dw_val_class_file_implicit;
9265 break;
9266
9267 default:
9268 continue;
9269 }
9270 for (i = first_id; i < end; i++)
9271 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9272 = new_class;
9273 }
9274 }
9275
9276 /* Attempt to optimize the abbreviation table for abbreviations with ids
9277 of abbrev_opt_start and above. */
9278
9279 static void
9280 optimize_abbrev_table (void)
9281 {
9282 if (abbrev_opt_start
9283 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9284 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9285 {
9286 auto_vec<bool, 32> implicit_consts;
9287 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9288
9289 unsigned int abbrev_id = abbrev_opt_start - 1;
9290 unsigned int first_id = ~0U;
9291 unsigned int last_abbrev_id = 0;
9292 unsigned int i;
9293 dw_die_ref die;
9294 if (abbrev_opt_base_type_end > abbrev_opt_start)
9295 abbrev_id = abbrev_opt_base_type_end - 1;
9296 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9297 most commonly used abbreviations come first. */
9298 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9299 {
9300 dw_attr_node *a;
9301 unsigned ix;
9302
9303 /* If calc_base_type_die_sizes has been called, the CU and
9304 base types after it can't be optimized, because we've already
9305 calculated their DIE offsets. We've sorted them first. */
9306 if (die->die_abbrev < abbrev_opt_base_type_end)
9307 continue;
9308 if (die->die_abbrev != last_abbrev_id)
9309 {
9310 last_abbrev_id = die->die_abbrev;
9311 if (dwarf_version >= 5 && first_id != ~0U)
9312 optimize_implicit_const (first_id, i, implicit_consts);
9313 abbrev_id++;
9314 (*abbrev_die_table)[abbrev_id] = die;
9315 if (dwarf_version >= 5)
9316 {
9317 first_id = i;
9318 implicit_consts.truncate (0);
9319
9320 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9321 switch (AT_class (a))
9322 {
9323 case dw_val_class_const:
9324 case dw_val_class_unsigned_const:
9325 case dw_val_class_file:
9326 implicit_consts.safe_push (true);
9327 break;
9328 default:
9329 implicit_consts.safe_push (false);
9330 break;
9331 }
9332 }
9333 }
9334 else if (dwarf_version >= 5)
9335 {
9336 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9337 if (!implicit_consts[ix])
9338 continue;
9339 else
9340 {
9341 dw_attr_node *other_a
9342 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9343 if (!dw_val_equal_p (&a->dw_attr_val,
9344 &other_a->dw_attr_val))
9345 implicit_consts[ix] = false;
9346 }
9347 }
9348 die->die_abbrev = abbrev_id;
9349 }
9350 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9351 if (dwarf_version >= 5 && first_id != ~0U)
9352 optimize_implicit_const (first_id, i, implicit_consts);
9353 }
9354
9355 abbrev_opt_start = 0;
9356 abbrev_opt_base_type_end = 0;
9357 abbrev_usage_count.release ();
9358 sorted_abbrev_dies.release ();
9359 }
9360 \f
9361 /* Return the power-of-two number of bytes necessary to represent VALUE. */
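/* E.g. constant_size (200) is 1 (floor_log2 (200) == 7, 7 / 8 == 0), while
   constant_size (300) is 2 (floor_log2 (300) == 8, 8 / 8 == 1, rounded up to
   the next power of two); only 1, 2, 4 or 8 are ever returned.  */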
9362
9363 static int
9364 constant_size (unsigned HOST_WIDE_INT value)
9365 {
9366 int log;
9367
9368 if (value == 0)
9369 log = 0;
9370 else
9371 log = floor_log2 (value);
9372
9373 log = log / 8;
9374 log = 1 << (floor_log2 (log) + 1);
9375
9376 return log;
9377 }
9378
9379 /* Return the size of a DIE as it is represented in the
9380 .debug_info section. */
9381
9382 static unsigned long
9383 size_of_die (dw_die_ref die)
9384 {
9385 unsigned long size = 0;
9386 dw_attr_node *a;
9387 unsigned ix;
9388 enum dwarf_form form;
9389
9390 size += size_of_uleb128 (die->die_abbrev);
9391 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9392 {
9393 switch (AT_class (a))
9394 {
9395 case dw_val_class_addr:
9396 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9397 {
9398 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9399 size += size_of_uleb128 (AT_index (a));
9400 }
9401 else
9402 size += DWARF2_ADDR_SIZE;
9403 break;
9404 case dw_val_class_offset:
9405 size += dwarf_offset_size;
9406 break;
9407 case dw_val_class_loc:
9408 {
9409 unsigned long lsize = size_of_locs (AT_loc (a));
9410
9411 /* Block length. */
9412 if (dwarf_version >= 4)
9413 size += size_of_uleb128 (lsize);
9414 else
9415 size += constant_size (lsize);
9416 size += lsize;
9417 }
9418 break;
9419 case dw_val_class_loc_list:
9420 if (dwarf_split_debug_info && dwarf_version >= 5)
9421 {
9422 gcc_assert (AT_loc_list (a)->num_assigned);
9423 size += size_of_uleb128 (AT_loc_list (a)->hash);
9424 }
9425 else
9426 size += dwarf_offset_size;
9427 break;
9428 case dw_val_class_view_list:
9429 size += dwarf_offset_size;
9430 break;
9431 case dw_val_class_range_list:
9432 if (value_format (a) == DW_FORM_rnglistx)
9433 {
9434 gcc_assert (rnglist_idx);
9435 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9436 size += size_of_uleb128 (r->idx);
9437 }
9438 else
9439 size += dwarf_offset_size;
9440 break;
9441 case dw_val_class_const:
9442 size += size_of_sleb128 (AT_int (a));
9443 break;
9444 case dw_val_class_unsigned_const:
9445 {
9446 int csize = constant_size (AT_unsigned (a));
9447 if (dwarf_version == 3
9448 && a->dw_attr == DW_AT_data_member_location
9449 && csize >= 4)
9450 size += size_of_uleb128 (AT_unsigned (a));
9451 else
9452 size += csize;
9453 }
9454 break;
9455 case dw_val_class_symview:
9456 if (symview_upper_bound <= 0xff)
9457 size += 1;
9458 else if (symview_upper_bound <= 0xffff)
9459 size += 2;
9460 else if (symview_upper_bound <= 0xffffffff)
9461 size += 4;
9462 else
9463 size += 8;
9464 break;
9465 case dw_val_class_const_implicit:
9466 case dw_val_class_unsigned_const_implicit:
9467 case dw_val_class_file_implicit:
9468 /* These occupy no size in the DIE, just an extra sleb128 in
9469 .debug_abbrev. */
9470 break;
9471 case dw_val_class_const_double:
9472 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9473 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9474 size++; /* block */
9475 break;
9476 case dw_val_class_wide_int:
9477 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9478 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9479 if (get_full_len (*a->dw_attr_val.v.val_wide)
9480 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9481 size++; /* block */
9482 break;
9483 case dw_val_class_vec:
9484 size += constant_size (a->dw_attr_val.v.val_vec.length
9485 * a->dw_attr_val.v.val_vec.elt_size)
9486 + a->dw_attr_val.v.val_vec.length
9487 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9488 break;
9489 case dw_val_class_flag:
9490 if (dwarf_version >= 4)
9491 /* Currently all add_AT_flag calls pass in 1 as last argument,
9492 so DW_FORM_flag_present can be used. If that ever changes,
9493 we'll need to use DW_FORM_flag and have some optimization
9494 in build_abbrev_table that will change those to
9495 DW_FORM_flag_present if it is set to 1 in all DIEs using
9496 the same abbrev entry. */
9497 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9498 else
9499 size += 1;
9500 break;
9501 case dw_val_class_die_ref:
9502 if (AT_ref_external (a))
9503 {
9504 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9505 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9506 is sized by target address length, whereas in DWARF3
9507 it's always sized as an offset. */
9508 if (AT_ref (a)->comdat_type_p)
9509 size += DWARF_TYPE_SIGNATURE_SIZE;
9510 else if (dwarf_version == 2)
9511 size += DWARF2_ADDR_SIZE;
9512 else
9513 size += dwarf_offset_size;
9514 }
9515 else
9516 size += dwarf_offset_size;
9517 break;
9518 case dw_val_class_fde_ref:
9519 size += dwarf_offset_size;
9520 break;
9521 case dw_val_class_lbl_id:
9522 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9523 {
9524 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9525 size += size_of_uleb128 (AT_index (a));
9526 }
9527 else
9528 size += DWARF2_ADDR_SIZE;
9529 break;
9530 case dw_val_class_lineptr:
9531 case dw_val_class_macptr:
9532 case dw_val_class_loclistsptr:
9533 size += dwarf_offset_size;
9534 break;
9535 case dw_val_class_str:
9536 form = AT_string_form (a);
9537 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9538 size += dwarf_offset_size;
9539 else if (form == dwarf_FORM (DW_FORM_strx))
9540 size += size_of_uleb128 (AT_index (a));
9541 else
9542 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9543 break;
9544 case dw_val_class_file:
9545 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9546 break;
9547 case dw_val_class_data8:
9548 size += 8;
9549 break;
9550 case dw_val_class_vms_delta:
9551 size += dwarf_offset_size;
9552 break;
9553 case dw_val_class_high_pc:
9554 size += DWARF2_ADDR_SIZE;
9555 break;
9556 case dw_val_class_discr_value:
9557 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9558 break;
9559 case dw_val_class_discr_list:
9560 {
9561 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9562
9563 /* This is a block, so we have the block length and then its
9564 data. */
9565 size += constant_size (block_size) + block_size;
9566 }
9567 break;
9568 default:
9569 gcc_unreachable ();
9570 }
9571 }
9572
9573 return size;
9574 }
9575
9576 /* Size the debugging information associated with a given DIE. Visits the
9577 DIE's children recursively. Updates the global variable next_die_offset
9578 each time through. Uses the current value of next_die_offset to update the
9579 die_offset field in each DIE. */
9580
9581 static void
9582 calc_die_sizes (dw_die_ref die)
9583 {
9584 dw_die_ref c;
9585
9586 gcc_assert (die->die_offset == 0
9587 || (unsigned long int) die->die_offset == next_die_offset);
9588 die->die_offset = next_die_offset;
9589 next_die_offset += size_of_die (die);
9590
9591 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9592
9593 if (die->die_child != NULL)
9594 /* Count the null byte used to terminate sibling lists. */
9595 next_die_offset += 1;
9596 }
9597
9598 /* Size just the base type children at the start of the CU.
9599 This is needed because build_abbrev_table needs to size locs
9600 and sizing of type-based stack ops needs to know die_offset
9601 values for the base types. */
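/* The DWARF 5 typed stack operations (e.g. DW_OP_const_type, DW_OP_convert)
   refer to a base type by the uleb128 offset of its DIE within the CU, so
   those offsets must be known before location expressions can be sized.  */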
9602
9603 static void
9604 calc_base_type_die_sizes (void)
9605 {
9606 unsigned long die_offset = (dwarf_split_debug_info
9607 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9608 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9609 unsigned int i;
9610 dw_die_ref base_type;
9611 #if ENABLE_ASSERT_CHECKING
9612 dw_die_ref prev = comp_unit_die ()->die_child;
9613 #endif
9614
9615 die_offset += size_of_die (comp_unit_die ());
9616 for (i = 0; base_types.iterate (i, &base_type); i++)
9617 {
9618 #if ENABLE_ASSERT_CHECKING
9619 gcc_assert (base_type->die_offset == 0
9620 && prev->die_sib == base_type
9621 && base_type->die_child == NULL
9622 && base_type->die_abbrev);
9623 prev = base_type;
9624 #endif
9625 if (abbrev_opt_start
9626 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9627 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9628 base_type->die_offset = die_offset;
9629 die_offset += size_of_die (base_type);
9630 }
9631 }
9632
9633 /* Set the marks for a die and its children. We do this so
9634 that we know whether or not a reference needs to use FORM_ref_addr; only
9635 DIEs in the same CU will be marked. We used to clear out the offset
9636 and use that as the flag, but ran into ordering problems. */
9637
9638 static void
9639 mark_dies (dw_die_ref die)
9640 {
9641 dw_die_ref c;
9642
9643 gcc_assert (!die->die_mark);
9644
9645 die->die_mark = 1;
9646 FOR_EACH_CHILD (die, c, mark_dies (c));
9647 }
9648
9649 /* Clear the marks for a die and its children. */
9650
9651 static void
9652 unmark_dies (dw_die_ref die)
9653 {
9654 dw_die_ref c;
9655
9656 if (! use_debug_types)
9657 gcc_assert (die->die_mark);
9658
9659 die->die_mark = 0;
9660 FOR_EACH_CHILD (die, c, unmark_dies (c));
9661 }
9662
9663 /* Clear the marks for a die, its children and referred dies. */
9664
9665 static void
9666 unmark_all_dies (dw_die_ref die)
9667 {
9668 dw_die_ref c;
9669 dw_attr_node *a;
9670 unsigned ix;
9671
9672 if (!die->die_mark)
9673 return;
9674 die->die_mark = 0;
9675
9676 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9677
9678 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9679 if (AT_class (a) == dw_val_class_die_ref)
9680 unmark_all_dies (AT_ref (a));
9681 }
9682
9683 /* Calculate if the entry should appear in the final output file. It may be
9684 from a pruned type. */
9685
9686 static bool
9687 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9688 {
9689 /* By limiting gnu pubnames to definitions only, gold can generate a
9690 gdb index without entries for declarations, which don't include
9691 enough information to be useful. */
9692 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9693 return false;
9694
9695 if (table == pubname_table)
9696 {
9697 /* Enumerator names are part of the pubname table, but the
9698 parent DW_TAG_enumeration_type die may have been pruned.
9699 Don't output them if that is the case. */
9700 if (p->die->die_tag == DW_TAG_enumerator &&
9701 (p->die->die_parent == NULL
9702 || !p->die->die_parent->die_perennial_p))
9703 return false;
9704
9705 /* Everything else in the pubname table is included. */
9706 return true;
9707 }
9708
9709 /* The pubtypes table shouldn't include types that have been
9710 pruned. */
9711 return (p->die->die_offset != 0
9712 || !flag_eliminate_unused_debug_types);
9713 }
9714
9715 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9716 generated for the compilation unit. */
9717
9718 static unsigned long
9719 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9720 {
9721 unsigned long size;
9722 unsigned i;
9723 pubname_entry *p;
9724 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9725
9726 size = DWARF_PUBNAMES_HEADER_SIZE;
9727 FOR_EACH_VEC_ELT (*names, i, p)
9728 if (include_pubname_in_output (names, p))
9729 size += strlen (p->name) + dwarf_offset_size + 1 + space_for_flags;
9730
9731 size += dwarf_offset_size;
9732 return size;
9733 }
9734
9735 /* Return the size of the information in the .debug_aranges section. */
9736
9737 static unsigned long
9738 size_of_aranges (void)
9739 {
9740 unsigned long size;
9741
9742 size = DWARF_ARANGES_HEADER_SIZE;
9743
9744 /* Count the address/length pair for this compilation unit. */
9745 if (text_section_used)
9746 size += 2 * DWARF2_ADDR_SIZE;
9747 if (cold_text_section_used)
9748 size += 2 * DWARF2_ADDR_SIZE;
9749 if (have_multiple_function_sections)
9750 {
9751 unsigned fde_idx;
9752 dw_fde_ref fde;
9753
9754 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9755 {
9756 if (DECL_IGNORED_P (fde->decl))
9757 continue;
9758 if (!fde->in_std_section)
9759 size += 2 * DWARF2_ADDR_SIZE;
9760 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9761 size += 2 * DWARF2_ADDR_SIZE;
9762 }
9763 }
9764
9765 /* Count the two zero words used to terminate the address range table. */
9766 size += 2 * DWARF2_ADDR_SIZE;
9767 return size;
9768 }
9769 \f
9770 /* Select the encoding of an attribute value. */
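/* For example, a dw_val_class_unsigned_const attribute with value 300 is
   normally encoded as DW_FORM_data2, since constant_size (300) is 2.  */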
9771
9772 static enum dwarf_form
9773 value_format (dw_attr_node *a)
9774 {
9775 switch (AT_class (a))
9776 {
9777 case dw_val_class_addr:
9778 /* Only very few attributes allow DW_FORM_addr. */
9779 switch (a->dw_attr)
9780 {
9781 case DW_AT_low_pc:
9782 case DW_AT_high_pc:
9783 case DW_AT_entry_pc:
9784 case DW_AT_trampoline:
9785 return (AT_index (a) == NOT_INDEXED
9786 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9787 default:
9788 break;
9789 }
9790 switch (DWARF2_ADDR_SIZE)
9791 {
9792 case 1:
9793 return DW_FORM_data1;
9794 case 2:
9795 return DW_FORM_data2;
9796 case 4:
9797 return DW_FORM_data4;
9798 case 8:
9799 return DW_FORM_data8;
9800 default:
9801 gcc_unreachable ();
9802 }
9803 case dw_val_class_loc_list:
9804 if (dwarf_split_debug_info
9805 && dwarf_version >= 5
9806 && AT_loc_list (a)->num_assigned)
9807 return DW_FORM_loclistx;
9808 /* FALLTHRU */
9809 case dw_val_class_view_list:
9810 case dw_val_class_range_list:
9811 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo,
9812 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9813 care about the sizes of .debug* sections in shared libraries and
9814 executables and don't take into account relocations that affect just
9815 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9816 table in the .debug_rnglists section. */
9817 if (dwarf_split_debug_info
9818 && dwarf_version >= 5
9819 && AT_class (a) == dw_val_class_range_list
9820 && rnglist_idx
9821 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9822 return DW_FORM_rnglistx;
9823 if (dwarf_version >= 4)
9824 return DW_FORM_sec_offset;
9825 /* FALLTHRU */
9826 case dw_val_class_vms_delta:
9827 case dw_val_class_offset:
9828 switch (dwarf_offset_size)
9829 {
9830 case 4:
9831 return DW_FORM_data4;
9832 case 8:
9833 return DW_FORM_data8;
9834 default:
9835 gcc_unreachable ();
9836 }
9837 case dw_val_class_loc:
9838 if (dwarf_version >= 4)
9839 return DW_FORM_exprloc;
9840 switch (constant_size (size_of_locs (AT_loc (a))))
9841 {
9842 case 1:
9843 return DW_FORM_block1;
9844 case 2:
9845 return DW_FORM_block2;
9846 case 4:
9847 return DW_FORM_block4;
9848 default:
9849 gcc_unreachable ();
9850 }
9851 case dw_val_class_const:
9852 return DW_FORM_sdata;
9853 case dw_val_class_unsigned_const:
9854 switch (constant_size (AT_unsigned (a)))
9855 {
9856 case 1:
9857 return DW_FORM_data1;
9858 case 2:
9859 return DW_FORM_data2;
9860 case 4:
9861 /* In DWARF3 DW_AT_data_member_location with
9862 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9863 constant, so we need to use DW_FORM_udata if we need
9864 a large constant. */
9865 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9866 return DW_FORM_udata;
9867 return DW_FORM_data4;
9868 case 8:
9869 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9870 return DW_FORM_udata;
9871 return DW_FORM_data8;
9872 default:
9873 gcc_unreachable ();
9874 }
9875 case dw_val_class_const_implicit:
9876 case dw_val_class_unsigned_const_implicit:
9877 case dw_val_class_file_implicit:
9878 return DW_FORM_implicit_const;
9879 case dw_val_class_const_double:
9880 switch (HOST_BITS_PER_WIDE_INT)
9881 {
9882 case 8:
9883 return DW_FORM_data2;
9884 case 16:
9885 return DW_FORM_data4;
9886 case 32:
9887 return DW_FORM_data8;
9888 case 64:
9889 if (dwarf_version >= 5)
9890 return DW_FORM_data16;
9891 /* FALLTHRU */
9892 default:
9893 return DW_FORM_block1;
9894 }
9895 case dw_val_class_wide_int:
9896 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9897 {
9898 case 8:
9899 return DW_FORM_data1;
9900 case 16:
9901 return DW_FORM_data2;
9902 case 32:
9903 return DW_FORM_data4;
9904 case 64:
9905 return DW_FORM_data8;
9906 case 128:
9907 if (dwarf_version >= 5)
9908 return DW_FORM_data16;
9909 /* FALLTHRU */
9910 default:
9911 return DW_FORM_block1;
9912 }
9913 case dw_val_class_symview:
9914 /* ??? We might use uleb128, but then we'd have to compute
9915 .debug_info offsets in the assembler. */
9916 if (symview_upper_bound <= 0xff)
9917 return DW_FORM_data1;
9918 else if (symview_upper_bound <= 0xffff)
9919 return DW_FORM_data2;
9920 else if (symview_upper_bound <= 0xffffffff)
9921 return DW_FORM_data4;
9922 else
9923 return DW_FORM_data8;
9924 case dw_val_class_vec:
9925 switch (constant_size (a->dw_attr_val.v.val_vec.length
9926 * a->dw_attr_val.v.val_vec.elt_size))
9927 {
9928 case 1:
9929 return DW_FORM_block1;
9930 case 2:
9931 return DW_FORM_block2;
9932 case 4:
9933 return DW_FORM_block4;
9934 default:
9935 gcc_unreachable ();
9936 }
9937 case dw_val_class_flag:
9938 if (dwarf_version >= 4)
9939 {
9940 /* Currently all add_AT_flag calls pass in 1 as last argument,
9941 so DW_FORM_flag_present can be used. If that ever changes,
9942 we'll need to use DW_FORM_flag and have some optimization
9943 in build_abbrev_table that will change those to
9944 DW_FORM_flag_present if it is set to 1 in all DIEs using
9945 the same abbrev entry. */
9946 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9947 return DW_FORM_flag_present;
9948 }
9949 return DW_FORM_flag;
9950 case dw_val_class_die_ref:
9951 if (AT_ref_external (a))
9952 {
9953 if (AT_ref (a)->comdat_type_p)
9954 return DW_FORM_ref_sig8;
9955 else
9956 return DW_FORM_ref_addr;
9957 }
9958 else
9959 return DW_FORM_ref;
9960 case dw_val_class_fde_ref:
9961 return DW_FORM_data;
9962 case dw_val_class_lbl_id:
9963 return (AT_index (a) == NOT_INDEXED
9964 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9965 case dw_val_class_lineptr:
9966 case dw_val_class_macptr:
9967 case dw_val_class_loclistsptr:
9968 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9969 case dw_val_class_str:
9970 return AT_string_form (a);
9971 case dw_val_class_file:
9972 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9973 {
9974 case 1:
9975 return DW_FORM_data1;
9976 case 2:
9977 return DW_FORM_data2;
9978 case 4:
9979 return DW_FORM_data4;
9980 default:
9981 gcc_unreachable ();
9982 }
9983
9984 case dw_val_class_data8:
9985 return DW_FORM_data8;
9986
9987 case dw_val_class_high_pc:
9988 switch (DWARF2_ADDR_SIZE)
9989 {
9990 case 1:
9991 return DW_FORM_data1;
9992 case 2:
9993 return DW_FORM_data2;
9994 case 4:
9995 return DW_FORM_data4;
9996 case 8:
9997 return DW_FORM_data8;
9998 default:
9999 gcc_unreachable ();
10000 }
10001
10002 case dw_val_class_discr_value:
10003 return (a->dw_attr_val.v.val_discr_value.pos
10004 ? DW_FORM_udata
10005 : DW_FORM_sdata);
10006 case dw_val_class_discr_list:
10007 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
10008 {
10009 case 1:
10010 return DW_FORM_block1;
10011 case 2:
10012 return DW_FORM_block2;
10013 case 4:
10014 return DW_FORM_block4;
10015 default:
10016 gcc_unreachable ();
10017 }
10018
10019 default:
10020 gcc_unreachable ();
10021 }
10022 }
10023
10024 /* Output the encoding of an attribute value. */
10025
10026 static void
10027 output_value_format (dw_attr_node *a)
10028 {
10029 enum dwarf_form form = value_format (a);
10030
10031 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
10032 }
10033
10034 /* Given a die and id, produce the appropriate abbreviations. */
10035
10036 static void
10037 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
10038 {
10039 unsigned ix;
10040 dw_attr_node *a_attr;
10041
10042 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
10043 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
10044 dwarf_tag_name (abbrev->die_tag));
10045
10046 if (abbrev->die_child != NULL)
10047 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10048 else
10049 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10050
10051 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10052 {
10053 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10054 dwarf_attr_name (a_attr->dw_attr));
10055 output_value_format (a_attr);
10056 if (value_format (a_attr) == DW_FORM_implicit_const)
10057 {
10058 if (AT_class (a_attr) == dw_val_class_file_implicit)
10059 {
10060 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10061 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10062 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10063 }
10064 else
10065 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10066 }
10067 }
10068
10069 dw2_asm_output_data (1, 0, NULL);
10070 dw2_asm_output_data (1, 0, NULL);
10071 }
10072
10073
10074 /* Output the .debug_abbrev section which defines the DIE abbreviation
10075 table. */
10076
10077 static void
10078 output_abbrev_section (void)
10079 {
10080 unsigned int abbrev_id;
10081 dw_die_ref abbrev;
10082
10083 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10084 if (abbrev_id != 0)
10085 output_die_abbrevs (abbrev_id, abbrev);
10086
10087 /* Terminate the table. */
10088 dw2_asm_output_data (1, 0, NULL);
10089 }
10090
10091 /* Return a new location list, given the begin and end range, and the
10092 expression. */
10093
10094 static inline dw_loc_list_ref
10095 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10096 const char *end, var_loc_view vend,
10097 const char *section)
10098 {
10099 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10100
10101 retlist->begin = begin;
10102 retlist->begin_entry = NULL;
10103 retlist->end = end;
10104 retlist->expr = expr;
10105 retlist->section = section;
10106 retlist->vbegin = vbegin;
10107 retlist->vend = vend;
10108
10109 return retlist;
10110 }
10111
10112 /* Return true iff there's any nonzero view number in the loc list.
10113
10114 ??? When views are not enabled, we'll often extend a single range
10115 to the entire function, so that we emit a single location
10116 expression rather than a location list. With views, even with a
10117 single range, we'll output a list if start or end have a nonzero
10118 view. If we change this, we may want to stop splitting a single
10119 range in dw_loc_list just because of a nonzero view, even if it
10120 straddles across hot/cold partitions. */
10121
10122 static bool
10123 loc_list_has_views (dw_loc_list_ref list)
10124 {
10125 if (!debug_variable_location_views)
10126 return false;
10127
10128 for (dw_loc_list_ref loc = list;
10129 loc != NULL; loc = loc->dw_loc_next)
10130 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10131 return true;
10132
10133 return false;
10134 }
10135
10136 /* Generate a new internal symbol for this location list node, if it
10137 hasn't got one yet. */
10138
10139 static inline void
10140 gen_llsym (dw_loc_list_ref list)
10141 {
10142 gcc_assert (!list->ll_symbol);
10143 list->ll_symbol = gen_internal_sym ("LLST");
10144
10145 if (!loc_list_has_views (list))
10146 return;
10147
10148 if (dwarf2out_locviews_in_attribute ())
10149 {
10150 /* Use the same label_num for the view list. */
10151 label_num--;
10152 list->vl_symbol = gen_internal_sym ("LVUS");
10153 }
10154 else
10155 list->vl_symbol = list->ll_symbol;
10156 }
10157
10158 /* Generate a symbol for the list, but only if we really want to emit
10159 it as a list. */
10160
10161 static inline void
10162 maybe_gen_llsym (dw_loc_list_ref list)
10163 {
10164 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10165 return;
10166
10167 gen_llsym (list);
10168 }
10169
10170 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10171 NULL, don't consider size of the location expression. If we're not
10172 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10173 representation in *SIZEP. */
10174
10175 static bool
10176 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10177 {
10178 /* Don't output an entry that starts and ends at the same address. */
10179 if (strcmp (curr->begin, curr->end) == 0
10180 && curr->vbegin == curr->vend && !curr->force)
10181 return true;
10182
10183 if (!sizep)
10184 return false;
10185
10186 unsigned long size = size_of_locs (curr->expr);
10187
10188 /* If the expression is too large, drop it on the floor. We could
10189 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10190 in the expression, but >= 64KB expressions for a single value
10191 in a single range are unlikely to be very useful. */
10192 if (dwarf_version < 5 && size > 0xffff)
10193 return true;
10194
10195 *sizep = size;
10196
10197 return false;
10198 }
10199
10200 /* Output a view pair loclist entry for CURR, if it requires one. */
10201
10202 static void
10203 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10204 {
10205 if (!dwarf2out_locviews_in_loclist ())
10206 return;
10207
10208 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10209 return;
10210
10211 #ifdef DW_LLE_view_pair
10212 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10213
10214 if (dwarf2out_as_locview_support)
10215 {
10216 if (ZERO_VIEW_P (curr->vbegin))
10217 dw2_asm_output_data_uleb128 (0, "Location view begin");
10218 else
10219 {
10220 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10221 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10222 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10223 }
10224
10225 if (ZERO_VIEW_P (curr->vend))
10226 dw2_asm_output_data_uleb128 (0, "Location view end");
10227 else
10228 {
10229 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10230 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10231 dw2_asm_output_symname_uleb128 (label, "Location view end");
10232 }
10233 }
10234 else
10235 {
10236 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10237 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10238 }
10239 #endif /* DW_LLE_view_pair */
10240
10241 return;
10242 }
10243
10244 /* Output the location list given to us. */
10245
10246 static void
10247 output_loc_list (dw_loc_list_ref list_head)
10248 {
10249 int vcount = 0, lcount = 0;
10250
10251 if (list_head->emitted)
10252 return;
10253 list_head->emitted = true;
10254
10255 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10256 {
10257 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10258
10259 for (dw_loc_list_ref curr = list_head; curr != NULL;
10260 curr = curr->dw_loc_next)
10261 {
10262 unsigned long size;
10263
10264 if (skip_loc_list_entry (curr, &size))
10265 continue;
10266
10267 vcount++;
10268
10269 /* ?? dwarf_split_debug_info? */
10270 if (dwarf2out_as_locview_support)
10271 {
10272 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10273
10274 if (!ZERO_VIEW_P (curr->vbegin))
10275 {
10276 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10277 dw2_asm_output_symname_uleb128 (label,
10278 "View list begin (%s)",
10279 list_head->vl_symbol);
10280 }
10281 else
10282 dw2_asm_output_data_uleb128 (0,
10283 "View list begin (%s)",
10284 list_head->vl_symbol);
10285
10286 if (!ZERO_VIEW_P (curr->vend))
10287 {
10288 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10289 dw2_asm_output_symname_uleb128 (label,
10290 "View list end (%s)",
10291 list_head->vl_symbol);
10292 }
10293 else
10294 dw2_asm_output_data_uleb128 (0,
10295 "View list end (%s)",
10296 list_head->vl_symbol);
10297 }
10298 else
10299 {
10300 dw2_asm_output_data_uleb128 (curr->vbegin,
10301 "View list begin (%s)",
10302 list_head->vl_symbol);
10303 dw2_asm_output_data_uleb128 (curr->vend,
10304 "View list end (%s)",
10305 list_head->vl_symbol);
10306 }
10307 }
10308 }
10309
10310 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10311
10312 const char *last_section = NULL;
10313 const char *base_label = NULL;
10314
10315 /* Walk the location list, and output each range + expression. */
10316 for (dw_loc_list_ref curr = list_head; curr != NULL;
10317 curr = curr->dw_loc_next)
10318 {
10319 unsigned long size;
10320
10321 /* Skip this entry? If we skip it here, we must skip it in the
10322 view list above as well. */
10323 if (skip_loc_list_entry (curr, &size))
10324 continue;
10325
10326 lcount++;
10327
10328 if (dwarf_version >= 5)
10329 {
10330 if (dwarf_split_debug_info)
10331 {
10332 dwarf2out_maybe_output_loclist_view_pair (curr);
10333 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10334 uleb128 index into .debug_addr and uleb128 length. */
10335 dw2_asm_output_data (1, DW_LLE_startx_length,
10336 "DW_LLE_startx_length (%s)",
10337 list_head->ll_symbol);
10338 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10339 "Location list range start index "
10340 "(%s)", curr->begin);
10341 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10342 For that case we probably need to emit DW_LLE_startx_endx,
10343 but we'd need 2 .debug_addr entries rather than just one. */
10344 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10345 "Location list length (%s)",
10346 list_head->ll_symbol);
10347 }
10348 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10349 {
10350 dwarf2out_maybe_output_loclist_view_pair (curr);
10351 /* If all code is in .text section, the base address is
10352 already provided by the CU attributes. Use
10353 DW_LLE_offset_pair where both addresses are uleb128 encoded
10354 offsets against that base. */
10355 dw2_asm_output_data (1, DW_LLE_offset_pair,
10356 "DW_LLE_offset_pair (%s)",
10357 list_head->ll_symbol);
10358 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10359 "Location list begin address (%s)",
10360 list_head->ll_symbol);
10361 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10362 "Location list end address (%s)",
10363 list_head->ll_symbol);
10364 }
10365 else if (HAVE_AS_LEB128)
10366 {
10367 /* Otherwise, find out how many consecutive entries could share
10368 the same base entry. If just one, emit DW_LLE_start_length,
10369 otherwise emit DW_LLE_base_address for the base address
10370 followed by a series of DW_LLE_offset_pair. */
10371 if (last_section == NULL || curr->section != last_section)
10372 {
10373 dw_loc_list_ref curr2;
10374 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10375 curr2 = curr2->dw_loc_next)
10376 {
10377 if (strcmp (curr2->begin, curr2->end) == 0
10378 && !curr2->force)
10379 continue;
10380 break;
10381 }
10382 if (curr2 == NULL || curr->section != curr2->section)
10383 last_section = NULL;
10384 else
10385 {
10386 last_section = curr->section;
10387 base_label = curr->begin;
10388 dw2_asm_output_data (1, DW_LLE_base_address,
10389 "DW_LLE_base_address (%s)",
10390 list_head->ll_symbol);
10391 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10392 "Base address (%s)",
10393 list_head->ll_symbol);
10394 }
10395 }
10396 /* Only one entry with the same base address. Use
10397 DW_LLE_start_length with absolute address and uleb128
10398 length. */
10399 if (last_section == NULL)
10400 {
10401 dwarf2out_maybe_output_loclist_view_pair (curr);
10402 dw2_asm_output_data (1, DW_LLE_start_length,
10403 "DW_LLE_start_length (%s)",
10404 list_head->ll_symbol);
10405 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10406 "Location list begin address (%s)",
10407 list_head->ll_symbol);
10408 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10409 "Location list length "
10410 "(%s)", list_head->ll_symbol);
10411 }
10412 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10413 DW_LLE_base_address. */
10414 else
10415 {
10416 dwarf2out_maybe_output_loclist_view_pair (curr);
10417 dw2_asm_output_data (1, DW_LLE_offset_pair,
10418 "DW_LLE_offset_pair (%s)",
10419 list_head->ll_symbol);
10420 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10421 "Location list begin address "
10422 "(%s)", list_head->ll_symbol);
10423 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10424 "Location list end address "
10425 "(%s)", list_head->ll_symbol);
10426 }
10427 }
10428 /* The assembler does not support the .uleb128 directive. Emit
10429 DW_LLE_start_end with a pair of absolute addresses. */
10430 else
10431 {
10432 dwarf2out_maybe_output_loclist_view_pair (curr);
10433 dw2_asm_output_data (1, DW_LLE_start_end,
10434 "DW_LLE_start_end (%s)",
10435 list_head->ll_symbol);
10436 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10437 "Location list begin address (%s)",
10438 list_head->ll_symbol);
10439 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10440 "Location list end address (%s)",
10441 list_head->ll_symbol);
10442 }
10443 }
10444 else if (dwarf_split_debug_info)
10445 {
10446 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit an index into .debug_addr
10447 and a 4-byte length. */
10448 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10449 "Location list start/length entry (%s)",
10450 list_head->ll_symbol);
10451 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10452 "Location list range start index (%s)",
10453 curr->begin);
10454 /* The length field is 4 bytes. If we ever need to support
10455 an 8-byte length, we can add a new DW_LLE code or fall back
10456 to DW_LLE_GNU_start_end_entry. */
10457 dw2_asm_output_delta (4, curr->end, curr->begin,
10458 "Location list range length (%s)",
10459 list_head->ll_symbol);
10460 }
10461 else if (!have_multiple_function_sections)
10462 {
10463 /* Pair of relative addresses against start of text section. */
10464 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10465 "Location list begin address (%s)",
10466 list_head->ll_symbol);
10467 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10468 "Location list end address (%s)",
10469 list_head->ll_symbol);
10470 }
10471 else
10472 {
10473 /* Pair of absolute addresses. */
10474 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10475 "Location list begin address (%s)",
10476 list_head->ll_symbol);
10477 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10478 "Location list end address (%s)",
10479 list_head->ll_symbol);
10480 }
10481
10482 /* Output the block length for this list of location operations. */
10483 if (dwarf_version >= 5)
10484 dw2_asm_output_data_uleb128 (size, "Location expression size");
10485 else
10486 {
10487 gcc_assert (size <= 0xffff);
10488 dw2_asm_output_data (2, size, "Location expression size");
10489 }
10490
10491 output_loc_sequence (curr->expr, -1);
10492 }
10493
10494 /* And finally list termination. */
10495 if (dwarf_version >= 5)
10496 dw2_asm_output_data (1, DW_LLE_end_of_list,
10497 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10498 else if (dwarf_split_debug_info)
10499 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10500 "Location list terminator (%s)",
10501 list_head->ll_symbol);
10502 else
10503 {
10504 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10505 "Location list terminator begin (%s)",
10506 list_head->ll_symbol);
10507 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10508 "Location list terminator end (%s)",
10509 list_head->ll_symbol);
10510 }
10511
10512 gcc_assert (!list_head->vl_symbol
10513 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10514 }
10515
10516 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10517 section. Emit a relocated reference if val_entry is NULL, otherwise,
10518 emit an indirect reference. */
10519
10520 static void
10521 output_range_list_offset (dw_attr_node *a)
10522 {
10523 const char *name = dwarf_attr_name (a->dw_attr);
10524
10525 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10526 {
10527 if (dwarf_version >= 5)
10528 {
10529 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10530 dw2_asm_output_offset (dwarf_offset_size, r->label,
10531 debug_ranges_section, "%s", name);
10532 }
10533 else
10534 {
10535 char *p = strchr (ranges_section_label, '\0');
10536 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10537 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10538 dw2_asm_output_offset (dwarf_offset_size, ranges_section_label,
10539 debug_ranges_section, "%s", name);
10540 *p = '\0';
10541 }
10542 }
10543 else if (dwarf_version >= 5)
10544 {
10545 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10546 gcc_assert (rnglist_idx);
10547 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10548 }
10549 else
10550 dw2_asm_output_data (dwarf_offset_size,
10551 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10552 "%s (offset from %s)", name, ranges_section_label);
10553 }
10554
10555 /* Output the offset into the debug_loc section. */
10556
10557 static void
10558 output_loc_list_offset (dw_attr_node *a)
10559 {
10560 char *sym = AT_loc_list (a)->ll_symbol;
10561
10562 gcc_assert (sym);
10563 if (!dwarf_split_debug_info)
10564 dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section,
10565 "%s", dwarf_attr_name (a->dw_attr));
10566 else if (dwarf_version >= 5)
10567 {
10568 gcc_assert (AT_loc_list (a)->num_assigned);
10569 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10570 dwarf_attr_name (a->dw_attr),
10571 sym);
10572 }
10573 else
10574 dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label,
10575 "%s", dwarf_attr_name (a->dw_attr));
10576 }
10577
10578 /* Output the view list offset into the debug_loc section. */
10579
10580 static void
10581 output_view_list_offset (dw_attr_node *a)
10582 {
10583 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10584
10585 gcc_assert (sym);
10586 if (dwarf_split_debug_info)
10587 dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label,
10588 "%s", dwarf_attr_name (a->dw_attr));
10589 else
10590 dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section,
10591 "%s", dwarf_attr_name (a->dw_attr));
10592 }
10593
10594 /* Output an attribute's index or value appropriately. */
10595
10596 static void
10597 output_attr_index_or_value (dw_attr_node *a)
10598 {
10599 const char *name = dwarf_attr_name (a->dw_attr);
10600
10601 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10602 {
10603 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10604 return;
10605 }
10606 switch (AT_class (a))
10607 {
10608 case dw_val_class_addr:
10609 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10610 break;
10611 case dw_val_class_high_pc:
10612 case dw_val_class_lbl_id:
10613 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10614 break;
10615 default:
10616 gcc_unreachable ();
10617 }
10618 }
10619
10620 /* Output a type signature. */
10621
10622 static inline void
10623 output_signature (const char *sig, const char *name)
10624 {
10625 int i;
10626
10627 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10628 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10629 }
10630
10631 /* Output a discriminant value. */
10632
10633 static inline void
10634 output_discr_value (dw_discr_value *discr_value, const char *name)
10635 {
10636 if (discr_value->pos)
10637 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10638 else
10639 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10640 }
10641
10642 /* Output the DIE and its attributes. Called recursively to generate
10643 the definitions of each child DIE. */
10644
10645 static void
10646 output_die (dw_die_ref die)
10647 {
10648 dw_attr_node *a;
10649 dw_die_ref c;
10650 unsigned long size;
10651 unsigned ix;
10652
10653 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10654 (unsigned long)die->die_offset,
10655 dwarf_tag_name (die->die_tag));
10656
10657 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10658 {
10659 const char *name = dwarf_attr_name (a->dw_attr);
10660
10661 switch (AT_class (a))
10662 {
10663 case dw_val_class_addr:
10664 output_attr_index_or_value (a);
10665 break;
10666
10667 case dw_val_class_offset:
10668 dw2_asm_output_data (dwarf_offset_size, a->dw_attr_val.v.val_offset,
10669 "%s", name);
10670 break;
10671
10672 case dw_val_class_range_list:
10673 output_range_list_offset (a);
10674 break;
10675
10676 case dw_val_class_loc:
10677 size = size_of_locs (AT_loc (a));
10678
10679 /* Output the block length for this list of location operations. */
10680 if (dwarf_version >= 4)
10681 dw2_asm_output_data_uleb128 (size, "%s", name);
10682 else
10683 dw2_asm_output_data (constant_size (size), size, "%s", name);
10684
10685 output_loc_sequence (AT_loc (a), -1);
10686 break;
10687
10688 case dw_val_class_const:
10689 /* ??? It would be slightly more efficient to use a scheme like the one
10690 used for unsigned constants below, but gdb 4.x does not sign
10691 extend. Gdb 5.x does sign extend. */
10692 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10693 break;
10694
10695 case dw_val_class_unsigned_const:
10696 {
10697 int csize = constant_size (AT_unsigned (a));
10698 if (dwarf_version == 3
10699 && a->dw_attr == DW_AT_data_member_location
10700 && csize >= 4)
10701 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10702 else
10703 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10704 }
10705 break;
10706
10707 case dw_val_class_symview:
10708 {
10709 int vsize;
10710 if (symview_upper_bound <= 0xff)
10711 vsize = 1;
10712 else if (symview_upper_bound <= 0xffff)
10713 vsize = 2;
10714 else if (symview_upper_bound <= 0xffffffff)
10715 vsize = 4;
10716 else
10717 vsize = 8;
10718 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10719 "%s", name);
10720 }
10721 break;
10722
10723 case dw_val_class_const_implicit:
10724 if (flag_debug_asm)
10725 fprintf (asm_out_file, "\t\t\t%s %s ("
10726 HOST_WIDE_INT_PRINT_DEC ")\n",
10727 ASM_COMMENT_START, name, AT_int (a));
10728 break;
10729
10730 case dw_val_class_unsigned_const_implicit:
10731 if (flag_debug_asm)
10732 fprintf (asm_out_file, "\t\t\t%s %s ("
10733 HOST_WIDE_INT_PRINT_HEX ")\n",
10734 ASM_COMMENT_START, name, AT_unsigned (a));
10735 break;
10736
10737 case dw_val_class_const_double:
10738 {
10739 unsigned HOST_WIDE_INT first, second;
10740
10741 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10742 dw2_asm_output_data (1,
10743 HOST_BITS_PER_DOUBLE_INT
10744 / HOST_BITS_PER_CHAR,
10745 NULL);
10746
10747 if (WORDS_BIG_ENDIAN)
10748 {
10749 first = a->dw_attr_val.v.val_double.high;
10750 second = a->dw_attr_val.v.val_double.low;
10751 }
10752 else
10753 {
10754 first = a->dw_attr_val.v.val_double.low;
10755 second = a->dw_attr_val.v.val_double.high;
10756 }
10757
10758 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10759 first, "%s", name);
10760 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10761 second, NULL);
10762 }
10763 break;
10764
10765 case dw_val_class_wide_int:
10766 {
10767 int i;
10768 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10769 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10770 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10771 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10772 * l, NULL);
10773
10774 if (WORDS_BIG_ENDIAN)
10775 for (i = len - 1; i >= 0; --i)
10776 {
10777 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10778 "%s", name);
10779 name = "";
10780 }
10781 else
10782 for (i = 0; i < len; ++i)
10783 {
10784 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10785 "%s", name);
10786 name = "";
10787 }
10788 }
10789 break;
10790
10791 case dw_val_class_vec:
10792 {
10793 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10794 unsigned int len = a->dw_attr_val.v.val_vec.length;
10795 unsigned int i;
10796 unsigned char *p;
10797
10798 dw2_asm_output_data (constant_size (len * elt_size),
10799 len * elt_size, "%s", name);
10800 if (elt_size > sizeof (HOST_WIDE_INT))
10801 {
10802 elt_size /= 2;
10803 len *= 2;
10804 }
10805 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10806 i < len;
10807 i++, p += elt_size)
10808 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10809 "fp or vector constant word %u", i);
10810 break;
10811 }
10812
10813 case dw_val_class_flag:
10814 if (dwarf_version >= 4)
10815 {
10816 /* Currently all add_AT_flag calls pass in 1 as last argument,
10817 so DW_FORM_flag_present can be used. If that ever changes,
10818 we'll need to use DW_FORM_flag and have some optimization
10819 in build_abbrev_table that will change those to
10820 DW_FORM_flag_present if it is set to 1 in all DIEs using
10821 the same abbrev entry. */
10822 gcc_assert (AT_flag (a) == 1);
10823 if (flag_debug_asm)
10824 fprintf (asm_out_file, "\t\t\t%s %s\n",
10825 ASM_COMMENT_START, name);
10826 break;
10827 }
10828 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10829 break;
10830
10831 case dw_val_class_loc_list:
10832 output_loc_list_offset (a);
10833 break;
10834
10835 case dw_val_class_view_list:
10836 output_view_list_offset (a);
10837 break;
10838
10839 case dw_val_class_die_ref:
10840 if (AT_ref_external (a))
10841 {
10842 if (AT_ref (a)->comdat_type_p)
10843 {
10844 comdat_type_node *type_node
10845 = AT_ref (a)->die_id.die_type_node;
10846
10847 gcc_assert (type_node);
10848 output_signature (type_node->signature, name);
10849 }
10850 else
10851 {
10852 const char *sym = AT_ref (a)->die_id.die_symbol;
10853 int size;
10854
10855 gcc_assert (sym);
10856 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10857 length, whereas in DWARF3 it's always sized as an
10858 offset. */
10859 if (dwarf_version == 2)
10860 size = DWARF2_ADDR_SIZE;
10861 else
10862 size = dwarf_offset_size;
10863 /* ??? We cannot unconditionally output die_offset if
10864 non-zero - others might create references to those
10865 DIEs via symbols.
10866 And we do not clear its DIE offset after outputting it
10867 (and the label refers to the actual DIEs, not the
10868 DWARF CU unit header, which is where using label + offset
10869 would be the correct thing to do).
10870 ??? This is the reason for the with_offset flag. */
10871 if (AT_ref (a)->with_offset)
10872 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10873 debug_info_section, "%s", name);
10874 else
10875 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10876 name);
10877 }
10878 }
10879 else
10880 {
10881 gcc_assert (AT_ref (a)->die_offset);
10882 dw2_asm_output_data (dwarf_offset_size, AT_ref (a)->die_offset,
10883 "%s", name);
10884 }
10885 break;
10886
10887 case dw_val_class_fde_ref:
10888 {
10889 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10890
10891 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10892 a->dw_attr_val.v.val_fde_index * 2);
10893 dw2_asm_output_offset (dwarf_offset_size, l1, debug_frame_section,
10894 "%s", name);
10895 }
10896 break;
10897
10898 case dw_val_class_vms_delta:
10899 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10900 dw2_asm_output_vms_delta (dwarf_offset_size,
10901 AT_vms_delta2 (a), AT_vms_delta1 (a),
10902 "%s", name);
10903 #else
10904 dw2_asm_output_delta (dwarf_offset_size,
10905 AT_vms_delta2 (a), AT_vms_delta1 (a),
10906 "%s", name);
10907 #endif
10908 break;
10909
10910 case dw_val_class_lbl_id:
10911 output_attr_index_or_value (a);
10912 break;
10913
10914 case dw_val_class_lineptr:
10915 dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
10916 debug_line_section, "%s", name);
10917 break;
10918
10919 case dw_val_class_macptr:
10920 dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
10921 debug_macinfo_section, "%s", name);
10922 break;
10923
10924 case dw_val_class_loclistsptr:
10925 dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
10926 debug_loc_section, "%s", name);
10927 break;
10928
10929 case dw_val_class_str:
10930 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10931 dw2_asm_output_offset (dwarf_offset_size,
10932 a->dw_attr_val.v.val_str->label,
10933 debug_str_section,
10934 "%s: \"%s\"", name, AT_string (a));
10935 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10936 dw2_asm_output_offset (dwarf_offset_size,
10937 a->dw_attr_val.v.val_str->label,
10938 debug_line_str_section,
10939 "%s: \"%s\"", name, AT_string (a));
10940 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10941 dw2_asm_output_data_uleb128 (AT_index (a),
10942 "%s: \"%s\"", name, AT_string (a));
10943 else
10944 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10945 break;
10946
10947 case dw_val_class_file:
10948 {
10949 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10950
10951 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10952 a->dw_attr_val.v.val_file->filename);
10953 break;
10954 }
10955
10956 case dw_val_class_file_implicit:
10957 if (flag_debug_asm)
10958 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10959 ASM_COMMENT_START, name,
10960 maybe_emit_file (a->dw_attr_val.v.val_file),
10961 a->dw_attr_val.v.val_file->filename);
10962 break;
10963
10964 case dw_val_class_data8:
10965 {
10966 int i;
10967
10968 for (i = 0; i < 8; i++)
10969 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10970 i == 0 ? "%s" : NULL, name);
10971 break;
10972 }
10973
10974 case dw_val_class_high_pc:
10975 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10976 get_AT_low_pc (die), "DW_AT_high_pc");
10977 break;
10978
10979 case dw_val_class_discr_value:
10980 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10981 break;
10982
10983 case dw_val_class_discr_list:
10984 {
10985 dw_discr_list_ref list = AT_discr_list (a);
10986 const int size = size_of_discr_list (list);
10987
10988 /* This is a block, so output its length first. */
10989 dw2_asm_output_data (constant_size (size), size,
10990 "%s: block size", name);
10991
10992 for (; list != NULL; list = list->dw_discr_next)
10993 {
10994 /* One byte for the discriminant value descriptor, and then as
10995 many LEB128 numbers as required. */
10996 if (list->dw_discr_range)
10997 dw2_asm_output_data (1, DW_DSC_range,
10998 "%s: DW_DSC_range", name);
10999 else
11000 dw2_asm_output_data (1, DW_DSC_label,
11001 "%s: DW_DSC_label", name);
11002
11003 output_discr_value (&list->dw_discr_lower_bound, name);
11004 if (list->dw_discr_range)
11005 output_discr_value (&list->dw_discr_upper_bound, name);
11006 }
11007 break;
11008 }
11009
11010 default:
11011 gcc_unreachable ();
11012 }
11013 }
11014
11015 FOR_EACH_CHILD (die, c, output_die (c));
11016
11017 /* Add null byte to terminate sibling list. */
11018 if (die->die_child != NULL)
11019 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
11020 (unsigned long) die->die_offset);
11021 }
11022
11023 /* Output the dwarf version number. */
11024
11025 static void
11026 output_dwarf_version ()
11027 {
11028 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
11029 views in loclist. That will change eventually. */
11030 if (dwarf_version == 6)
11031 {
11032 static bool once;
11033 if (!once)
11034 {
11035 warning (0, "%<-gdwarf-6%> is output as version 5 with "
11036 "incompatibilities");
11037 once = true;
11038 }
11039 dw2_asm_output_data (2, 5, "DWARF version number");
11040 }
11041 else
11042 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
11043 }
11044
11045 /* Output the compilation unit that appears at the beginning of the
11046 .debug_info section, and precedes the DIE descriptions. */
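/* For reference (a sketch, not verbatim output): the DWARF 5 header emitted
   below consists of a unit_length (4 bytes, or 12 with the 64-bit DWARF
   escape), a 2-byte version, a 1-byte unit_type, a 1-byte address_size and
   a dwarf_offset_size debug_abbrev_offset; for DWARF 2-4 there is no
   unit_type and the address_size follows the abbrev offset instead.  */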
11047
11048 static void
11049 output_compilation_unit_header (enum dwarf_unit_type ut)
11050 {
11051 if (!XCOFF_DEBUGGING_INFO)
11052 {
11053 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11054 dw2_asm_output_data (4, 0xffffffff,
11055 "Initial length escape value indicating 64-bit DWARF extension");
11056 dw2_asm_output_data (dwarf_offset_size,
11057 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11058 "Length of Compilation Unit Info");
11059 }
11060
11061 output_dwarf_version ();
11062 if (dwarf_version >= 5)
11063 {
11064 const char *name;
11065 switch (ut)
11066 {
11067 case DW_UT_compile: name = "DW_UT_compile"; break;
11068 case DW_UT_type: name = "DW_UT_type"; break;
11069 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11070 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11071 default: gcc_unreachable ();
11072 }
11073 dw2_asm_output_data (1, ut, "%s", name);
11074 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11075 }
11076 dw2_asm_output_offset (dwarf_offset_size, abbrev_section_label,
11077 debug_abbrev_section,
11078 "Offset Into Abbrev. Section");
11079 if (dwarf_version < 5)
11080 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11081 }
11082
11083 /* Output the compilation unit DIE and its children. */
11084
11085 static void
11086 output_comp_unit (dw_die_ref die, int output_if_empty,
11087 const unsigned char *dwo_id)
11088 {
11089 const char *secname, *oldsym;
11090 char *tmp;
11091
11092 /* Unless we are outputting the main CU, we may throw away empty ones. */
11093 if (!output_if_empty && die->die_child == NULL)
11094 return;
11095
11096 /* Even if there are no children of this DIE, we must output the information
11097 about the compilation unit. Otherwise, on an empty translation unit, we
11098 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11099 will then complain when examining the file. First mark all the DIEs in
11100 this CU so we know which get local refs. */
11101 mark_dies (die);
11102
11103 external_ref_hash_type *extern_map = optimize_external_refs (die);
11104
11105 /* For now, optimize only the main CU; in order to optimize the rest
11106 we'd need to see all of them earlier. Leave the rest for post-linking
11107 tools like DWZ. */
11108 if (die == comp_unit_die ())
11109 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11110
11111 build_abbrev_table (die, extern_map);
11112
11113 optimize_abbrev_table ();
11114
11115 delete extern_map;
11116
11117 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11118 next_die_offset = (dwo_id
11119 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11120 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11121 calc_die_sizes (die);
11122
11123 oldsym = die->die_id.die_symbol;
11124 if (oldsym && die->comdat_type_p)
11125 {
11126 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11127
11128 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11129 secname = tmp;
11130 die->die_id.die_symbol = NULL;
11131 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11132 }
11133 else
11134 {
11135 switch_to_section (debug_info_section);
11136 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11137 info_section_emitted = true;
11138 }
11139
11140 /* For LTO cross unit DIE refs we want a symbol at the start of the
11141 debuginfo section, not on the CU DIE. */
11142 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11143 {
11144 /* ??? No way to get visibility assembled without a decl. */
11145 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11146 get_identifier (oldsym), char_type_node);
11147 TREE_PUBLIC (decl) = true;
11148 TREE_STATIC (decl) = true;
11149 DECL_ARTIFICIAL (decl) = true;
11150 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11151 DECL_VISIBILITY_SPECIFIED (decl) = true;
11152 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11153 #ifdef ASM_WEAKEN_LABEL
11154 /* We prefer a .weak because that handles duplicates from duplicate
11155 archive members in a graceful way. */
11156 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11157 #else
11158 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11159 #endif
11160 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11161 }
11162
11163 /* Output debugging information. */
11164 output_compilation_unit_header (dwo_id
11165 ? DW_UT_split_compile : DW_UT_compile);
11166 if (dwarf_version >= 5)
11167 {
11168 if (dwo_id != NULL)
11169 for (int i = 0; i < 8; i++)
11170 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11171 }
11172 output_die (die);
11173
11174 /* Leave the marks on the main CU, so we can check them in
11175 output_pubnames. */
11176 if (oldsym)
11177 {
11178 unmark_dies (die);
11179 die->die_id.die_symbol = oldsym;
11180 }
11181 }
11182
11183 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11184 and .debug_pubtypes. This is configured per-target, but can be
11185 overridden by the -gpubnames or -gno-pubnames options. */
11186
11187 static inline bool
11188 want_pubnames (void)
11189 {
11190 if (debug_info_level <= DINFO_LEVEL_TERSE
11191 /* Names and types go to the early debug part only. */
11192 || in_lto_p)
11193 return false;
11194 if (debug_generate_pub_sections != -1)
11195 return debug_generate_pub_sections;
11196 return targetm.want_debug_pub_sections;
11197 }
11198
11199 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11200
11201 static void
11202 add_AT_pubnames (dw_die_ref die)
11203 {
11204 if (want_pubnames ())
11205 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11206 }
11207
11208 /* Add a string attribute value to a skeleton DIE. */
11209
11210 static inline void
11211 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11212 const char *str)
11213 {
11214 dw_attr_node attr;
11215 struct indirect_string_node *node;
11216
11217 if (! skeleton_debug_str_hash)
11218 skeleton_debug_str_hash
11219 = hash_table<indirect_string_hasher>::create_ggc (10);
11220
11221 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11222 find_string_form (node);
11223 if (node->form == dwarf_FORM (DW_FORM_strx))
11224 node->form = DW_FORM_strp;
11225
11226 attr.dw_attr = attr_kind;
11227 attr.dw_attr_val.val_class = dw_val_class_str;
11228 attr.dw_attr_val.val_entry = NULL;
11229 attr.dw_attr_val.v.val_str = node;
11230 add_dwarf_attr (die, &attr);
11231 }
11232
11233 /* Helper function to generate top-level dies for skeleton debug_info and
11234 debug_types. */
11235
11236 static void
11237 add_top_level_skeleton_die_attrs (dw_die_ref die)
11238 {
11239 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11240 const char *comp_dir = comp_dir_string ();
11241
11242 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11243 if (comp_dir != NULL)
11244 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11245 add_AT_pubnames (die);
11246 if (addr_index_table != NULL && addr_index_table->size () > 0)
11247 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11248 }
11249
11250 /* Output skeleton debug sections that point to the dwo file. */
11251
11252 static void
11253 output_skeleton_debug_sections (dw_die_ref comp_unit,
11254 const unsigned char *dwo_id)
11255 {
11256 /* These attributes will be found in the full debug_info section. */
11257 remove_AT (comp_unit, DW_AT_producer);
11258 remove_AT (comp_unit, DW_AT_language);
11259
11260 switch_to_section (debug_skeleton_info_section);
11261 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11262
11263 /* Produce the skeleton compilation-unit header. This one differs enough from
11264 a normal CU header that it's better not to call
11265 output_compilation_unit_header. */
11266 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11267 dw2_asm_output_data (4, 0xffffffff,
11268 "Initial length escape value indicating 64-bit "
11269 "DWARF extension");
11270
11271 dw2_asm_output_data (dwarf_offset_size,
11272 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11273 - DWARF_INITIAL_LENGTH_SIZE
11274 + size_of_die (comp_unit),
11275 "Length of Compilation Unit Info");
11276 output_dwarf_version ();
11277 if (dwarf_version >= 5)
11278 {
11279 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11280 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11281 }
11282 dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_abbrev_section_label,
11283 debug_skeleton_abbrev_section,
11284 "Offset Into Abbrev. Section");
11285 if (dwarf_version < 5)
11286 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11287 else
11288 for (int i = 0; i < 8; i++)
11289 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11290
11291 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11292 output_die (comp_unit);
11293
11294 /* Build the skeleton debug_abbrev section. */
11295 switch_to_section (debug_skeleton_abbrev_section);
11296 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11297
11298 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11299
11300 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11301 }
11302
11303 /* Output a comdat type unit DIE and its children. */
11304
11305 static void
11306 output_comdat_type_unit (comdat_type_node *node,
11307 bool early_lto_debug ATTRIBUTE_UNUSED)
11308 {
11309 const char *secname;
11310 char *tmp;
11311 int i;
11312 #if defined (OBJECT_FORMAT_ELF)
11313 tree comdat_key;
11314 #endif
11315
11316 /* First mark all the DIEs in this CU so we know which get local refs. */
11317 mark_dies (node->root_die);
11318
11319 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11320
11321 build_abbrev_table (node->root_die, extern_map);
11322
11323 delete extern_map;
11324 extern_map = NULL;
11325
11326 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11327 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11328 calc_die_sizes (node->root_die);
11329
11330 #if defined (OBJECT_FORMAT_ELF)
11331 if (dwarf_version >= 5)
11332 {
11333 if (!dwarf_split_debug_info)
11334 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11335 else
11336 secname = (early_lto_debug
11337 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11338 }
11339 else if (!dwarf_split_debug_info)
11340 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11341 else
11342 secname = (early_lto_debug
11343 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11344
11345 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11346 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11347 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11348 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11349 comdat_key = get_identifier (tmp);
11350 targetm.asm_out.named_section (secname,
11351 SECTION_DEBUG | SECTION_LINKONCE,
11352 comdat_key);
11353 #else
11354 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11355 sprintf (tmp, (dwarf_version >= 5
11356 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11357 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11358 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11359 secname = tmp;
11360 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11361 #endif
11362
11363 /* Output debugging information. */
11364 output_compilation_unit_header (dwarf_split_debug_info
11365 ? DW_UT_split_type : DW_UT_type);
11366 output_signature (node->signature, "Type Signature");
11367 dw2_asm_output_data (dwarf_offset_size, node->type_die->die_offset,
11368 "Offset to Type DIE");
11369 output_die (node->root_die);
11370
11371 unmark_dies (node->root_die);
11372 }
11373
11374 /* Return the DWARF2/3 pubname associated with a decl. */
11375
11376 static const char *
11377 dwarf2_name (tree decl, int scope)
11378 {
11379 if (DECL_NAMELESS (decl))
11380 return NULL;
11381 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11382 }
11383
11384 /* Add a new entry to .debug_pubnames if appropriate. */
11385
11386 static void
11387 add_pubname_string (const char *str, dw_die_ref die)
11388 {
11389 pubname_entry e;
11390
11391 e.die = die;
11392 e.name = xstrdup (str);
11393 vec_safe_push (pubname_table, e);
11394 }
11395
11396 static void
11397 add_pubname (tree decl, dw_die_ref die)
11398 {
11399 if (!want_pubnames ())
11400 return;
11401
11402 /* Don't add items to the table when we expect that the consumer will have
11403 just read the enclosing die. For example, if the consumer is looking at a
11404 class_member, it will either be inside the class already, or will have just
11405 looked up the class to find the member. Either way, searching the class is
11406 faster than searching the index. */
11407 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11408 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11409 {
11410 const char *name = dwarf2_name (decl, 1);
11411
11412 if (name)
11413 add_pubname_string (name, die);
11414 }
11415 }
11416
11417 /* Add an enumerator to the pubnames section. */
11418
11419 static void
11420 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11421 {
11422 pubname_entry e;
11423
11424 gcc_assert (scope_name);
11425 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11426 e.die = die;
11427 vec_safe_push (pubname_table, e);
11428 }
11429
11430 /* Add a new entry to .debug_pubtypes if appropriate. */
11431
11432 static void
11433 add_pubtype (tree decl, dw_die_ref die)
11434 {
11435 pubname_entry e;
11436
11437 if (!want_pubnames ())
11438 return;
11439
11440 if ((TREE_PUBLIC (decl)
11441 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11442 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11443 {
11444 tree scope = NULL;
11445 const char *scope_name = "";
11446 const char *sep = is_cxx () ? "::" : ".";
11447 const char *name;
11448
11449 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11450 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11451 {
11452 scope_name = lang_hooks.dwarf_name (scope, 1);
11453 if (scope_name != NULL && scope_name[0] != '\0')
11454 scope_name = concat (scope_name, sep, NULL);
11455 else
11456 scope_name = "";
11457 }
11458
11459 if (TYPE_P (decl))
11460 name = type_tag (decl);
11461 else
11462 name = lang_hooks.dwarf_name (decl, 1);
11463
11464 /* If we don't have a name for the type, there's no point in adding
11465 it to the table. */
11466 if (name != NULL && name[0] != '\0')
11467 {
11468 e.die = die;
11469 e.name = concat (scope_name, name, NULL);
11470 vec_safe_push (pubtype_table, e);
11471 }
11472
11473 /* Although it might be more consistent to add the pubinfo for the
11474 enumerators as their dies are created, they should only be added if the
11475 enum type meets the criteria above. So rather than re-check the parent
11476 enum type whenever an enumerator die is created, just output them all
11477 here. This isn't protected by the name conditional because anonymous
11478 enums don't have names. */
11479 if (die->die_tag == DW_TAG_enumeration_type)
11480 {
11481 dw_die_ref c;
11482
11483 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11484 }
11485 }
11486 }
11487
11488 /* Output a single entry in the pubnames table. */
11489
11490 static void
11491 output_pubname (dw_offset die_offset, pubname_entry *entry)
11492 {
11493 dw_die_ref die = entry->die;
11494 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11495
11496 dw2_asm_output_data (dwarf_offset_size, die_offset, "DIE offset");
11497
11498 if (debug_generate_pub_sections == 2)
11499 {
11500 /* This logic follows gdb's method for determining the value of the flag
11501 byte. */
11502 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11503 switch (die->die_tag)
11504 {
11505 case DW_TAG_typedef:
11506 case DW_TAG_base_type:
11507 case DW_TAG_subrange_type:
11508 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11509 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11510 break;
11511 case DW_TAG_enumerator:
11512 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11513 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11514 if (!is_cxx ())
11515 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11516 break;
11517 case DW_TAG_subprogram:
11518 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11519 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11520 if (!is_ada ())
11521 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11522 break;
11523 case DW_TAG_constant:
11524 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11525 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11526 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11527 break;
11528 case DW_TAG_variable:
11529 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11530 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11531 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11532 break;
11533 case DW_TAG_namespace:
11534 case DW_TAG_imported_declaration:
11535 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11536 break;
11537 case DW_TAG_class_type:
11538 case DW_TAG_interface_type:
11539 case DW_TAG_structure_type:
11540 case DW_TAG_union_type:
11541 case DW_TAG_enumeration_type:
11542 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11543 if (!is_cxx ())
11544 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11545 break;
11546 default:
11547 /* An unusual tag. Leave the flag-byte empty. */
11548 break;
11549 }
11550 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11551 "GDB-index flags");
11552 }
11553
11554 dw2_asm_output_nstring (entry->name, -1, "external name");
11555 }
11556
11557
11558 /* Output the public names table used to speed up access to externally
11559 visible names; or the public types table used to find type definitions. */
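/* For reference: the table emitted below is a header (length, version 2,
   offset and length of the compilation unit), followed by one entry per
   name consisting of a DIE offset, an optional GDB-index flag byte (only
   when debug_generate_pub_sections == 2) and the NUL-terminated name, and
   is terminated by a zero DIE offset.  */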
11560
11561 static void
11562 output_pubnames (vec<pubname_entry, va_gc> *names)
11563 {
11564 unsigned i;
11565 unsigned long pubnames_length = size_of_pubnames (names);
11566 pubname_entry *pub;
11567
11568 if (!XCOFF_DEBUGGING_INFO)
11569 {
11570 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11571 dw2_asm_output_data (4, 0xffffffff,
11572 "Initial length escape value indicating 64-bit DWARF extension");
11573 dw2_asm_output_data (dwarf_offset_size, pubnames_length,
11574 "Pub Info Length");
11575 }
11576
11577 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11578 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11579
11580 if (dwarf_split_debug_info)
11581 dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
11582 debug_skeleton_info_section,
11583 "Offset of Compilation Unit Info");
11584 else
11585 dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
11586 debug_info_section,
11587 "Offset of Compilation Unit Info");
11588 dw2_asm_output_data (dwarf_offset_size, next_die_offset,
11589 "Compilation Unit Length");
11590
11591 FOR_EACH_VEC_ELT (*names, i, pub)
11592 {
11593 if (include_pubname_in_output (names, pub))
11594 {
11595 dw_offset die_offset = pub->die->die_offset;
11596
11597 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11598 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11599 gcc_assert (pub->die->die_mark);
11600
11601 /* If we're putting types in their own .debug_types sections,
11602 the .debug_pubtypes table will still point to the compile
11603 unit (not the type unit), so we want to use the offset of
11604 the skeleton DIE (if there is one). */
11605 if (pub->die->comdat_type_p && names == pubtype_table)
11606 {
11607 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11608
11609 if (type_node != NULL)
11610 die_offset = (type_node->skeleton_die != NULL
11611 ? type_node->skeleton_die->die_offset
11612 : comp_unit_die ()->die_offset);
11613 }
11614
11615 output_pubname (die_offset, pub);
11616 }
11617 }
11618
11619 dw2_asm_output_data (dwarf_offset_size, 0, NULL);
11620 }
11621
11622 /* Output public names and types tables if necessary. */
11623
11624 static void
11625 output_pubtables (void)
11626 {
11627 if (!want_pubnames () || !info_section_emitted)
11628 return;
11629
11630 switch_to_section (debug_pubnames_section);
11631 output_pubnames (pubname_table);
11632 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11633 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11634 simply won't look for the section. */
11635 switch_to_section (debug_pubtypes_section);
11636 output_pubnames (pubtype_table);
11637 }
11638
11639
11640 /* Output the information that goes into the .debug_aranges table.
11641 Namely, define the beginning and ending address range of the
11642 text section generated for this compilation unit. */
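/* For reference: what follows is a header (length, version 2, offset of the
   compilation unit, address size, a zero segment size and padding to twice
   the address size), then one (address, length) pair per contiguous code
   region, terminated by an all-zero pair.  */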
11643
11644 static void
11645 output_aranges (void)
11646 {
11647 unsigned i;
11648 unsigned long aranges_length = size_of_aranges ();
11649
11650 if (!XCOFF_DEBUGGING_INFO)
11651 {
11652 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11653 dw2_asm_output_data (4, 0xffffffff,
11654 "Initial length escape value indicating 64-bit DWARF extension");
11655 dw2_asm_output_data (dwarf_offset_size, aranges_length,
11656 "Length of Address Ranges Info");
11657 }
11658
11659 /* Version number for aranges is still 2, even up to DWARF5. */
11660 dw2_asm_output_data (2, 2, "DWARF aranges version");
11661 if (dwarf_split_debug_info)
11662 dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
11663 debug_skeleton_info_section,
11664 "Offset of Compilation Unit Info");
11665 else
11666 dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
11667 debug_info_section,
11668 "Offset of Compilation Unit Info");
11669 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11670 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11671
11672 /* We need to align to twice the pointer size here. */
11673 if (DWARF_ARANGES_PAD_SIZE)
11674 {
11675 /* Pad using 2 byte words so that the padding is correct for any
11676 pointer size. */
11677 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11678 2 * DWARF2_ADDR_SIZE);
11679 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11680 dw2_asm_output_data (2, 0, NULL);
11681 }
11682
11683 /* It is necessary not to output these entries if the sections were
11684 not used; in that case the length will be 0 and the address may
11685 end up as 0 if the section is discarded by ld --gc-sections,
11686 leaving an invalid (0, 0) entry that can be confused with the
11687 terminator. */
11688 if (text_section_used)
11689 {
11690 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11691 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11692 text_section_label, "Length");
11693 }
11694 if (cold_text_section_used)
11695 {
11696 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11697 "Address");
11698 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11699 cold_text_section_label, "Length");
11700 }
11701
11702 if (have_multiple_function_sections)
11703 {
11704 unsigned fde_idx;
11705 dw_fde_ref fde;
11706
11707 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11708 {
11709 if (DECL_IGNORED_P (fde->decl))
11710 continue;
11711 if (!fde->in_std_section)
11712 {
11713 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11714 "Address");
11715 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11716 fde->dw_fde_begin, "Length");
11717 }
11718 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11719 {
11720 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11721 "Address");
11722 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11723 fde->dw_fde_second_begin, "Length");
11724 }
11725 }
11726 }
11727
11728 /* Output the terminator words. */
11729 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11730 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11731 }
11732
11733 /* Add a new entry to .debug_ranges. Return its index into
11734 ranges_table vector. */
11735
11736 static unsigned int
11737 add_ranges_num (int num, bool maybe_new_sec)
11738 {
11739 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11740 vec_safe_push (ranges_table, r);
11741 return vec_safe_length (ranges_table) - 1;
11742 }
11743
11744 /* Add a new entry to .debug_ranges corresponding to a block, or a
11745 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11746 this entry might be in a different section from previous range. */
11747
11748 static unsigned int
11749 add_ranges (const_tree block, bool maybe_new_sec)
11750 {
11751 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11752 }
11753
11754 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11755 chain, or a middle entry of a chain that will be directly referred to. */
11756
11757 static void
11758 note_rnglist_head (unsigned int offset)
11759 {
11760 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11761 return;
11762 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11763 }
11764
11765 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11766 When using dwarf_split_debug_info, address attributes in dies destined
11767 for the final executable should be direct references--setting the
11768 parameter force_direct ensures this behavior. */
11769
11770 static void
11771 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11772 bool *added, bool force_direct)
11773 {
11774 unsigned int in_use = vec_safe_length (ranges_by_label);
11775 unsigned int offset;
11776 dw_ranges_by_label rbl = { begin, end };
11777 vec_safe_push (ranges_by_label, rbl);
11778 offset = add_ranges_num (-(int)in_use - 1, true);
11779 if (!*added)
11780 {
11781 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11782 *added = true;
11783 note_rnglist_head (offset);
11784 }
11785 }
11786
11787 /* Emit .debug_ranges section. */
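/* For reference: each list is a series of (begin, end) address pairs,
   emitted as deltas from the text section label when there is a single
   text section (the CU base address, DW_AT_low_pc, is then implied) and
   as absolute addresses otherwise, terminated by a (0, 0) pair.  */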
11788
11789 static void
11790 output_ranges (void)
11791 {
11792 unsigned i;
11793 static const char *const start_fmt = "Offset %#x";
11794 const char *fmt = start_fmt;
11795 dw_ranges *r;
11796
11797 switch_to_section (debug_ranges_section);
11798 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11799 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11800 {
11801 int block_num = r->num;
11802
11803 if (block_num > 0)
11804 {
11805 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11806 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11807
11808 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11809 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11810
11811 /* If all code is in the text section, then the compilation
11812 unit base address defaults to DW_AT_low_pc, which is the
11813 base of the text section. */
11814 if (!have_multiple_function_sections)
11815 {
11816 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11817 text_section_label,
11818 fmt, i * 2 * DWARF2_ADDR_SIZE);
11819 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11820 text_section_label, NULL);
11821 }
11822
11823 /* Otherwise, the compilation unit base address is zero,
11824 which allows us to use absolute addresses, and not worry
11825 about whether the target supports cross-section
11826 arithmetic. */
11827 else
11828 {
11829 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11830 fmt, i * 2 * DWARF2_ADDR_SIZE);
11831 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11832 }
11833
11834 fmt = NULL;
11835 }
11836
11837 /* Negative block_num stands for an index into ranges_by_label. */
11838 else if (block_num < 0)
11839 {
11840 int lab_idx = - block_num - 1;
11841
11842 if (!have_multiple_function_sections)
11843 {
11844 gcc_unreachable ();
11845 #if 0
11846 /* If we ever use add_ranges_by_labels () for a single
11847 function section, all we have to do is to take out
11848 the #if 0 above. */
11849 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11850 (*ranges_by_label)[lab_idx].begin,
11851 text_section_label,
11852 fmt, i * 2 * DWARF2_ADDR_SIZE);
11853 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11854 (*ranges_by_label)[lab_idx].end,
11855 text_section_label, NULL);
11856 #endif
11857 }
11858 else
11859 {
11860 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11861 (*ranges_by_label)[lab_idx].begin,
11862 fmt, i * 2 * DWARF2_ADDR_SIZE);
11863 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11864 (*ranges_by_label)[lab_idx].end,
11865 NULL);
11866 }
11867 }
11868 else
11869 {
11870 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11871 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11872 fmt = start_fmt;
11873 }
11874 }
11875 }
11876
11877 /* Non-zero if .debug_line_str should be used for .debug_line section
11878 strings or strings that are likely shareable with those. */
11879 #define DWARF5_USE_DEBUG_LINE_STR \
11880 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11881 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11882 /* FIXME: there is no .debug_line_str.dwo section, \
11883 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11884 && !dwarf_split_debug_info)
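/* (.debug_line_str is mainly a win when the linker can merge identical
   strings across units, hence the SECTION_MERGE check above.)  */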
11885
11886
11887 /* Returns TRUE if we are outputting DWARF5 and either the assembler
11888 supports DWARF5 .debug_line tables using .debug_line_str or we
11889 generate it ourselves; split-dwarf is excluded since it has no
11890 .debug_line_str. */
11891 static bool
11892 asm_outputs_debug_line_str (void)
11893 {
11894 if (dwarf_version >= 5
11895 && ! output_asm_line_debug_info ()
11896 && DWARF5_USE_DEBUG_LINE_STR)
11897 return true;
11898 else
11899 {
11900 #if defined(HAVE_AS_GDWARF_5_DEBUG_FLAG) && defined(HAVE_AS_WORKING_DWARF_N_FLAG)
11901 return !dwarf_split_debug_info && dwarf_version >= 5;
11902 #else
11903 return false;
11904 #endif
11905 }
11906 }
11907
11908
11909 /* Assign .debug_rnglists indexes. */
11910
11911 static void
11912 index_rnglists (void)
11913 {
11914 unsigned i;
11915 dw_ranges *r;
11916
11917 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11918 if (r->label)
11919 r->idx = rnglist_idx++;
11920 }
11921
11922 /* Emit .debug_rnglists section. */
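/* For reference: each range list below uses a small set of DW_RLE_* opcodes:
   DW_RLE_offset_pair with uleb128 deltas from the current base address
   (established by DW_RLE_base_address where profitable, or implied by the
   text section label), DW_RLE_start_length or DW_RLE_start_end when no base
   address can be used, and DW_RLE_end_of_list as terminator.  */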
11923
11924 static void
11925 output_rnglists (unsigned generation)
11926 {
11927 unsigned i;
11928 dw_ranges *r;
11929 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11930 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11931 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11932
11933 switch_to_section (debug_ranges_section);
11934 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11935 /* There are up to 4 unique ranges labels per generation.
11936 See also init_sections_and_labels. */
11937 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11938 2 + generation * 4);
11939 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11940 3 + generation * 4);
11941 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11942 dw2_asm_output_data (4, 0xffffffff,
11943 "Initial length escape value indicating "
11944 "64-bit DWARF extension");
11945 dw2_asm_output_delta (dwarf_offset_size, l2, l1,
11946 "Length of Range Lists");
11947 ASM_OUTPUT_LABEL (asm_out_file, l1);
11948 output_dwarf_version ();
11949 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11950 dw2_asm_output_data (1, 0, "Segment Size");
11951 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11952 about relocation sizes and primarily care about the size of .debug*
11953 sections in linked shared libraries and executables, then
11954 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11955 into it are usually larger than just DW_FORM_sec_offset offsets
11956 into the .debug_rnglists section. */
11957 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11958 "Offset Entry Count");
11959 if (dwarf_split_debug_info)
11960 {
11961 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11962 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11963 if (r->label)
11964 dw2_asm_output_delta (dwarf_offset_size, r->label,
11965 ranges_base_label, NULL);
11966 }
11967
11968 const char *lab = "";
11969 unsigned int len = vec_safe_length (ranges_table);
11970 const char *base = NULL;
11971 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11972 {
11973 int block_num = r->num;
11974
11975 if (r->label)
11976 {
11977 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11978 lab = r->label;
11979 }
11980 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11981 base = NULL;
11982 if (block_num > 0)
11983 {
11984 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11985 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11986
11987 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11988 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11989
11990 if (HAVE_AS_LEB128)
11991 {
11992 /* If all code is in the text section, then the compilation
11993 unit base address defaults to DW_AT_low_pc, which is the
11994 base of the text section. */
11995 if (!have_multiple_function_sections)
11996 {
11997 dw2_asm_output_data (1, DW_RLE_offset_pair,
11998 "DW_RLE_offset_pair (%s)", lab);
11999 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
12000 "Range begin address (%s)", lab);
12001 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
12002 "Range end address (%s)", lab);
12003 continue;
12004 }
12005 if (base == NULL)
12006 {
12007 dw_ranges *r2 = NULL;
12008 if (i < len - 1)
12009 r2 = &(*ranges_table)[i + 1];
12010 if (r2
12011 && r2->num != 0
12012 && r2->label == NULL
12013 && !r2->maybe_new_sec)
12014 {
12015 dw2_asm_output_data (1, DW_RLE_base_address,
12016 "DW_RLE_base_address (%s)", lab);
12017 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12018 "Base address (%s)", lab);
12019 strcpy (basebuf, blabel);
12020 base = basebuf;
12021 }
12022 }
12023 if (base)
12024 {
12025 dw2_asm_output_data (1, DW_RLE_offset_pair,
12026 "DW_RLE_offset_pair (%s)", lab);
12027 dw2_asm_output_delta_uleb128 (blabel, base,
12028 "Range begin address (%s)", lab);
12029 dw2_asm_output_delta_uleb128 (elabel, base,
12030 "Range end address (%s)", lab);
12031 continue;
12032 }
12033 dw2_asm_output_data (1, DW_RLE_start_length,
12034 "DW_RLE_start_length (%s)", lab);
12035 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12036 "Range begin address (%s)", lab);
12037 dw2_asm_output_delta_uleb128 (elabel, blabel,
12038 "Range length (%s)", lab);
12039 }
12040 else
12041 {
12042 dw2_asm_output_data (1, DW_RLE_start_end,
12043 "DW_RLE_start_end (%s)", lab);
12044 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12045 "Range begin address (%s)", lab);
12046 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12047 "Range end address (%s)", lab);
12048 }
12049 }
12050
12051 /* Negative block_num stands for an index into ranges_by_label. */
12052 else if (block_num < 0)
12053 {
12054 int lab_idx = - block_num - 1;
12055 const char *blabel = (*ranges_by_label)[lab_idx].begin;
12056 const char *elabel = (*ranges_by_label)[lab_idx].end;
12057
12058 if (!have_multiple_function_sections)
12059 gcc_unreachable ();
12060 if (HAVE_AS_LEB128)
12061 {
12062 dw2_asm_output_data (1, DW_RLE_start_length,
12063 "DW_RLE_start_length (%s)", lab);
12064 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12065 "Range begin address (%s)", lab);
12066 dw2_asm_output_delta_uleb128 (elabel, blabel,
12067 "Range length (%s)", lab);
12068 }
12069 else
12070 {
12071 dw2_asm_output_data (1, DW_RLE_start_end,
12072 "DW_RLE_start_end (%s)", lab);
12073 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12074 "Range begin address (%s)", lab);
12075 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12076 "Range end address (%s)", lab);
12077 }
12078 }
12079 else
12080 dw2_asm_output_data (1, DW_RLE_end_of_list,
12081 "DW_RLE_end_of_list (%s)", lab);
12082 }
12083 ASM_OUTPUT_LABEL (asm_out_file, l2);
12084 }
12085
12086 /* Data structure containing information about input files. */
12087 struct file_info
12088 {
12089 const char *path; /* Complete file name. */
12090 const char *fname; /* File name part. */
12091 int length; /* Length of entire string. */
12092 struct dwarf_file_data * file_idx; /* Index in input file table. */
12093 int dir_idx; /* Index in directory table. */
12094 };
12095
12096 /* Data structure containing information about directories with source
12097 files. */
12098 struct dir_info
12099 {
12100 const char *path; /* Path including directory name. */
12101 int length; /* Path length. */
12102 int prefix; /* Index of directory entry which is a prefix. */
12103 int count; /* Number of files in this directory. */
12104 int dir_idx; /* Index of directory used as base. */
12105 };
12106
12107 /* Callback function for file_info comparison. We sort by looking at
12108 the directories in the path. */
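/* For example (illustrative): "main.c", having no directory, sorts before
   "src/util.c", and "src/sub/x.c" sorts before "src/y.c", since longer
   directory prefixes are ordered before shorter ones.  */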
12109
12110 static int
12111 file_info_cmp (const void *p1, const void *p2)
12112 {
12113 const struct file_info *const s1 = (const struct file_info *) p1;
12114 const struct file_info *const s2 = (const struct file_info *) p2;
12115 const unsigned char *cp1;
12116 const unsigned char *cp2;
12117
12118 /* Take care of file names without directories. We need to make sure that
12119 we return consistent values to qsort, since some implementations get
12120 confused if comparing the same operands in opposite orders yields the
12121 same nonzero value. So if neither has a directory, return 0; otherwise
12122 return 1 or -1 depending on which one has the directory. We want the
12123 one with the directory to sort after the one without, so all files
12124 without directories are at the start (normally only the compilation unit file). */
12125 if ((s1->path == s1->fname || s2->path == s2->fname))
12126 return (s2->path == s2->fname) - (s1->path == s1->fname);
12127
12128 cp1 = (const unsigned char *) s1->path;
12129 cp2 = (const unsigned char *) s2->path;
12130
12131 while (1)
12132 {
12133 ++cp1;
12134 ++cp2;
12135 /* Reached the end of the first path? If so, handle like above,
12136 but now we want longer directory prefixes before shorter ones. */
12137 if ((cp1 == (const unsigned char *) s1->fname)
12138 || (cp2 == (const unsigned char *) s2->fname))
12139 return ((cp1 == (const unsigned char *) s1->fname)
12140 - (cp2 == (const unsigned char *) s2->fname));
12141
12142 /* Character of current path component the same? */
12143 else if (*cp1 != *cp2)
12144 return *cp1 - *cp2;
12145 }
12146 }
12147
12148 struct file_name_acquire_data
12149 {
12150 struct file_info *files;
12151 int used_files;
12152 int max_files;
12153 };
12154
12155 /* Traversal function for the hash table. */
12156
12157 int
12158 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12159 {
12160 struct dwarf_file_data *d = *slot;
12161 struct file_info *fi;
12162 const char *f;
12163
12164 gcc_assert (fnad->max_files >= d->emitted_number);
12165
12166 if (! d->emitted_number)
12167 return 1;
12168
12169 gcc_assert (fnad->max_files != fnad->used_files);
12170
12171 fi = fnad->files + fnad->used_files++;
12172
12173 f = remap_debug_filename (d->filename);
12174
12175 /* Skip all leading "./". */
12176 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12177 f += 2;
12178
12179 /* Create a new array entry. */
12180 fi->path = f;
12181 fi->length = strlen (f);
12182 fi->file_idx = d;
12183
12184 /* Search for the file name part. */
12185 f = strrchr (f, DIR_SEPARATOR);
12186 #if defined (DIR_SEPARATOR_2)
12187 {
12188 const char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12189
12190 if (g != NULL)
12191 {
12192 if (f == NULL || f < g)
12193 f = g;
12194 }
12195 }
12196 #endif
12197
12198 fi->fname = f == NULL ? fi->path : f + 1;
12199 return 1;
12200 }
12201
12202 /* Helper function for output_file_names. Emit a FORM encoded
12203 string STR, with assembly comment start ENTRY_KIND and
12204 index IDX. */
12205
12206 static void
12207 output_line_string (enum dwarf_form form, const char *str,
12208 const char *entry_kind, unsigned int idx)
12209 {
12210 switch (form)
12211 {
12212 case DW_FORM_string:
12213 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12214 break;
12215 case DW_FORM_line_strp:
12216 if (!debug_line_str_hash)
12217 debug_line_str_hash
12218 = hash_table<indirect_string_hasher>::create_ggc (10);
12219
12220 struct indirect_string_node *node;
12221 node = find_AT_string_in_table (str, debug_line_str_hash);
12222 set_indirect_string (node);
12223 node->form = form;
12224 dw2_asm_output_offset (dwarf_offset_size, node->label,
12225 debug_line_str_section, "%s: %#x: \"%s\"",
12226 entry_kind, 0, node->str);
12227 break;
12228 default:
12229 gcc_unreachable ();
12230 }
12231 }
12232
12233 /* Output the directory table and the file name table. We try to minimize
12234 the total amount of memory needed. A heuristic is used to avoid large
12235 slowdowns with many input files. */
12236
12237 static void
12238 output_file_names (void)
12239 {
12240 struct file_name_acquire_data fnad;
12241 int numfiles;
12242 struct file_info *files;
12243 struct dir_info *dirs;
12244 int *saved;
12245 int *savehere;
12246 int *backmap;
12247 int ndirs;
12248 int idx_offset;
12249 int i;
12250
12251 if (!last_emitted_file)
12252 {
12253 if (dwarf_version >= 5)
12254 {
12255 const char *comp_dir = comp_dir_string ();
12256 if (comp_dir == NULL)
12257 comp_dir = "";
12258 dw2_asm_output_data (1, 1, "Directory entry format count");
12259 enum dwarf_form str_form = DW_FORM_string;
12260 if (DWARF5_USE_DEBUG_LINE_STR)
12261 str_form = DW_FORM_line_strp;
12262 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12263 dw2_asm_output_data_uleb128 (str_form, "%s",
12264 get_DW_FORM_name (str_form));
12265 dw2_asm_output_data_uleb128 (1, "Directories count");
12266 if (str_form == DW_FORM_string)
12267 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12268 else
12269 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12270 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12271 if (filename0 == NULL)
12272 filename0 = "";
12273 #ifdef VMS_DEBUGGING_INFO
12274 dw2_asm_output_data (1, 4, "File name entry format count");
12275 #else
12276 dw2_asm_output_data (1, 2, "File name entry format count");
12277 #endif
12278 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12279 dw2_asm_output_data_uleb128 (str_form, "%s",
12280 get_DW_FORM_name (str_form));
12281 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12282 "DW_LNCT_directory_index");
12283 dw2_asm_output_data_uleb128 (DW_FORM_data1, "%s",
12284 get_DW_FORM_name (DW_FORM_data1));
12285 #ifdef VMS_DEBUGGING_INFO
12286 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12287 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12288 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12289 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12290 #endif
12291 dw2_asm_output_data_uleb128 (1, "File names count");
12292
12293 output_line_string (str_form, filename0, "File Entry", 0);
12294 dw2_asm_output_data (1, 0, NULL);
12295 #ifdef VMS_DEBUGGING_INFO
12296 dw2_asm_output_data_uleb128 (0, NULL);
12297 dw2_asm_output_data_uleb128 (0, NULL);
12298 #endif
12299 }
12300 else
12301 {
12302 dw2_asm_output_data (1, 0, "End directory table");
12303 dw2_asm_output_data (1, 0, "End file name table");
12304 }
12305 return;
12306 }
12307
12308 numfiles = last_emitted_file->emitted_number;
12309
12310 /* Allocate the various arrays we need. */
12311 files = XALLOCAVEC (struct file_info, numfiles);
12312 dirs = XALLOCAVEC (struct dir_info, numfiles);
12313
12314 fnad.files = files;
12315 fnad.used_files = 0;
12316 fnad.max_files = numfiles;
12317 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12318 gcc_assert (fnad.used_files == fnad.max_files);
12319
12320 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12321
12322 /* Find all the different directories used. */
12323 dirs[0].path = files[0].path;
12324 dirs[0].length = files[0].fname - files[0].path;
12325 dirs[0].prefix = -1;
12326 dirs[0].count = 1;
12327 dirs[0].dir_idx = 0;
12328 files[0].dir_idx = 0;
12329 ndirs = 1;
12330
12331 for (i = 1; i < numfiles; i++)
12332 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12333 && memcmp (dirs[ndirs - 1].path, files[i].path,
12334 dirs[ndirs - 1].length) == 0)
12335 {
12336 /* Same directory as last entry. */
12337 files[i].dir_idx = ndirs - 1;
12338 ++dirs[ndirs - 1].count;
12339 }
12340 else
12341 {
12342 int j;
12343
12344 /* This is a new directory. */
12345 dirs[ndirs].path = files[i].path;
12346 dirs[ndirs].length = files[i].fname - files[i].path;
12347 dirs[ndirs].count = 1;
12348 dirs[ndirs].dir_idx = ndirs;
12349 files[i].dir_idx = ndirs;
12350
12351 /* Search for a prefix. */
12352 dirs[ndirs].prefix = -1;
12353 for (j = 0; j < ndirs; j++)
12354 if (dirs[j].length < dirs[ndirs].length
12355 && dirs[j].length > 1
12356 && (dirs[ndirs].prefix == -1
12357 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12358 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12359 dirs[ndirs].prefix = j;
12360
12361 ++ndirs;
12362 }
12363
12364 /* Now to the actual work. We have to find a subset of the directories that
12365 allows expressing each file name using references to the directory table
12366 with the fewest characters. We do not do an exhaustive search, which
12367 would require checking every combination of every possible prefix.
12368 Instead we use a heuristic which provides nearly optimal results in
12369 most cases and is never far off. */
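/* For instance (illustrative numbers): if "/usr/include/" holds 50 files
   and "/usr/" is already a table entry, listing "/usr/include/" as well
   saves strlen ("/usr/include/") - strlen ("/usr/") = 8 bytes per file
   name, i.e. 400 bytes, for the cost of one extra directory entry, so the
   heuristic below accepts it.  */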
12370 saved = XALLOCAVEC (int, ndirs);
12371 savehere = XALLOCAVEC (int, ndirs);
12372
12373 memset (saved, '\0', ndirs * sizeof (saved[0]));
12374 for (i = 0; i < ndirs; i++)
12375 {
12376 int j;
12377 int total;
12378
12379 /* We can always save some space for the current directory. But this
12380 does not mean it will be enough to justify adding the directory. */
12381 savehere[i] = dirs[i].length;
12382 total = (savehere[i] - saved[i]) * dirs[i].count;
12383
12384 for (j = i + 1; j < ndirs; j++)
12385 {
12386 savehere[j] = 0;
12387 if (saved[j] < dirs[i].length)
12388 {
12389 /* Determine whether the dirs[i] path is a prefix of the
12390 dirs[j] path. */
12391 int k;
12392
12393 k = dirs[j].prefix;
12394 while (k != -1 && k != (int) i)
12395 k = dirs[k].prefix;
12396
12397 if (k == (int) i)
12398 {
12399 /* Yes it is. We can possibly save some memory by
12400 writing the filenames in dirs[j] relative to
12401 dirs[i]. */
12402 savehere[j] = dirs[i].length;
12403 total += (savehere[j] - saved[j]) * dirs[j].count;
12404 }
12405 }
12406 }
12407
12408 /* Check whether we can save enough to justify adding the dirs[i]
12409 directory. */
12410 if (total > dirs[i].length + 1)
12411 {
12412 /* It's worthwhile adding. */
12413 for (j = i; j < ndirs; j++)
12414 if (savehere[j] > 0)
12415 {
12416 /* Remember how much we saved for this directory so far. */
12417 saved[j] = savehere[j];
12418
12419 /* Remember the prefix directory. */
12420 dirs[j].dir_idx = i;
12421 }
12422 }
12423 }
12424
12425 /* Emit the directory name table. */
12426 idx_offset = dirs[0].length > 0 ? 1 : 0;
12427 enum dwarf_form str_form = DW_FORM_string;
12428 enum dwarf_form idx_form = DW_FORM_udata;
12429 if (dwarf_version >= 5)
12430 {
12431 const char *comp_dir = comp_dir_string ();
12432 if (comp_dir == NULL)
12433 comp_dir = "";
12434 dw2_asm_output_data (1, 1, "Directory entry format count");
12435 if (DWARF5_USE_DEBUG_LINE_STR)
12436 str_form = DW_FORM_line_strp;
12437 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12438 dw2_asm_output_data_uleb128 (str_form, "%s",
12439 get_DW_FORM_name (str_form));
12440 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12441 if (str_form == DW_FORM_string)
12442 {
12443 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12444 for (i = 1 - idx_offset; i < ndirs; i++)
12445 dw2_asm_output_nstring (dirs[i].path,
12446 dirs[i].length
12447 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12448 "Directory Entry: %#x", i + idx_offset);
12449 }
12450 else
12451 {
12452 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12453 for (i = 1 - idx_offset; i < ndirs; i++)
12454 {
12455 const char *str
12456 = ggc_alloc_string (dirs[i].path,
12457 dirs[i].length
12458 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12459 output_line_string (str_form, str, "Directory Entry",
12460 (unsigned) i + idx_offset);
12461 }
12462 }
12463 }
12464 else
12465 {
12466 for (i = 1 - idx_offset; i < ndirs; i++)
12467 dw2_asm_output_nstring (dirs[i].path,
12468 dirs[i].length
12469 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12470 "Directory Entry: %#x", i + idx_offset);
12471
12472 dw2_asm_output_data (1, 0, "End directory table");
12473 }
12474
12475 /* We have to emit them in the order of emitted_number since that's
12476 used in the debug info generation. To do this efficiently we
12477 generate a back-mapping of the indices first. */
12478 backmap = XALLOCAVEC (int, numfiles);
12479 for (i = 0; i < numfiles; i++)
12480 backmap[files[i].file_idx->emitted_number - 1] = i;
12481
12482 if (dwarf_version >= 5)
12483 {
12484 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12485 if (filename0 == NULL)
12486 filename0 = "";
12487 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12488 DW_FORM_data2. Choose one based on the number of directories
12489 and how much space they would occupy in each encoding.
12490 If we have at most 256 directories, all indexes fit into
12491 a single byte, so DW_FORM_data1 is the most compact (if there
12492 are at most 128 directories, DW_FORM_udata would be just as
12493 compact, though no shorter, and slower to decode). */
12494 if (ndirs + idx_offset <= 256)
12495 idx_form = DW_FORM_data1;
12496 /* If there are more than 65536 directories, we have to use
12497 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12498 Otherwise, compute how much space the indexes would occupy if they
12499 all used DW_FORM_udata (the sum below), compare that to the size of
12500 the DW_FORM_data2 encoding, and pick the more efficient one. */
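/* Illustrative only: DW_FORM_data2 always costs 2 * (numfiles + 1)
bytes here. With e.g. three files whose directory indexes all need
two uleb128 bytes (indexes of 128 or more), sum is 1 + 3 * 2 = 7,
below 2 * 4 = 8, so DW_FORM_udata is kept; if each index needed
three uleb128 bytes, sum would be 10 and DW_FORM_data2 would win. */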
12501 else if (ndirs + idx_offset <= 65536)
12502 {
12503 unsigned HOST_WIDE_INT sum = 1;
12504 for (i = 0; i < numfiles; i++)
12505 {
12506 int file_idx = backmap[i];
12507 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12508 sum += size_of_uleb128 (dir_idx);
12509 }
12510 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12511 idx_form = DW_FORM_data2;
12512 }
12513 #ifdef VMS_DEBUGGING_INFO
12514 dw2_asm_output_data (1, 4, "File name entry format count");
12515 #else
12516 dw2_asm_output_data (1, 2, "File name entry format count");
12517 #endif
12518 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12519 dw2_asm_output_data_uleb128 (str_form, "%s",
12520 get_DW_FORM_name (str_form));
12521 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12522 "DW_LNCT_directory_index");
12523 dw2_asm_output_data_uleb128 (idx_form, "%s",
12524 get_DW_FORM_name (idx_form));
12525 #ifdef VMS_DEBUGGING_INFO
12526 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12527 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12528 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12529 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12530 #endif
12531 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12532
12533 output_line_string (str_form, filename0, "File Entry", 0);
12534
12535 /* Include directory index. */
12536 if (idx_form != DW_FORM_udata)
12537 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12538 0, NULL);
12539 else
12540 dw2_asm_output_data_uleb128 (0, NULL);
12541
12542 #ifdef VMS_DEBUGGING_INFO
12543 dw2_asm_output_data_uleb128 (0, NULL);
12544 dw2_asm_output_data_uleb128 (0, NULL);
12545 #endif
12546 }
12547
12548 /* Now write all the file names. */
12549 for (i = 0; i < numfiles; i++)
12550 {
12551 int file_idx = backmap[i];
12552 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12553
12554 #ifdef VMS_DEBUGGING_INFO
12555 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12556
12557 /* Setting these fields can lead to debugger miscomparisons,
12558 but VMS Debug requires them to be set correctly. */
12559
12560 int ver;
12561 long long cdt;
12562 long siz;
12563 int maxfilelen = (strlen (files[file_idx].path)
12564 + dirs[dir_idx].length
12565 + MAX_VMS_VERSION_LEN + 1);
12566 char *filebuf = XALLOCAVEC (char, maxfilelen);
12567
12568 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12569 snprintf (filebuf, maxfilelen, "%s;%d",
12570 files[file_idx].path + dirs[dir_idx].length, ver);
12571
12572 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12573
12574 /* Include directory index. */
12575 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12576 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12577 dir_idx + idx_offset, NULL);
12578 else
12579 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12580
12581 /* Modification time. */
12582 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12583 &cdt, 0, 0, 0) == 0)
12584 ? cdt : 0, NULL);
12585
12586 /* File length in bytes. */
12587 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12588 0, &siz, 0, 0) == 0)
12589 ? siz : 0, NULL);
12590 #else
12591 output_line_string (str_form,
12592 files[file_idx].path + dirs[dir_idx].length,
12593 "File Entry", (unsigned) i + 1);
12594
12595 /* Include directory index. */
12596 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12597 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12598 dir_idx + idx_offset, NULL);
12599 else
12600 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12601
12602 if (dwarf_version >= 5)
12603 continue;
12604
12605 /* Modification time. */
12606 dw2_asm_output_data_uleb128 (0, NULL);
12607
12608 /* File length in bytes. */
12609 dw2_asm_output_data_uleb128 (0, NULL);
12610 #endif /* VMS_DEBUGGING_INFO */
12611 }
12612
12613 if (dwarf_version < 5)
12614 dw2_asm_output_data (1, 0, "End file name table");
12615 }
12616
12617
12618 /* Output one line number table into the .debug_line section. */
12619
12620 static void
12621 output_one_line_info_table (dw_line_info_table *table)
12622 {
12623 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12624 unsigned int current_line = 1;
12625 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12626 dw_line_info_entry *ent, *prev_addr;
12627 size_t i;
12628 unsigned int view;
12629
12630 view = 0;
12631
12632 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12633 {
12634 switch (ent->opcode)
12635 {
12636 case LI_set_address:
12637 /* ??? Unfortunately, we have little choice here currently, and
12638 must always use the most general form. GCC does not know the
12639 address delta itself, so we can't use DW_LNS_advance_pc. Many
12640 ports do have length attributes which will give an upper bound
12641 on the address range. We could perhaps use length attributes
12642 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12643 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12644
12645 view = 0;
12646
12647 /* This can handle any delta. This takes
12648 3+DWARF2_ADDR_SIZE bytes. */
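/* For illustration, with an 8-byte address the bytes emitted below are
0x00 0x09 0x02 followed by the 8 address bytes: an extended opcode
whose uleb128 length is 9 and whose sub-opcode is DW_LNE_set_address
(0x02). */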
12649 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12650 debug_variable_location_views
12651 ? ", reset view to 0" : "");
12652 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12653 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12654 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12655
12656 prev_addr = ent;
12657 break;
12658
12659 case LI_adv_address:
12660 {
12661 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12662 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12663 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12664
12665 view++;
12666
12667 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12668 dw2_asm_output_delta (2, line_label, prev_label,
12669 "from %s to %s", prev_label, line_label);
12670
12671 prev_addr = ent;
12672 break;
12673 }
12674
12675 case LI_set_line:
12676 if (ent->val == current_line)
12677 {
12678 /* We still need to start a new row, so output a copy insn. */
12679 dw2_asm_output_data (1, DW_LNS_copy,
12680 "copy line %u", current_line);
12681 }
12682 else
12683 {
12684 int line_offset = ent->val - current_line;
12685 int line_delta = line_offset - DWARF_LINE_BASE;
12686
12687 current_line = ent->val;
12688 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12689 {
12690 /* This can handle deltas from -10 to 234, using the current
12691 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12692 This takes 1 byte. */
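/* Illustrative sketch: with the DWARF_LINE_BASE of -10 mentioned above,
advancing the line by +1 yields line_delta == 11 and the single byte
DWARF_LINE_OPCODE_BASE + 11; a consumer recovers the line increment as
DWARF_LINE_BASE + ((opcode - DWARF_LINE_OPCODE_BASE) % DWARF_LINE_RANGE)
with no address advance. */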
12693 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12694 "line %u", current_line);
12695 }
12696 else
12697 {
12698 /* This can handle any delta. This takes at least 4 bytes,
12699 depending on the value being encoded. */
12700 dw2_asm_output_data (1, DW_LNS_advance_line,
12701 "advance to line %u", current_line);
12702 dw2_asm_output_data_sleb128 (line_offset, NULL);
12703 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12704 }
12705 }
12706 break;
12707
12708 case LI_set_file:
12709 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12710 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12711 break;
12712
12713 case LI_set_column:
12714 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12715 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12716 break;
12717
12718 case LI_negate_stmt:
12719 current_is_stmt = !current_is_stmt;
12720 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12721 "is_stmt %d", current_is_stmt);
12722 break;
12723
12724 case LI_set_prologue_end:
12725 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12726 "set prologue end");
12727 break;
12728
12729 case LI_set_epilogue_begin:
12730 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12731 "set epilogue begin");
12732 break;
12733
12734 case LI_set_discriminator:
12735 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12736 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12737 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12738 dw2_asm_output_data_uleb128 (ent->val, NULL);
12739 break;
12740 }
12741 }
12742
12743 /* Emit debug info for the address of the end of the table. */
12744 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12745 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12746 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12747 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12748
12749 dw2_asm_output_data (1, 0, "end sequence");
12750 dw2_asm_output_data_uleb128 (1, NULL);
12751 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12752 }
12753
12754 static unsigned int output_line_info_generation;
12755
12756 /* Output the source line number correspondence information. This
12757 information goes into the .debug_line section. */
12758
12759 static void
12760 output_line_info (bool prologue_only)
12761 {
12762 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12763 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12764 bool saw_one = false;
12765 int opc;
12766
12767 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL,
12768 output_line_info_generation);
12769 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL,
12770 output_line_info_generation);
12771 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL,
12772 output_line_info_generation);
12773 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL,
12774 output_line_info_generation++);
12775
12776 if (!XCOFF_DEBUGGING_INFO)
12777 {
12778 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
12779 dw2_asm_output_data (4, 0xffffffff,
12780 "Initial length escape value indicating 64-bit DWARF extension");
12781 dw2_asm_output_delta (dwarf_offset_size, l2, l1,
12782 "Length of Source Line Info");
12783 }
12784
12785 ASM_OUTPUT_LABEL (asm_out_file, l1);
12786
12787 output_dwarf_version ();
12788 if (dwarf_version >= 5)
12789 {
12790 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12791 dw2_asm_output_data (1, 0, "Segment Size");
12792 }
12793 dw2_asm_output_delta (dwarf_offset_size, p2, p1, "Prolog Length");
12794 ASM_OUTPUT_LABEL (asm_out_file, p1);
12795
12796 /* Define the architecture-dependent minimum instruction length (in bytes).
12797 In this implementation of DWARF, this field is used for information
12798 purposes only. Since GCC generates assembly language, we have no
12799 a priori knowledge of how many instruction bytes are generated for each
12800 source line, and therefore can use only the DW_LNE_set_address and
12801 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12802 this as '1', which is "correct enough" for all architectures,
12803 and don't let the target override. */
12804 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12805
12806 if (dwarf_version >= 4)
12807 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12808 "Maximum Operations Per Instruction");
12809 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12810 "Default is_stmt_start flag");
12811 dw2_asm_output_data (1, DWARF_LINE_BASE,
12812 "Line Base Value (Special Opcodes)");
12813 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12814 "Line Range Value (Special Opcodes)");
12815 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12816 "Special Opcode Base");
12817
12818 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12819 {
12820 int n_op_args;
12821 switch (opc)
12822 {
12823 case DW_LNS_advance_pc:
12824 case DW_LNS_advance_line:
12825 case DW_LNS_set_file:
12826 case DW_LNS_set_column:
12827 case DW_LNS_fixed_advance_pc:
12828 case DW_LNS_set_isa:
12829 n_op_args = 1;
12830 break;
12831 default:
12832 n_op_args = 0;
12833 break;
12834 }
12835
12836 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12837 opc, n_op_args);
12838 }
12839
12840 /* Write out the information about the files we use. */
12841 output_file_names ();
12842 ASM_OUTPUT_LABEL (asm_out_file, p2);
12843 if (prologue_only)
12844 {
12845 /* Output the marker for the end of the line number info. */
12846 ASM_OUTPUT_LABEL (asm_out_file, l2);
12847 return;
12848 }
12849
12850 if (separate_line_info)
12851 {
12852 dw_line_info_table *table;
12853 size_t i;
12854
12855 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12856 if (table->in_use)
12857 {
12858 output_one_line_info_table (table);
12859 saw_one = true;
12860 }
12861 }
12862 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12863 {
12864 output_one_line_info_table (cold_text_section_line_info);
12865 saw_one = true;
12866 }
12867
12868 /* ??? Some Darwin linkers crash on a .debug_line section with no
12869 sequences. Further, merely a DW_LNE_end_sequence entry is not
12870 sufficient -- the address column must also be initialized.
12871 Make sure to output at least one set_address/end_sequence pair,
12872 choosing .text since that section is always present. */
12873 if (text_section_line_info->in_use || !saw_one)
12874 output_one_line_info_table (text_section_line_info);
12875
12876 /* Output the marker for the end of the line number info. */
12877 ASM_OUTPUT_LABEL (asm_out_file, l2);
12878 }
12879 \f
12880 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12881
12882 static inline bool
12883 need_endianity_attribute_p (bool reverse)
12884 {
12885 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12886 }
12887
12888 /* Given a pointer to a tree node for some base type, return a pointer to
12889 a DIE that describes the given type. REVERSE is true if the type is
12890 to be interpreted in the reverse storage order wrt the target order.
12891
12892 This routine must only be called for GCC type nodes that correspond to
12893 Dwarf base (fundamental) types. */
12894
12895 static dw_die_ref
12896 base_type_die (tree type, bool reverse)
12897 {
12898 dw_die_ref base_type_result;
12899 enum dwarf_type encoding;
12900 bool fpt_used = false;
12901 struct fixed_point_type_info fpt_info;
12902 tree type_bias = NULL_TREE;
12903
12904 /* If this is a subtype that should not be emitted as a subrange type,
12905 use the base type. See subrange_type_for_debug_p. */
12906 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12907 type = TREE_TYPE (type);
12908
12909 switch (TREE_CODE (type))
12910 {
12911 case INTEGER_TYPE:
12912 if ((dwarf_version >= 4 || !dwarf_strict)
12913 && TYPE_NAME (type)
12914 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12915 && DECL_IS_UNDECLARED_BUILTIN (TYPE_NAME (type))
12916 && DECL_NAME (TYPE_NAME (type)))
12917 {
12918 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12919 if (strcmp (name, "char16_t") == 0
12920 || strcmp (name, "char32_t") == 0)
12921 {
12922 encoding = DW_ATE_UTF;
12923 break;
12924 }
12925 }
12926 if ((dwarf_version >= 3 || !dwarf_strict)
12927 && lang_hooks.types.get_fixed_point_type_info)
12928 {
12929 memset (&fpt_info, 0, sizeof (fpt_info));
12930 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12931 {
12932 fpt_used = true;
12933 encoding = ((TYPE_UNSIGNED (type))
12934 ? DW_ATE_unsigned_fixed
12935 : DW_ATE_signed_fixed);
12936 break;
12937 }
12938 }
12939 if (TYPE_STRING_FLAG (type))
12940 {
12941 if (TYPE_UNSIGNED (type))
12942 encoding = DW_ATE_unsigned_char;
12943 else
12944 encoding = DW_ATE_signed_char;
12945 }
12946 else if (TYPE_UNSIGNED (type))
12947 encoding = DW_ATE_unsigned;
12948 else
12949 encoding = DW_ATE_signed;
12950
12951 if (!dwarf_strict
12952 && lang_hooks.types.get_type_bias)
12953 type_bias = lang_hooks.types.get_type_bias (type);
12954 break;
12955
12956 case REAL_TYPE:
12957 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12958 {
12959 if (dwarf_version >= 3 || !dwarf_strict)
12960 encoding = DW_ATE_decimal_float;
12961 else
12962 encoding = DW_ATE_lo_user;
12963 }
12964 else
12965 encoding = DW_ATE_float;
12966 break;
12967
12968 case FIXED_POINT_TYPE:
12969 if (!(dwarf_version >= 3 || !dwarf_strict))
12970 encoding = DW_ATE_lo_user;
12971 else if (TYPE_UNSIGNED (type))
12972 encoding = DW_ATE_unsigned_fixed;
12973 else
12974 encoding = DW_ATE_signed_fixed;
12975 break;
12976
12977 /* Dwarf2 doesn't know anything about complex ints, so use
12978 a user defined type for it. */
12979 case COMPLEX_TYPE:
12980 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12981 encoding = DW_ATE_complex_float;
12982 else
12983 encoding = DW_ATE_lo_user;
12984 break;
12985
12986 case BOOLEAN_TYPE:
12987 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12988 encoding = DW_ATE_boolean;
12989 break;
12990
12991 default:
12992 /* No other TREE_CODEs are Dwarf fundamental types. */
12993 gcc_unreachable ();
12994 }
12995
12996 base_type_result = new_die_raw (DW_TAG_base_type);
12997
12998 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12999 int_size_in_bytes (type));
13000 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
13001
13002 if (need_endianity_attribute_p (reverse))
13003 add_AT_unsigned (base_type_result, DW_AT_endianity,
13004 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
13005
13006 add_alignment_attribute (base_type_result, type);
13007
13008 if (fpt_used)
13009 {
13010 switch (fpt_info.scale_factor_kind)
13011 {
13012 case fixed_point_scale_factor_binary:
13013 add_AT_int (base_type_result, DW_AT_binary_scale,
13014 fpt_info.scale_factor.binary);
13015 break;
13016
13017 case fixed_point_scale_factor_decimal:
13018 add_AT_int (base_type_result, DW_AT_decimal_scale,
13019 fpt_info.scale_factor.decimal);
13020 break;
13021
13022 case fixed_point_scale_factor_arbitrary:
13023 /* Arbitrary scale factors cannot be described in standard DWARF. */
13024 if (!dwarf_strict)
13025 {
13026 /* Describe the scale factor as a rational constant. */
13027 const dw_die_ref scale_factor
13028 = new_die (DW_TAG_constant, comp_unit_die (), type);
13029
13030 add_scalar_info (scale_factor, DW_AT_GNU_numerator,
13031 fpt_info.scale_factor.arbitrary.numerator,
13032 dw_scalar_form_constant, NULL);
13033 add_scalar_info (scale_factor, DW_AT_GNU_denominator,
13034 fpt_info.scale_factor.arbitrary.denominator,
13035 dw_scalar_form_constant, NULL);
13036
13037 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
13038 }
13039 break;
13040
13041 default:
13042 gcc_unreachable ();
13043 }
13044 }
13045
13046 if (type_bias)
13047 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
13048 dw_scalar_form_constant
13049 | dw_scalar_form_exprloc
13050 | dw_scalar_form_reference,
13051 NULL);
13052
13053 return base_type_result;
13054 }
13055
13056 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
13057 named 'auto' in its type: return true for it, false otherwise. */
13058
13059 static inline bool
13060 is_cxx_auto (tree type)
13061 {
13062 if (is_cxx ())
13063 {
13064 tree name = TYPE_IDENTIFIER (type);
13065 if (name == get_identifier ("auto")
13066 || name == get_identifier ("decltype(auto)"))
13067 return true;
13068 }
13069 return false;
13070 }
13071
13072 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
13073 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
13074
13075 static inline int
13076 is_base_type (tree type)
13077 {
13078 switch (TREE_CODE (type))
13079 {
13080 case INTEGER_TYPE:
13081 case REAL_TYPE:
13082 case FIXED_POINT_TYPE:
13083 case COMPLEX_TYPE:
13084 case BOOLEAN_TYPE:
13085 return 1;
13086
13087 case VOID_TYPE:
13088 case OPAQUE_TYPE:
13089 case ARRAY_TYPE:
13090 case RECORD_TYPE:
13091 case UNION_TYPE:
13092 case QUAL_UNION_TYPE:
13093 case ENUMERAL_TYPE:
13094 case FUNCTION_TYPE:
13095 case METHOD_TYPE:
13096 case POINTER_TYPE:
13097 case REFERENCE_TYPE:
13098 case NULLPTR_TYPE:
13099 case OFFSET_TYPE:
13100 case LANG_TYPE:
13101 case VECTOR_TYPE:
13102 return 0;
13103
13104 default:
13105 if (is_cxx_auto (type))
13106 return 0;
13107 gcc_unreachable ();
13108 }
13109
13110 return 0;
13111 }
13112
13113 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
13114 node, return the size in bits for the type if it is a constant, the
13115 alignment for the type if its size is not constant, zero if the type
13116 has no size, or BITS_PER_WORD if the type actually turns out to be an
13117 ERROR_MARK node. */
13118
13119 static inline unsigned HOST_WIDE_INT
13120 simple_type_size_in_bits (const_tree type)
13121 {
13122 if (TREE_CODE (type) == ERROR_MARK)
13123 return BITS_PER_WORD;
13124 else if (TYPE_SIZE (type) == NULL_TREE)
13125 return 0;
13126 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13127 return tree_to_uhwi (TYPE_SIZE (type));
13128 else
13129 return TYPE_ALIGN (type);
13130 }
13131
13132 /* Similarly, but return an offset_int instead of UHWI. */
13133
13134 static inline offset_int
13135 offset_int_type_size_in_bits (const_tree type)
13136 {
13137 if (TREE_CODE (type) == ERROR_MARK)
13138 return BITS_PER_WORD;
13139 else if (TYPE_SIZE (type) == NULL_TREE)
13140 return 0;
13141 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13142 return wi::to_offset (TYPE_SIZE (type));
13143 else
13144 return TYPE_ALIGN (type);
13145 }
13146
13147 /* Given a pointer to a tree node for a subrange type, return a pointer
13148 to a DIE that describes the given type. */
13149
13150 static dw_die_ref
13151 subrange_type_die (tree type, tree low, tree high, tree bias,
13152 dw_die_ref context_die)
13153 {
13154 dw_die_ref subrange_die;
13155 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13156
13157 if (context_die == NULL)
13158 context_die = comp_unit_die ();
13159
13160 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13161
13162 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13163 {
13164 /* The size of the subrange type and its base type do not match,
13165 so we need to generate a size attribute for the subrange type. */
13166 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13167 }
13168
13169 add_alignment_attribute (subrange_die, type);
13170
13171 if (low)
13172 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13173 if (high)
13174 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13175 if (bias && !dwarf_strict)
13176 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13177 dw_scalar_form_constant
13178 | dw_scalar_form_exprloc
13179 | dw_scalar_form_reference,
13180 NULL);
13181
13182 return subrange_die;
13183 }
13184
13185 /* Returns the (const and/or volatile) cv_qualifiers associated with
13186 the decl node. This will normally be augmented with the
13187 cv_qualifiers of the underlying type in add_type_attribute. */
13188
13189 static int
13190 decl_quals (const_tree decl)
13191 {
13192 return ((TREE_READONLY (decl)
13193 /* The C++ front-end correctly marks reference-typed
13194 variables as readonly, but from a language (and debug
13195 info) standpoint they are not const-qualified. */
13196 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13197 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13198 | (TREE_THIS_VOLATILE (decl)
13199 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13200 }
13201
13202 /* Determine the TYPE whose qualifiers match the largest strict subset
13203 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13204 qualifiers outside QUAL_MASK. */
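/* For instance (illustrative): for a "const volatile int" with
TYPE_QUALS CONST|VOLATILE, if a "const int" variant of the same base
type exists, this returns TYPE_QUAL_CONST, so only the missing
volatile qualifier needs a new DIE in modified_type_die. */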
13205
13206 static int
13207 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13208 {
13209 tree t;
13210 int best_rank = 0, best_qual = 0, max_rank;
13211
13212 type_quals &= qual_mask;
13213 max_rank = popcount_hwi (type_quals) - 1;
13214
13215 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13216 t = TYPE_NEXT_VARIANT (t))
13217 {
13218 int q = TYPE_QUALS (t) & qual_mask;
13219
13220 if ((q & type_quals) == q && q != type_quals
13221 && check_base_type (t, type))
13222 {
13223 int rank = popcount_hwi (q);
13224
13225 if (rank > best_rank)
13226 {
13227 best_rank = rank;
13228 best_qual = q;
13229 }
13230 }
13231 }
13232
13233 return best_qual;
13234 }
13235
13236 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13237 static const dwarf_qual_info_t dwarf_qual_info[] =
13238 {
13239 { TYPE_QUAL_CONST, DW_TAG_const_type },
13240 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13241 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13242 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13243 };
13244 static const unsigned int dwarf_qual_info_size
13245 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13246
13247 /* If DIE is a qualified DIE of some base DIE with the same parent,
13248 return the base DIE, otherwise return NULL. Set MASK to the
13249 qualifiers added compared to the returned DIE. */
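/* For example (illustrative): given a DW_TAG_const_type DIE whose
DW_AT_type is a sibling DW_TAG_volatile_type DIE of "int", calling
this with a sufficient DEPTH returns the "int" DIE and ORs
TYPE_QUAL_CONST and TYPE_QUAL_VOLATILE into *MASK. */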
13250
13251 static dw_die_ref
13252 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13253 {
13254 unsigned int i;
13255 for (i = 0; i < dwarf_qual_info_size; i++)
13256 if (die->die_tag == dwarf_qual_info[i].t)
13257 break;
13258 if (i == dwarf_qual_info_size)
13259 return NULL;
13260 if (vec_safe_length (die->die_attr) != 1)
13261 return NULL;
13262 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13263 if (type == NULL || type->die_parent != die->die_parent)
13264 return NULL;
13265 *mask |= dwarf_qual_info[i].q;
13266 if (depth)
13267 {
13268 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13269 if (ret)
13270 return ret;
13271 }
13272 return type;
13273 }
13274
13275 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13276 entry that chains the modifiers specified by CV_QUALS in front of the
13277 given type. REVERSE is true if the type is to be interpreted in the
13278 reverse storage order wrt the target order. */
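/* As an illustrative example (assuming no partially qualified variant
is reused): for a "const volatile int" this ends up producing a
DW_TAG_volatile_type DIE whose DW_AT_type refers to a DW_TAG_const_type
DIE, which in turn refers to the DW_TAG_base_type DIE for "int"; the
outermost (volatile) DIE is what gets returned. */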
13279
13280 static dw_die_ref
13281 modified_type_die (tree type, int cv_quals, bool reverse,
13282 dw_die_ref context_die)
13283 {
13284 enum tree_code code = TREE_CODE (type);
13285 dw_die_ref mod_type_die;
13286 dw_die_ref sub_die = NULL;
13287 tree item_type = NULL;
13288 tree qualified_type;
13289 tree name, low, high;
13290 dw_die_ref mod_scope;
13291 /* Only these cv-qualifiers are currently handled. */
13292 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13293 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13294 ENCODE_QUAL_ADDR_SPACE(~0U));
13295 const bool reverse_base_type
13296 = need_endianity_attribute_p (reverse) && is_base_type (type);
13297
13298 if (code == ERROR_MARK)
13299 return NULL;
13300
13301 if (lang_hooks.types.get_debug_type)
13302 {
13303 tree debug_type = lang_hooks.types.get_debug_type (type);
13304
13305 if (debug_type != NULL_TREE && debug_type != type)
13306 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13307 }
13308
13309 cv_quals &= cv_qual_mask;
13310
13311 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13312 tag modifier (and not an attribute) that old consumers won't be
13313 able to handle. */
13314 if (dwarf_version < 3)
13315 cv_quals &= ~TYPE_QUAL_RESTRICT;
13316
13317 /* Likewise, don't emit DW_TAG_atomic_type before DWARFv5. */
13318 if (dwarf_version < 5)
13319 cv_quals &= ~TYPE_QUAL_ATOMIC;
13320
13321 /* See if we already have the appropriately qualified variant of
13322 this type. */
13323 qualified_type = get_qualified_type (type, cv_quals);
13324
13325 if (qualified_type == sizetype)
13326 {
13327 /* Try not to expose the internal sizetype type's name. */
13328 if (TYPE_NAME (qualified_type)
13329 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13330 {
13331 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13332
13333 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13334 && (TYPE_PRECISION (t)
13335 == TYPE_PRECISION (qualified_type))
13336 && (TYPE_UNSIGNED (t)
13337 == TYPE_UNSIGNED (qualified_type)));
13338 qualified_type = t;
13339 }
13340 else if (qualified_type == sizetype
13341 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13342 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13343 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13344 qualified_type = size_type_node;
13345 if (type == sizetype)
13346 type = qualified_type;
13347 }
13348
13349 /* If we do, then we can just use its DIE, if it exists. */
13350 if (qualified_type)
13351 {
13352 mod_type_die = lookup_type_die (qualified_type);
13353
13354 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13355 dealt with specially: the DIE with the attribute, if it exists, is
13356 placed immediately after the regular DIE for the same base type. */
13357 if (mod_type_die
13358 && (!reverse_base_type
13359 || ((mod_type_die = mod_type_die->die_sib) != NULL
13360 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13361 return mod_type_die;
13362 }
13363
13364 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13365
13366 /* Handle C typedef types. */
13367 if (name
13368 && TREE_CODE (name) == TYPE_DECL
13369 && DECL_ORIGINAL_TYPE (name)
13370 && !DECL_ARTIFICIAL (name))
13371 {
13372 tree dtype = TREE_TYPE (name);
13373
13374 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13375 if (qualified_type == dtype && !reverse_base_type)
13376 {
13377 tree origin = decl_ultimate_origin (name);
13378
13379 /* Typedef variants that have an abstract origin don't get their own
13380 type DIE (see gen_typedef_die), so fall back on the ultimate
13381 abstract origin instead. */
13382 if (origin != NULL && origin != name)
13383 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13384 context_die);
13385
13386 /* For a named type, use the typedef. */
13387 gen_type_die (qualified_type, context_die);
13388 return lookup_type_die (qualified_type);
13389 }
13390 else
13391 {
13392 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13393 dquals &= cv_qual_mask;
13394 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13395 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13396 /* cv-unqualified version of named type. Just use
13397 the unnamed type to which it refers. */
13398 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13399 reverse, context_die);
13400 /* Else cv-qualified version of named type; fall through. */
13401 }
13402 }
13403
13404 mod_scope = scope_die_for (type, context_die);
13405
13406 if (cv_quals)
13407 {
13408 int sub_quals = 0, first_quals = 0;
13409 unsigned i;
13410 dw_die_ref first = NULL, last = NULL;
13411
13412 /* Determine a lesser qualified type that most closely matches
13413 this one. Then generate DW_TAG_* entries for the remaining
13414 qualifiers. */
13415 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13416 cv_qual_mask);
13417 if (sub_quals && use_debug_types)
13418 {
13419 bool needed = false;
13420 /* If emitting type units, make sure the order of qualifiers
13421 is canonical. Thus, start from unqualified type if
13422 an earlier qualifier is missing in sub_quals, but some later
13423 one is present there. */
13424 for (i = 0; i < dwarf_qual_info_size; i++)
13425 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13426 needed = true;
13427 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13428 {
13429 sub_quals = 0;
13430 break;
13431 }
13432 }
13433 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13434 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13435 {
13436 /* As not all intermediate qualified DIEs have corresponding
13437 tree types, ensure that qualified DIEs in the same scope
13438 as their DW_AT_type are emitted after their DW_AT_type,
13439 only with other qualified DIEs for the same type possibly
13440 in between them. Determine the range of such qualified
13441 DIEs now (first being the base type, last being the corresponding
13442 last qualified DIE for it). */
13443 unsigned int count = 0;
13444 first = qualified_die_p (mod_type_die, &first_quals,
13445 dwarf_qual_info_size);
13446 if (first == NULL)
13447 first = mod_type_die;
13448 gcc_assert ((first_quals & ~sub_quals) == 0);
13449 for (count = 0, last = first;
13450 count < (1U << dwarf_qual_info_size);
13451 count++, last = last->die_sib)
13452 {
13453 int quals = 0;
13454 if (last == mod_scope->die_child)
13455 break;
13456 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13457 != first)
13458 break;
13459 }
13460 }
13461
13462 for (i = 0; i < dwarf_qual_info_size; i++)
13463 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13464 {
13465 dw_die_ref d;
13466 if (first && first != last)
13467 {
13468 for (d = first->die_sib; ; d = d->die_sib)
13469 {
13470 int quals = 0;
13471 qualified_die_p (d, &quals, dwarf_qual_info_size);
13472 if (quals == (first_quals | dwarf_qual_info[i].q))
13473 break;
13474 if (d == last)
13475 {
13476 d = NULL;
13477 break;
13478 }
13479 }
13480 if (d)
13481 {
13482 mod_type_die = d;
13483 continue;
13484 }
13485 }
13486 if (first)
13487 {
13488 d = new_die_raw (dwarf_qual_info[i].t);
13489 add_child_die_after (mod_scope, d, last);
13490 last = d;
13491 }
13492 else
13493 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13494 if (mod_type_die)
13495 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13496 mod_type_die = d;
13497 first_quals |= dwarf_qual_info[i].q;
13498 }
13499 }
13500 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13501 {
13502 dwarf_tag tag = DW_TAG_pointer_type;
13503 if (code == REFERENCE_TYPE)
13504 {
13505 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13506 tag = DW_TAG_rvalue_reference_type;
13507 else
13508 tag = DW_TAG_reference_type;
13509 }
13510 mod_type_die = new_die (tag, mod_scope, type);
13511
13512 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13513 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13514 add_alignment_attribute (mod_type_die, type);
13515 item_type = TREE_TYPE (type);
13516
13517 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13518 if (!ADDR_SPACE_GENERIC_P (as))
13519 {
13520 int action = targetm.addr_space.debug (as);
13521 if (action >= 0)
13522 {
13523 /* Positive values indicate an address_class. */
13524 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13525 }
13526 else
13527 {
13528 /* Negative values indicate an (inverted) segment base reg. */
13529 dw_loc_descr_ref d
13530 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13531 add_AT_loc (mod_type_die, DW_AT_segment, d);
13532 }
13533 }
13534 }
13535 else if (code == INTEGER_TYPE
13536 && TREE_TYPE (type) != NULL_TREE
13537 && subrange_type_for_debug_p (type, &low, &high))
13538 {
13539 tree bias = NULL_TREE;
13540 if (lang_hooks.types.get_type_bias)
13541 bias = lang_hooks.types.get_type_bias (type);
13542 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13543 item_type = TREE_TYPE (type);
13544 }
13545 else if (is_base_type (type))
13546 {
13547 mod_type_die = base_type_die (type, reverse);
13548
13549 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13550 if (reverse_base_type)
13551 {
13552 dw_die_ref after_die
13553 = modified_type_die (type, cv_quals, false, context_die);
13554 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13555 }
13556 else
13557 add_child_die (comp_unit_die (), mod_type_die);
13558
13559 add_pubtype (type, mod_type_die);
13560 }
13561 else
13562 {
13563 gen_type_die (type, context_die);
13564
13565 /* We have to get the type_main_variant here (and pass that to the
13566 `lookup_type_die' routine) because the ..._TYPE node we have
13567 might simply be a *copy* of some original type node (where the
13568 copy was created to help us keep track of typedef names) and
13569 that copy might have a different TYPE_UID from the original
13570 ..._TYPE node. */
13571 if (TREE_CODE (type) == FUNCTION_TYPE
13572 || TREE_CODE (type) == METHOD_TYPE)
13573 {
13574 /* For function/method types, can't just use type_main_variant here,
13575 because that can have different ref-qualifiers for C++,
13576 but try to canonicalize. */
13577 tree main = TYPE_MAIN_VARIANT (type);
13578 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13579 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13580 && check_base_type (t, main)
13581 && check_lang_type (t, type))
13582 return lookup_type_die (t);
13583 return lookup_type_die (type);
13584 }
13585 else if (TREE_CODE (type) != VECTOR_TYPE
13586 && TREE_CODE (type) != ARRAY_TYPE)
13587 return lookup_type_die (type_main_variant (type));
13588 else
13589 /* Vectors have the debugging information in the type,
13590 not the main variant. */
13591 return lookup_type_die (type);
13592 }
13593
13594 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13595 don't output a DW_TAG_typedef, since there isn't one in the
13596 user's program; just attach a DW_AT_name to the type.
13597 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13598 if the base type already has the same name. */
13599 if (name
13600 && ((TREE_CODE (name) != TYPE_DECL
13601 && (qualified_type == TYPE_MAIN_VARIANT (type)
13602 || (cv_quals == TYPE_UNQUALIFIED)))
13603 || (TREE_CODE (name) == TYPE_DECL
13604 && TREE_TYPE (name) == qualified_type
13605 && DECL_NAME (name))))
13606 {
13607 if (TREE_CODE (name) == TYPE_DECL)
13608 /* Could just call add_name_and_src_coords_attributes here,
13609 but since this is a builtin type it doesn't have any
13610 useful source coordinates anyway. */
13611 name = DECL_NAME (name);
13612 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13613 }
13614 /* This probably indicates a bug. */
13615 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13616 {
13617 name = TYPE_IDENTIFIER (type);
13618 add_name_attribute (mod_type_die,
13619 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13620 }
13621
13622 if (qualified_type && !reverse_base_type)
13623 equate_type_number_to_die (qualified_type, mod_type_die);
13624
13625 if (item_type)
13626 /* We must do this after the equate_type_number_to_die call, in case
13627 this is a recursive type. This ensures that the modified_type_die
13628 recursion will terminate even if the type is recursive. Recursive
13629 types are possible in Ada. */
13630 sub_die = modified_type_die (item_type,
13631 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13632 reverse,
13633 context_die);
13634
13635 if (sub_die != NULL)
13636 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13637
13638 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13639 if (TYPE_ARTIFICIAL (type))
13640 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13641
13642 return mod_type_die;
13643 }
13644
13645 /* Generate DIEs for the generic parameters of T.
13646 T must be either a generic type or a generic function.
13647 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13648
13649 static void
13650 gen_generic_params_dies (tree t)
13651 {
13652 tree parms, args;
13653 int parms_num, i;
13654 dw_die_ref die = NULL;
13655 int non_default;
13656
13657 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13658 return;
13659
13660 if (TYPE_P (t))
13661 die = lookup_type_die (t);
13662 else if (DECL_P (t))
13663 die = lookup_decl_die (t);
13664
13665 gcc_assert (die);
13666
13667 parms = lang_hooks.get_innermost_generic_parms (t);
13668 if (!parms)
13669 /* T has no generic parameters. It means T is neither a generic type
13670 nor a generic function. End of story. */
13671 return;
13672
13673 parms_num = TREE_VEC_LENGTH (parms);
13674 args = lang_hooks.get_innermost_generic_args (t);
13675 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13676 non_default = int_cst_value (TREE_CHAIN (args));
13677 else
13678 non_default = TREE_VEC_LENGTH (args);
13679 for (i = 0; i < parms_num; i++)
13680 {
13681 tree parm, arg, arg_pack_elems;
13682 dw_die_ref parm_die;
13683
13684 parm = TREE_VEC_ELT (parms, i);
13685 arg = TREE_VEC_ELT (args, i);
13686 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13687 gcc_assert (parm && TREE_VALUE (parm) && arg);
13688
13689 if (parm && TREE_VALUE (parm) && arg)
13690 {
13691 /* If PARM represents a template parameter pack,
13692 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13693 by DW_TAG_template_*_parameter DIEs for the argument
13694 pack elements of ARG. Note that ARG would then be
13695 an argument pack. */
13696 if (arg_pack_elems)
13697 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13698 arg_pack_elems,
13699 die);
13700 else
13701 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13702 true /* emit name */, die);
13703 if (i >= non_default)
13704 add_AT_flag (parm_die, DW_AT_default_value, 1);
13705 }
13706 }
13707 }
13708
13709 /* Create and return a DIE for PARM which should be
13710 the representation of a generic type parameter.
13711 For instance, in the C++ front end, PARM would be a template parameter.
13712 ARG is the argument to PARM.
13713 If EMIT_NAME_P is true, the DIE will have a DW_AT_name attribute set to
13714 the name of PARM.
13715 PARENT_DIE is the parent DIE to which the newly created DIE should be
13716 added as a child node. */
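/* As a hedged illustration: for a C++ "template <typename T, int N>",
T would be a TYPE_DECL and get a DW_TAG_template_type_param DIE whose
DW_AT_type is the type bound to T, while N would be a PARM_DECL and
get a DW_TAG_template_value_param DIE carrying the argument's type and
(once cgraph is ready) its DW_AT_const_value. */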
13717
13718 static dw_die_ref
13719 generic_parameter_die (tree parm, tree arg,
13720 bool emit_name_p,
13721 dw_die_ref parent_die)
13722 {
13723 dw_die_ref tmpl_die = NULL;
13724 const char *name = NULL;
13725
13726 /* C++20 accepts class literals as template parameters, and var
13727 decls with initializers represent them. The VAR_DECLs would be
13728 rejected, but we can take the DECL_INITIAL constructor and
13729 attempt to expand it. */
13730 if (arg && VAR_P (arg))
13731 arg = DECL_INITIAL (arg);
13732
13733 if (!parm || !DECL_NAME (parm) || !arg)
13734 return NULL;
13735
13736 /* We support non-type generic parameters and arguments,
13737 type generic parameters and arguments, as well as
13738 generic generic parameters (a.k.a. template template parameters in C++)
13739 and arguments. */
13740 if (TREE_CODE (parm) == PARM_DECL)
13741 /* PARM is a nontype generic parameter */
13742 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13743 else if (TREE_CODE (parm) == TYPE_DECL)
13744 /* PARM is a type generic parameter. */
13745 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13746 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13747 /* PARM is a generic generic parameter.
13748 Its DIE is a GNU extension. It shall have a
13749 DW_AT_name attribute to represent the name of the template template
13750 parameter, and a DW_AT_GNU_template_name attribute to represent the
13751 name of the template template argument. */
13752 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13753 parent_die, parm);
13754 else
13755 gcc_unreachable ();
13756
13757 if (tmpl_die)
13758 {
13759 tree tmpl_type;
13760
13761 /* If PARM is a generic parameter pack, it means we are
13762 emitting debug info for a template argument pack element.
13763 In other words, ARG is a template argument pack element.
13764 In that case, we don't emit any DW_AT_name attribute for
13765 the DIE. */
13766 if (emit_name_p)
13767 {
13768 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13769 gcc_assert (name);
13770 add_AT_string (tmpl_die, DW_AT_name, name);
13771 }
13772
13773 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13774 {
13775 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13776 TMPL_DIE should have a child DW_AT_type attribute that is set
13777 to the type of the argument to PARM, which is ARG.
13778 If PARM is a type generic parameter, TMPL_DIE should have a
13779 child DW_AT_type that is set to ARG. */
13780 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13781 add_type_attribute (tmpl_die, tmpl_type,
13782 (TREE_THIS_VOLATILE (tmpl_type)
13783 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13784 false, parent_die);
13785 }
13786 else
13787 {
13788 /* So TMPL_DIE is a DIE representing a generic generic template
13789 parameter, a.k.a. a template template parameter in C++, and ARG
13790 is a template. */
13791
13792 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13793 to the name of the argument. */
13794 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13795 if (name)
13796 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13797 }
13798
13799 if (TREE_CODE (parm) == PARM_DECL)
13800 /* So PARM is a non-type generic parameter.
13801 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13802 attribute of TMPL_DIE whose value represents the value
13803 of ARG.
13804 We must be careful here:
13805 the value of ARG might reference some function decls.
13806 We might currently be emitting debug info for a generic
13807 type, and since types are emitted before function decls, we
13808 don't know whether the function decls referenced by ARG will
13809 actually be emitted after the cgraph computations.
13810 So we must defer the generation of the DW_AT_const_value
13811 until after cgraph is ready. */
13812 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13813 }
13814
13815 return tmpl_die;
13816 }
13817
13818 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE
13819 representing PARM_PACK, which must be a template parameter pack.
13820 The returned DIE will be a child DIE of PARENT_DIE. */
13821
13822 static dw_die_ref
13823 template_parameter_pack_die (tree parm_pack,
13824 tree parm_pack_args,
13825 dw_die_ref parent_die)
13826 {
13827 dw_die_ref die;
13828 int j;
13829
13830 gcc_assert (parent_die && parm_pack);
13831
13832 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13833 add_name_and_src_coords_attributes (die, parm_pack);
13834 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13835 generic_parameter_die (parm_pack,
13836 TREE_VEC_ELT (parm_pack_args, j),
13837 false /* Don't emit DW_AT_name */,
13838 die);
13839 return die;
13840 }
13841
13842 /* Return the DBX register number described by a given RTL node. */
13843
13844 static unsigned int
13845 dbx_reg_number (const_rtx rtl)
13846 {
13847 unsigned regno = REGNO (rtl);
13848
13849 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13850
13851 #ifdef LEAF_REG_REMAP
13852 if (crtl->uses_only_leaf_regs)
13853 {
13854 int leaf_reg = LEAF_REG_REMAP (regno);
13855 if (leaf_reg != -1)
13856 regno = (unsigned) leaf_reg;
13857 }
13858 #endif
13859
13860 regno = DBX_REGISTER_NUMBER (regno);
13861 gcc_assert (regno != INVALID_REGNUM);
13862 return regno;
13863 }
13864
13865 /* Optionally add a DW_OP_piece term to a location description expression.
13866 DW_OP_piece is only added if the location description expression does
13867 not already end with DW_OP_piece. */
13868
13869 static void
13870 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13871 {
13872 dw_loc_descr_ref loc;
13873
13874 if (*list_head != NULL)
13875 {
13876 /* Find the end of the chain. */
13877 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13878 ;
13879
13880 if (loc->dw_loc_opc != DW_OP_piece)
13881 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13882 }
13883 }
13884
13885 /* Return a location descriptor that designates a machine register or
13886 zero if there is none. */
13887
13888 static dw_loc_descr_ref
13889 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13890 {
13891 rtx regs;
13892
13893 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13894 return 0;
13895
13896 /* We only use "frame base" when we're sure we're talking about the
13897 post-prologue local stack frame. We do this by *not* running
13898 register elimination until this point, and recognizing the special
13899 argument pointer and soft frame pointer rtx's.
13900 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13901 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13902 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13903 {
13904 dw_loc_descr_ref result = NULL;
13905
13906 if (dwarf_version >= 4 || !dwarf_strict)
13907 {
13908 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13909 initialized);
13910 if (result)
13911 add_loc_descr (&result,
13912 new_loc_descr (DW_OP_stack_value, 0, 0));
13913 }
13914 return result;
13915 }
13916
13917 regs = targetm.dwarf_register_span (rtl);
13918
13919 if (REG_NREGS (rtl) > 1 || regs)
13920 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13921 else
13922 {
13923 unsigned int dbx_regnum = dbx_reg_number (rtl);
13924 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13925 return 0;
13926 return one_reg_loc_descriptor (dbx_regnum, initialized);
13927 }
13928 }
13929
13930 /* Return a location descriptor that designates a machine register for
13931 a given hard register number. */
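/* E.g. DWARF register 3 becomes the one-byte DW_OP_reg3, while DWARF
register 40 becomes DW_OP_regx with a uleb128 operand of 40 (two
bytes). */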
13932
13933 static dw_loc_descr_ref
13934 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13935 {
13936 dw_loc_descr_ref reg_loc_descr;
13937
13938 if (regno <= 31)
13939 reg_loc_descr
13940 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13941 else
13942 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13943
13944 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13945 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13946
13947 return reg_loc_descr;
13948 }
13949
13950 /* Given an RTL of a register, return a location descriptor that
13951 designates a value that spans more than one register. */
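/* As an illustration: a 64-bit value living in two 32-bit registers is
described piecewise, e.g. DW_OP_reg0 DW_OP_piece 4 DW_OP_reg1
DW_OP_piece 4, which is what the loops below build via
one_reg_loc_descriptor and add_loc_descr_op_piece. */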
13952
13953 static dw_loc_descr_ref
13954 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13955 enum var_init_status initialized)
13956 {
13957 int size, i;
13958 dw_loc_descr_ref loc_result = NULL;
13959
13960 /* Simple, contiguous registers. */
13961 if (regs == NULL_RTX)
13962 {
13963 unsigned reg = REGNO (rtl);
13964 int nregs;
13965
13966 #ifdef LEAF_REG_REMAP
13967 if (crtl->uses_only_leaf_regs)
13968 {
13969 int leaf_reg = LEAF_REG_REMAP (reg);
13970 if (leaf_reg != -1)
13971 reg = (unsigned) leaf_reg;
13972 }
13973 #endif
13974
13975 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13976 nregs = REG_NREGS (rtl);
13977
13978 /* At present we only track constant-sized pieces. */
13979 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13980 return NULL;
13981 size /= nregs;
13982
13983 loc_result = NULL;
13984 while (nregs--)
13985 {
13986 dw_loc_descr_ref t;
13987
13988 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13989 VAR_INIT_STATUS_INITIALIZED);
13990 add_loc_descr (&loc_result, t);
13991 add_loc_descr_op_piece (&loc_result, size);
13992 ++reg;
13993 }
13994 return loc_result;
13995 }
13996
13997 /* Now onto stupid register sets in non-contiguous locations. */
13998
13999 gcc_assert (GET_CODE (regs) == PARALLEL);
14000
14001 /* At present we only track constant-sized pieces. */
14002 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
14003 return NULL;
14004 loc_result = NULL;
14005
14006 for (i = 0; i < XVECLEN (regs, 0); ++i)
14007 {
14008 dw_loc_descr_ref t;
14009
14010 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
14011 VAR_INIT_STATUS_INITIALIZED);
14012 add_loc_descr (&loc_result, t);
14013 add_loc_descr_op_piece (&loc_result, size);
14014 }
14015
14016 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
14017 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14018 return loc_result;
14019 }
14020
14021 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
14022
14023 /* Return a location descriptor that designates a constant i,
14024 as a compound operation from constant (i >> shift), constant shift
14025 and DW_OP_shl. */
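/* An illustrative case (cf. the size comparisons in int_loc_descriptor
below): the constant 0x600000000 can be emitted as DW_OP_lit24
DW_OP_lit30 DW_OP_shl, i.e. 3 bytes, where DW_OP_const8u would need
9 bytes and DW_OP_constu 6. */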
14026
14027 static dw_loc_descr_ref
14028 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14029 {
14030 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
14031 add_loc_descr (&ret, int_loc_descriptor (shift));
14032 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
14033 return ret;
14034 }
14035
14036 /* Return a location descriptor that designates constant POLY_I. */
14037
14038 static dw_loc_descr_ref
14039 int_loc_descriptor (poly_int64 poly_i)
14040 {
14041 enum dwarf_location_atom op;
14042
14043 HOST_WIDE_INT i;
14044 if (!poly_i.is_constant (&i))
14045 {
14046 /* Create location descriptions for the non-constant part and
14047 add any constant offset at the end. */
14048 dw_loc_descr_ref ret = NULL;
14049 HOST_WIDE_INT constant = poly_i.coeffs[0];
14050 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
14051 {
14052 HOST_WIDE_INT coeff = poly_i.coeffs[j];
14053 if (coeff != 0)
14054 {
14055 dw_loc_descr_ref start = ret;
14056 unsigned int factor;
14057 int bias;
14058 unsigned int regno = targetm.dwarf_poly_indeterminate_value
14059 (j, &factor, &bias);
14060
14061 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
14062 add COEFF * (REGNO / FACTOR) now and subtract
14063 COEFF * BIAS from the final constant part. */
14064 constant -= coeff * bias;
14065 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
14066 if (coeff % factor == 0)
14067 coeff /= factor;
14068 else
14069 {
14070 int amount = exact_log2 (factor);
14071 gcc_assert (amount >= 0);
14072 add_loc_descr (&ret, int_loc_descriptor (amount));
14073 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14074 }
14075 if (coeff != 1)
14076 {
14077 add_loc_descr (&ret, int_loc_descriptor (coeff));
14078 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14079 }
14080 if (start)
14081 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
14082 }
14083 }
14084 loc_descr_plus_const (&ret, constant);
14085 return ret;
14086 }
14087
14088 /* Pick the smallest representation of a constant, rather than just
14089 defaulting to the LEB encoding. */
14090 if (i >= 0)
14091 {
14092 int clz = clz_hwi (i);
14093 int ctz = ctz_hwi (i);
14094 if (i <= 31)
14095 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
14096 else if (i <= 0xff)
14097 op = DW_OP_const1u;
14098 else if (i <= 0xffff)
14099 op = DW_OP_const2u;
14100 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14101 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14102 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
14103 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
14104 while DW_OP_const4u is 5 bytes. */
14105 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
14106 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14107 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14108 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
14109 while DW_OP_const4u is 5 bytes. */
14110 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14111
14112 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14113 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14114 <= 4)
14115 {
14116 /* As i >= 2**31, the double cast above will yield a negative number.
14117 Since wrapping is defined in DWARF expressions we can output big
14118 positive integers as small negative ones, regardless of the size
14119 of host wide ints.
14120
14121 Here, since the evaluator will handle 32-bit values and since i >=
14122 2**31, we know it's going to be interpreted as a negative literal:
14123 store it this way if we can do better than 5 bytes this way. */
14124 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14125 }
14126 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14127 op = DW_OP_const4u;
14128
14129 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14130 least 6 bytes: see if we can do better before falling back to it. */
14131 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14132 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14133 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14134 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14135 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14136 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14137 >= HOST_BITS_PER_WIDE_INT)
14138 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14139 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14140 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14141 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14142 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14143 && size_of_uleb128 (i) > 6)
14144 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14145 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14146 else
14147 op = DW_OP_constu;
14148 }
14149 else
14150 {
14151 if (i >= -0x80)
14152 op = DW_OP_const1s;
14153 else if (i >= -0x8000)
14154 op = DW_OP_const2s;
14155 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14156 {
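/* E.g. for i == -0x10000, -i is DW_OP_lit16 DW_OP_lit12 DW_OP_shl
   (3 bytes), so appending DW_OP_neg needs 4 bytes and beats the
   5 bytes of DW_OP_const4s.  */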
14157 if (size_of_int_loc_descriptor (i) < 5)
14158 {
14159 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14160 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14161 return ret;
14162 }
14163 op = DW_OP_const4s;
14164 }
14165 else
14166 {
14167 if (size_of_int_loc_descriptor (i)
14168 < (unsigned long) 1 + size_of_sleb128 (i))
14169 {
14170 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14171 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14172 return ret;
14173 }
14174 op = DW_OP_consts;
14175 }
14176 }
14177
14178 return new_loc_descr (op, i, 0);
14179 }
14180
14181 /* Likewise, for unsigned constants. */
14182
14183 static dw_loc_descr_ref
14184 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14185 {
14186 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14187 const unsigned HOST_WIDE_INT max_uint
14188 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14189
14190 /* If possible, use the clever signed constants handling. */
14191 if (i <= max_int)
14192 return int_loc_descriptor ((HOST_WIDE_INT) i);
14193
14194 /* Here, we are left with positive numbers that cannot be represented as
14195 HOST_WIDE_INT, i.e.:
14196 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14197
14198 Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes a lot
14199 of bytes, whereas it may be better to output a negative integer: thanks to
14200 integer wrapping, we know that:
14201 x = x - 2 ** (DWARF2_ADDR_SIZE * 8)
14202 = x - 2 * (max (HOST_WIDE_INT) + 1)
14203 So numbers close to max (unsigned HOST_WIDE_INT) can be represented as
14204 small negative integers. Let's try that in cases where it will clearly improve
14205 the encoding: there is no gain turning DW_OP_const4u into
14206 DW_OP_const4s. */
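/* For instance, with DWARF2_ADDR_SIZE == 8 and a 64-bit HOST_WIDE_INT,
   i == 0xffffffffffffff00 yields second_shift == -256 below, which is
   emitted as DW_OP_const2s -256 (3 bytes) instead of DW_OP_const8u
   (9 bytes).  */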
14207 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14208 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14209 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14210 {
14211 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14212
14213 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14214 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14215 const HOST_WIDE_INT second_shift
14216 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14217
14218 /* So we finally have:
14219 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14220 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14221 return int_loc_descriptor (second_shift);
14222 }
14223
14224 /* Last chance: fall back to a simple constant operation. */
14225 return new_loc_descr
14226 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14227 ? DW_OP_const4u
14228 : DW_OP_const8u,
14229 i, 0);
14230 }
14231
14232 /* Generate and return a location description that computes the unsigned
14233 comparison of the two stack top entries (a OP b where b is the top-most
14234 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14235 LE_EXPR, GT_EXPR or GE_EXPR. */
14236
14237 static dw_loc_descr_ref
14238 uint_comparison_loc_list (enum tree_code kind)
14239 {
14240 enum dwarf_location_atom op, flip_op;
14241 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14242
14243 switch (kind)
14244 {
14245 case LT_EXPR:
14246 op = DW_OP_lt;
14247 break;
14248 case LE_EXPR:
14249 op = DW_OP_le;
14250 break;
14251 case GT_EXPR:
14252 op = DW_OP_gt;
14253 break;
14254 case GE_EXPR:
14255 op = DW_OP_ge;
14256 break;
14257 default:
14258 gcc_unreachable ();
14259 }
14260
14261 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14262 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14263
14264 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14265 possible to perform unsigned comparisons: we just have to distinguish
14266 two cases:
14267
14268 1. when a and b have the same sign (as signed integers); then we should
14269 return: a OP(signed) b;
14270
14271 2. when a is a negative signed integer while b is a positive one, then a
14272 is a greater unsigned integer than b; likewise when a and b's roles
14273 are flipped.
14274
14275 So first, compare the sign of the two operands. */
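/* The expression generated below is therefore:
     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
     <signed OP> DW_OP_skip <L2>
     L1: <flipped OP>
     L2: DW_OP_nop  */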
14276 ret = new_loc_descr (DW_OP_over, 0, 0);
14277 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14278 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14279 /* If they have different signs (i.e. they have different sign bits), then
14280 the stack top value now has the sign bit set and thus it's smaller than
14281 zero. */
14282 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14283 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14284 add_loc_descr (&ret, bra_node);
14285
14286 /* We are in case 1. At this point, we know both operands have the same
14287 sign, so it's safe to use the built-in signed comparison. */
14288 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14289 add_loc_descr (&ret, jmp_node);
14290
14291 /* We are in case 2. Here, we know both operands do not have the same sign,
14292 so we have to flip the signed comparison. */
14293 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14294 tmp = new_loc_descr (flip_op, 0, 0);
14295 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14296 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14297 add_loc_descr (&ret, tmp);
14298
14299 /* This dummy operation is necessary to make the two branches join. */
14300 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14301 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14302 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14303 add_loc_descr (&ret, tmp);
14304
14305 return ret;
14306 }
14307
14308 /* Likewise, but takes the location description lists (might be destructive on
14309 them). Return NULL if either is NULL or if concatenation fails. */
14310
14311 static dw_loc_list_ref
14312 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14313 enum tree_code kind)
14314 {
14315 if (left == NULL || right == NULL)
14316 return NULL;
14317
14318 add_loc_list (&left, right);
14319 if (left == NULL)
14320 return NULL;
14321
14322 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14323 return left;
14324 }
14325
14326 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14327 without actually allocating it. */
14328
14329 static unsigned long
14330 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14331 {
14332 return size_of_int_loc_descriptor (i >> shift)
14333 + size_of_int_loc_descriptor (shift)
14334 + 1;
14335 }
14336
14337 /* Return size_of_locs (int_loc_descriptor (i)) without
14338 actually allocating it. */
14339
14340 static unsigned long
14341 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14342 {
14343 unsigned long s;
14344
14345 if (i >= 0)
14346 {
14347 int clz, ctz;
14348 if (i <= 31)
14349 return 1;
14350 else if (i <= 0xff)
14351 return 2;
14352 else if (i <= 0xffff)
14353 return 3;
14354 clz = clz_hwi (i);
14355 ctz = ctz_hwi (i);
14356 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14357 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14358 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14359 - clz - 5);
14360 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14361 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14362 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14363 - clz - 8);
14364 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14365 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14366 <= 4)
14367 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14368 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14369 return 5;
14370 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14371 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14372 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14373 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14374 - clz - 8);
14375 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14376 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14377 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14378 - clz - 16);
14379 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14380 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14381 && s > 6)
14382 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14383 - clz - 32);
14384 else
14385 return 1 + s;
14386 }
14387 else
14388 {
14389 if (i >= -0x80)
14390 return 2;
14391 else if (i >= -0x8000)
14392 return 3;
14393 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14394 {
14395 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14396 {
14397 s = size_of_int_loc_descriptor (-i) + 1;
14398 if (s < 5)
14399 return s;
14400 }
14401 return 5;
14402 }
14403 else
14404 {
14405 unsigned long r = 1 + size_of_sleb128 (i);
14406 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14407 {
14408 s = size_of_int_loc_descriptor (-i) + 1;
14409 if (s < r)
14410 return s;
14411 }
14412 return r;
14413 }
14414 }
14415 }
14416
14417 /* Return a loc description representing the "address" of an integer value.
14418 This can appear only as a toplevel expression. */
14419
14420 static dw_loc_descr_ref
14421 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14422 {
14423 int litsize;
14424 dw_loc_descr_ref loc_result = NULL;
14425
14426 if (!(dwarf_version >= 4 || !dwarf_strict))
14427 return NULL;
14428
14429 litsize = size_of_int_loc_descriptor (i);
14430 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14431 is more compact. For DW_OP_stack_value we need:
14432 litsize + 1 (DW_OP_stack_value)
14433 and for DW_OP_implicit_value:
14434 1 (DW_OP_implicit_value) + 1 (length) + size. */
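/* E.g. for SIZE == 4 and I == 5 (assuming DWARF2_ADDR_SIZE >= 4) this
   picks DW_OP_lit5 DW_OP_stack_value (2 bytes) over
   DW_OP_implicit_value 4 <4-byte block> (6 bytes).  */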
14435 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14436 {
14437 loc_result = int_loc_descriptor (i);
14438 add_loc_descr (&loc_result,
14439 new_loc_descr (DW_OP_stack_value, 0, 0));
14440 return loc_result;
14441 }
14442
14443 loc_result = new_loc_descr (DW_OP_implicit_value,
14444 size, 0);
14445 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14446 loc_result->dw_loc_oprnd2.v.val_int = i;
14447 return loc_result;
14448 }
14449
14450 /* Return a location descriptor that designates a base+offset location. */
14451
14452 static dw_loc_descr_ref
14453 based_loc_descr (rtx reg, poly_int64 offset,
14454 enum var_init_status initialized)
14455 {
14456 unsigned int regno;
14457 dw_loc_descr_ref result;
14458 dw_fde_ref fde = cfun->fde;
14459
14460 /* We only use "frame base" when we're sure we're talking about the
14461 post-prologue local stack frame. We do this by *not* running
14462 register elimination until this point, and recognizing the special
14463 argument pointer and soft frame pointer rtx's. */
14464 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14465 {
14466 rtx elim = (ira_use_lra_p
14467 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14468 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14469
14470 if (elim != reg)
14471 {
14472 /* Allow hard frame pointer here even if frame pointer
14473 isn't used since hard frame pointer is encoded with
14474 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14475 not hard frame pointer directly. */
14476 elim = strip_offset_and_add (elim, &offset);
14477 gcc_assert (elim == hard_frame_pointer_rtx
14478 || elim == stack_pointer_rtx);
14479
14480 /* If drap register is used to align stack, use frame
14481 pointer + offset to access stack variables. If stack
14482 is aligned without drap, use stack pointer + offset to
14483 access stack variables. */
14484 if (crtl->stack_realign_tried
14485 && reg == frame_pointer_rtx)
14486 {
14487 int base_reg
14488 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14489 ? HARD_FRAME_POINTER_REGNUM
14490 : REGNO (elim));
14491 return new_reg_loc_descr (base_reg, offset);
14492 }
14493
14494 gcc_assert (frame_pointer_fb_offset_valid);
14495 offset += frame_pointer_fb_offset;
14496 HOST_WIDE_INT const_offset;
14497 if (offset.is_constant (&const_offset))
14498 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14499 else
14500 {
14501 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14502 loc_descr_plus_const (&ret, offset);
14503 return ret;
14504 }
14505 }
14506 }
14507
14508 regno = REGNO (reg);
14509 #ifdef LEAF_REG_REMAP
14510 if (crtl->uses_only_leaf_regs)
14511 {
14512 int leaf_reg = LEAF_REG_REMAP (regno);
14513 if (leaf_reg != -1)
14514 regno = (unsigned) leaf_reg;
14515 }
14516 #endif
14517 regno = DWARF_FRAME_REGNUM (regno);
14518
14519 HOST_WIDE_INT const_offset;
14520 if (!optimize && fde
14521 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14522 && offset.is_constant (&const_offset))
14523 {
14524 /* Use cfa+offset to represent the location of arguments passed
14525 on the stack when drap is used to align stack.
14526 Only do this when not optimizing; for optimized code var-tracking
14527 is supposed to track where the arguments live, and the register
14528 used as vdrap or drap in some spot might be used for something
14529 else in another part of the routine. */
14530 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14531 }
14532
14533 result = new_reg_loc_descr (regno, offset);
14534
14535 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14536 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14537
14538 return result;
14539 }
14540
14541 /* Return true if this RTL expression describes a base+offset calculation. */
14542
14543 static inline int
14544 is_based_loc (const_rtx rtl)
14545 {
14546 return (GET_CODE (rtl) == PLUS
14547 && ((REG_P (XEXP (rtl, 0))
14548 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14549 && CONST_INT_P (XEXP (rtl, 1)))));
14550 }
14551
14552 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14553 failed. */
14554
14555 static dw_loc_descr_ref
14556 tls_mem_loc_descriptor (rtx mem)
14557 {
14558 tree base;
14559 dw_loc_descr_ref loc_result;
14560
14561 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14562 return NULL;
14563
14564 base = get_base_address (MEM_EXPR (mem));
14565 if (base == NULL
14566 || !VAR_P (base)
14567 || !DECL_THREAD_LOCAL_P (base))
14568 return NULL;
14569
14570 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14571 if (loc_result == NULL)
14572 return NULL;
14573
14574 if (maybe_ne (MEM_OFFSET (mem), 0))
14575 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14576
14577 return loc_result;
14578 }
14579
14580 /* Output debug info about the reason why we failed to expand an expression
14581 as a DWARF expression. */
14582
14583 static void
14584 expansion_failed (tree expr, rtx rtl, char const *reason)
14585 {
14586 if (dump_file && (dump_flags & TDF_DETAILS))
14587 {
14588 fprintf (dump_file, "Failed to expand as dwarf: ");
14589 if (expr)
14590 print_generic_expr (dump_file, expr, dump_flags);
14591 if (rtl)
14592 {
14593 fprintf (dump_file, "\n");
14594 print_rtl (dump_file, rtl);
14595 }
14596 fprintf (dump_file, "\nReason: %s\n", reason);
14597 }
14598 }
14599
14600 /* Helper function for const_ok_for_output. */
14601
14602 static bool
14603 const_ok_for_output_1 (rtx rtl)
14604 {
14605 if (targetm.const_not_ok_for_debug_p (rtl))
14606 {
14607 if (GET_CODE (rtl) != UNSPEC)
14608 {
14609 expansion_failed (NULL_TREE, rtl,
14610 "Expression rejected for debug by the backend.\n");
14611 return false;
14612 }
14613
14614 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14615 the target hook doesn't explicitly allow it in debug info, assume
14616 we can't express it in the debug info. */
14617 /* Don't complain about TLS UNSPECs, those are just too hard to
14618 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14619 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14620 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14621 if (flag_checking
14622 && (XVECLEN (rtl, 0) == 0
14623 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14624 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14625 inform (current_function_decl
14626 ? DECL_SOURCE_LOCATION (current_function_decl)
14627 : UNKNOWN_LOCATION,
14628 #if NUM_UNSPEC_VALUES > 0
14629 "non-delegitimized UNSPEC %s (%d) found in variable location",
14630 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14631 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14632 #else
14633 "non-delegitimized UNSPEC %d found in variable location",
14634 #endif
14635 XINT (rtl, 1));
14636 expansion_failed (NULL_TREE, rtl,
14637 "UNSPEC hasn't been delegitimized.\n");
14638 return false;
14639 }
14640
14641 if (CONST_POLY_INT_P (rtl))
14642 return false;
14643
14644 /* FIXME: Refer to PR60655. It is possible for simplification
14645 of rtl expressions in var tracking to produce such expressions.
14646 We should really identify / validate expressions
14647 enclosed in CONST that can be handled by assemblers on various
14648 targets and only handle legitimate cases here. */
14649 switch (GET_CODE (rtl))
14650 {
14651 case SYMBOL_REF:
14652 break;
14653 case NOT:
14654 case NEG:
14655 return false;
14656 case PLUS:
14657 {
14658 /* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
14659 operands. */
14660 subrtx_var_iterator::array_type array;
14661 bool first = false;
14662 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14663 if (SYMBOL_REF_P (*iter)
14664 || LABEL_P (*iter)
14665 || GET_CODE (*iter) == UNSPEC)
14666 {
14667 first = true;
14668 break;
14669 }
14670 if (!first)
14671 return true;
14672 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14673 if (SYMBOL_REF_P (*iter)
14674 || LABEL_P (*iter)
14675 || GET_CODE (*iter) == UNSPEC)
14676 return false;
14677 return true;
14678 }
14679 case MINUS:
14680 {
14681 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14682 appear in the second operand of MINUS. */
14683 subrtx_var_iterator::array_type array;
14684 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14685 if (SYMBOL_REF_P (*iter)
14686 || LABEL_P (*iter)
14687 || GET_CODE (*iter) == UNSPEC)
14688 return false;
14689 return true;
14690 }
14691 default:
14692 return true;
14693 }
14694
14695 if (CONSTANT_POOL_ADDRESS_P (rtl))
14696 {
14697 bool marked;
14698 get_pool_constant_mark (rtl, &marked);
14699 /* If all references to this pool constant were optimized away,
14700 it was not output and thus we can't represent it. */
14701 if (!marked)
14702 {
14703 expansion_failed (NULL_TREE, rtl,
14704 "Constant was removed from constant pool.\n");
14705 return false;
14706 }
14707 }
14708
14709 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14710 return false;
14711
14712 /* Avoid references to external symbols in debug info: on several targets
14713 the linker might even refuse to link when linking a shared library,
14714 and in many other cases the relocations for .debug_info/.debug_loc are
14715 dropped, so the address becomes zero anyway. Hidden symbols, which are
14716 guaranteed to be defined within the same shared library or executable, are fine. */
14717 if (SYMBOL_REF_EXTERNAL_P (rtl))
14718 {
14719 tree decl = SYMBOL_REF_DECL (rtl);
14720
14721 if (decl == NULL || !targetm.binds_local_p (decl))
14722 {
14723 expansion_failed (NULL_TREE, rtl,
14724 "Symbol not defined in current TU.\n");
14725 return false;
14726 }
14727 }
14728
14729 return true;
14730 }
14731
14732 /* Return true if constant RTL can be emitted in DW_OP_addr or
14733 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14734 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14735
14736 static bool
14737 const_ok_for_output (rtx rtl)
14738 {
14739 if (GET_CODE (rtl) == SYMBOL_REF)
14740 return const_ok_for_output_1 (rtl);
14741
14742 if (GET_CODE (rtl) == CONST)
14743 {
14744 subrtx_var_iterator::array_type array;
14745 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14746 if (!const_ok_for_output_1 (*iter))
14747 return false;
14748 return true;
14749 }
14750
14751 return true;
14752 }
14753
14754 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14755 if possible, NULL otherwise. */
14756
14757 static dw_die_ref
14758 base_type_for_mode (machine_mode mode, bool unsignedp)
14759 {
14760 dw_die_ref type_die;
14761 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14762
14763 if (type == NULL)
14764 return NULL;
14765 switch (TREE_CODE (type))
14766 {
14767 case INTEGER_TYPE:
14768 case REAL_TYPE:
14769 break;
14770 default:
14771 return NULL;
14772 }
14773 type_die = lookup_type_die (type);
14774 if (!type_die)
14775 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14776 comp_unit_die ());
14777 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14778 return NULL;
14779 return type_die;
14780 }
14781
14782 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14783 type matching MODE, or, if MODE is narrower than or as wide as
14784 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14785 possible. */
14786
14787 static dw_loc_descr_ref
14788 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14789 {
14790 machine_mode outer_mode = mode;
14791 dw_die_ref type_die;
14792 dw_loc_descr_ref cvt;
14793
14794 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14795 {
14796 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14797 return op;
14798 }
14799 type_die = base_type_for_mode (outer_mode, 1);
14800 if (type_die == NULL)
14801 return NULL;
14802 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14803 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14804 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14805 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14806 add_loc_descr (&op, cvt);
14807 return op;
14808 }
14809
14810 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14811
14812 static dw_loc_descr_ref
14813 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14814 dw_loc_descr_ref op1)
14815 {
14816 dw_loc_descr_ref ret = op0;
14817 add_loc_descr (&ret, op1);
14818 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14819 if (STORE_FLAG_VALUE != 1)
14820 {
14821 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14822 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14823 }
14824 return ret;
14825 }
14826
14827 /* Subroutine of scompare_loc_descriptor for the case in which we're
14828 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14829 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14830
14831 static dw_loc_descr_ref
14832 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14833 scalar_int_mode op_mode,
14834 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14835 {
14836 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14837 dw_loc_descr_ref cvt;
14838
14839 if (type_die == NULL)
14840 return NULL;
14841 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14842 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14843 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14844 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14845 add_loc_descr (&op0, cvt);
14846 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14847 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14848 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14849 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14850 add_loc_descr (&op1, cvt);
14851 return compare_loc_descriptor (op, op0, op1);
14852 }
14853
14854 /* Subroutine of scompare_loc_descriptor for the case in which we're
14855 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14856 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14857
14858 static dw_loc_descr_ref
14859 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14860 scalar_int_mode op_mode,
14861 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14862 {
14863 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14864 /* For eq/ne, if the operands are known to be zero-extended,
14865 there is no need to do the fancy shifting up. */
14866 if (op == DW_OP_eq || op == DW_OP_ne)
14867 {
14868 dw_loc_descr_ref last0, last1;
14869 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14870 ;
14871 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14872 ;
14873 /* deref_size zero extends, and for constants we can check
14874 whether they are zero extended or not. */
14875 if (((last0->dw_loc_opc == DW_OP_deref_size
14876 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14877 || (CONST_INT_P (XEXP (rtl, 0))
14878 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14879 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14880 && ((last1->dw_loc_opc == DW_OP_deref_size
14881 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14882 || (CONST_INT_P (XEXP (rtl, 1))
14883 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14884 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14885 return compare_loc_descriptor (op, op0, op1);
14886
14887 /* EQ/NE comparison against constant in narrower type than
14888 DWARF2_ADDR_SIZE can be performed either as
14889 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14890 DW_OP_{eq,ne}
14891 or
14892 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14893 DW_OP_{eq,ne}. Pick whatever is shorter. */
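/* E.g. for an eq/ne comparison of a QImode value against 5 with
   DWARF2_ADDR_SIZE == 8, the mask form costs DW_OP_const1u 0xff
   DW_OP_and ... DW_OP_lit5 (4 bytes) while shifting both sides up by
   56 bits would cost 7 bytes, so the mask form is chosen.  */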
14894 if (CONST_INT_P (XEXP (rtl, 1))
14895 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14896 && (size_of_int_loc_descriptor (shift) + 1
14897 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14898 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14899 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14900 & GET_MODE_MASK (op_mode))))
14901 {
14902 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14903 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14904 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14905 & GET_MODE_MASK (op_mode));
14906 return compare_loc_descriptor (op, op0, op1);
14907 }
14908 }
14909 add_loc_descr (&op0, int_loc_descriptor (shift));
14910 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14911 if (CONST_INT_P (XEXP (rtl, 1)))
14912 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14913 else
14914 {
14915 add_loc_descr (&op1, int_loc_descriptor (shift));
14916 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14917 }
14918 return compare_loc_descriptor (op, op0, op1);
14919 }
14920
14921 /* Return location descriptor for signed comparison OP RTL. */
14922
14923 static dw_loc_descr_ref
14924 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14925 machine_mode mem_mode)
14926 {
14927 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14928 dw_loc_descr_ref op0, op1;
14929
14930 if (op_mode == VOIDmode)
14931 op_mode = GET_MODE (XEXP (rtl, 1));
14932 if (op_mode == VOIDmode)
14933 return NULL;
14934
14935 scalar_int_mode int_op_mode;
14936 if (dwarf_strict
14937 && dwarf_version < 5
14938 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14939 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14940 return NULL;
14941
14942 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14943 VAR_INIT_STATUS_INITIALIZED);
14944 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14945 VAR_INIT_STATUS_INITIALIZED);
14946
14947 if (op0 == NULL || op1 == NULL)
14948 return NULL;
14949
14950 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14951 {
14952 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14953 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14954
14955 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14956 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14957 }
14958 return compare_loc_descriptor (op, op0, op1);
14959 }
14960
14961 /* Return location descriptor for unsigned comparison OP RTL. */
14962
14963 static dw_loc_descr_ref
14964 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14965 machine_mode mem_mode)
14966 {
14967 dw_loc_descr_ref op0, op1;
14968
14969 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14970 if (test_op_mode == VOIDmode)
14971 test_op_mode = GET_MODE (XEXP (rtl, 1));
14972
14973 scalar_int_mode op_mode;
14974 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14975 return NULL;
14976
14977 if (dwarf_strict
14978 && dwarf_version < 5
14979 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14980 return NULL;
14981
14982 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14983 VAR_INIT_STATUS_INITIALIZED);
14984 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14985 VAR_INIT_STATUS_INITIALIZED);
14986
14987 if (op0 == NULL || op1 == NULL)
14988 return NULL;
14989
14990 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14991 {
14992 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14993 dw_loc_descr_ref last0, last1;
14994 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14995 ;
14996 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14997 ;
14998 if (CONST_INT_P (XEXP (rtl, 0)))
14999 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
15000 /* deref_size zero extends, so no need to mask it again. */
15001 else if (last0->dw_loc_opc != DW_OP_deref_size
15002 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
15003 {
15004 add_loc_descr (&op0, int_loc_descriptor (mask));
15005 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
15006 }
15007 if (CONST_INT_P (XEXP (rtl, 1)))
15008 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
15009 /* deref_size zero extends, so no need to mask it again. */
15010 else if (last1->dw_loc_opc != DW_OP_deref_size
15011 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
15012 {
15013 add_loc_descr (&op1, int_loc_descriptor (mask));
15014 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
15015 }
15016 }
15017 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
15018 {
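/* Adding the bias 2**(DWARF2_ADDR_SIZE * 8 - 1) to both operands flips
   their sign bits, so the signed comparison performed by the DWARF
   evaluator yields the unsigned ordering of the original values.  */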
15019 HOST_WIDE_INT bias = 1;
15020 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
15021 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
15022 if (CONST_INT_P (XEXP (rtl, 1)))
15023 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
15024 + INTVAL (XEXP (rtl, 1)));
15025 else
15026 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
15027 bias, 0));
15028 }
15029 return compare_loc_descriptor (op, op0, op1);
15030 }
15031
15032 /* Return location descriptor for {U,S}{MIN,MAX}. */
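/* The expression generated below keeps the original A and B on the stack
   (via DW_OP_dup, DW_OP_swap and DW_OP_over) and compares possibly masked,
   biased or widened copies of them; the DW_OP_bra then either drops B
   directly or swaps first, so the minimum (resp. maximum) of the original
   values is left on top.  */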
15033
15034 static dw_loc_descr_ref
15035 minmax_loc_descriptor (rtx rtl, machine_mode mode,
15036 machine_mode mem_mode)
15037 {
15038 enum dwarf_location_atom op;
15039 dw_loc_descr_ref op0, op1, ret;
15040 dw_loc_descr_ref bra_node, drop_node;
15041
15042 scalar_int_mode int_mode;
15043 if (dwarf_strict
15044 && dwarf_version < 5
15045 && (!is_a <scalar_int_mode> (mode, &int_mode)
15046 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
15047 return NULL;
15048
15049 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15050 VAR_INIT_STATUS_INITIALIZED);
15051 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15052 VAR_INIT_STATUS_INITIALIZED);
15053
15054 if (op0 == NULL || op1 == NULL)
15055 return NULL;
15056
15057 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
15058 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
15059 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
15060 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
15061 {
15062 /* Checked by the caller. */
15063 int_mode = as_a <scalar_int_mode> (mode);
15064 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
15065 {
15066 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
15067 add_loc_descr (&op0, int_loc_descriptor (mask));
15068 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
15069 add_loc_descr (&op1, int_loc_descriptor (mask));
15070 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
15071 }
15072 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15073 {
15074 HOST_WIDE_INT bias = 1;
15075 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
15076 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
15077 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
15078 }
15079 }
15080 else if (is_a <scalar_int_mode> (mode, &int_mode)
15081 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
15082 {
15083 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
15084 add_loc_descr (&op0, int_loc_descriptor (shift));
15085 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
15086 add_loc_descr (&op1, int_loc_descriptor (shift));
15087 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
15088 }
15089 else if (is_a <scalar_int_mode> (mode, &int_mode)
15090 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15091 {
15092 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
15093 dw_loc_descr_ref cvt;
15094 if (type_die == NULL)
15095 return NULL;
15096 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15097 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15098 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15099 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15100 add_loc_descr (&op0, cvt);
15101 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15102 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15103 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15104 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15105 add_loc_descr (&op1, cvt);
15106 }
15107
15108 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
15109 op = DW_OP_lt;
15110 else
15111 op = DW_OP_gt;
15112 ret = op0;
15113 add_loc_descr (&ret, op1);
15114 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
15115 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15116 add_loc_descr (&ret, bra_node);
15117 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15118 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15119 add_loc_descr (&ret, drop_node);
15120 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15121 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15122 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15123 && is_a <scalar_int_mode> (mode, &int_mode)
15124 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15125 ret = convert_descriptor_to_mode (int_mode, ret);
15126 return ret;
15127 }
15128
15129 /* Helper function for mem_loc_descriptor. Perform the OP binary op,
15130 but convert the arguments to TYPE_DIE first and convert the result
15131 back to unsigned afterwards. */
15132
15133 static dw_loc_descr_ref
15134 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15135 scalar_int_mode mode, machine_mode mem_mode)
15136 {
15137 dw_loc_descr_ref cvt, op0, op1;
15138
15139 if (type_die == NULL)
15140 return NULL;
15141 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15142 VAR_INIT_STATUS_INITIALIZED);
15143 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15144 VAR_INIT_STATUS_INITIALIZED);
15145 if (op0 == NULL || op1 == NULL)
15146 return NULL;
15147 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15148 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15149 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15150 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15151 add_loc_descr (&op0, cvt);
15152 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15153 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15154 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15155 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15156 add_loc_descr (&op1, cvt);
15157 add_loc_descr (&op0, op1);
15158 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15159 return convert_descriptor_to_mode (mode, op0);
15160 }
15161
15162 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15163 const0 is DW_OP_lit0 or corresponding typed constant,
15164 const1 is DW_OP_lit1 or corresponding typed constant
15165 and constMSB is constant with just the MSB bit set
15166 for the mode):
15167 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15168 L1: const0 DW_OP_swap
15169 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15170 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15171 L3: DW_OP_drop
15172 L4: DW_OP_nop
15173
15174 CTZ is similar:
15175 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15176 L1: const0 DW_OP_swap
15177 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15178 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15179 L3: DW_OP_drop
15180 L4: DW_OP_nop
15181
15182 FFS is similar:
15183 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15184 L1: const1 DW_OP_swap
15185 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15186 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15187 L3: DW_OP_drop
15188 L4: DW_OP_nop */
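/* In all three sequences the second stack entry holds the running count
   (started at const0, or const1 for FFS) and the top holds the value:
   while the tested bit (the MSB for CLZ, the LSB for CTZ and FFS) is
   clear, the value is shifted left (CLZ) or right (CTZ/FFS) by one and
   the count is bumped with DW_OP_plus_uconst <1>.  A zero input never
   enters the loop: the initial DW_OP_bra falls through and constV
   (const0 for FFS) is pushed directly.  */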
15189
15190 static dw_loc_descr_ref
15191 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15192 machine_mode mem_mode)
15193 {
15194 dw_loc_descr_ref op0, ret, tmp;
15195 HOST_WIDE_INT valv;
15196 dw_loc_descr_ref l1jump, l1label;
15197 dw_loc_descr_ref l2jump, l2label;
15198 dw_loc_descr_ref l3jump, l3label;
15199 dw_loc_descr_ref l4jump, l4label;
15200 rtx msb;
15201
15202 if (GET_MODE (XEXP (rtl, 0)) != mode)
15203 return NULL;
15204
15205 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15206 VAR_INIT_STATUS_INITIALIZED);
15207 if (op0 == NULL)
15208 return NULL;
15209 ret = op0;
15210 if (GET_CODE (rtl) == CLZ)
15211 {
15212 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15213 valv = GET_MODE_BITSIZE (mode);
15214 }
15215 else if (GET_CODE (rtl) == FFS)
15216 valv = 0;
15217 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15218 valv = GET_MODE_BITSIZE (mode);
15219 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15220 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15221 add_loc_descr (&ret, l1jump);
15222 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15223 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15224 VAR_INIT_STATUS_INITIALIZED);
15225 if (tmp == NULL)
15226 return NULL;
15227 add_loc_descr (&ret, tmp);
15228 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15229 add_loc_descr (&ret, l4jump);
15230 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15231 ? const1_rtx : const0_rtx,
15232 mode, mem_mode,
15233 VAR_INIT_STATUS_INITIALIZED);
15234 if (l1label == NULL)
15235 return NULL;
15236 add_loc_descr (&ret, l1label);
15237 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15238 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15239 add_loc_descr (&ret, l2label);
15240 if (GET_CODE (rtl) != CLZ)
15241 msb = const1_rtx;
15242 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15243 msb = GEN_INT (HOST_WIDE_INT_1U
15244 << (GET_MODE_BITSIZE (mode) - 1));
15245 else
15246 msb = immed_wide_int_const
15247 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15248 GET_MODE_PRECISION (mode)), mode);
15249 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15250 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15251 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15252 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15253 else
15254 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15255 VAR_INIT_STATUS_INITIALIZED);
15256 if (tmp == NULL)
15257 return NULL;
15258 add_loc_descr (&ret, tmp);
15259 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15260 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15261 add_loc_descr (&ret, l3jump);
15262 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15263 VAR_INIT_STATUS_INITIALIZED);
15264 if (tmp == NULL)
15265 return NULL;
15266 add_loc_descr (&ret, tmp);
15267 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15268 ? DW_OP_shl : DW_OP_shr, 0, 0));
15269 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15270 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15271 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15272 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15273 add_loc_descr (&ret, l2jump);
15274 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15275 add_loc_descr (&ret, l3label);
15276 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15277 add_loc_descr (&ret, l4label);
15278 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15279 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15280 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15281 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15282 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15283 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15284 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15285 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15286 return ret;
15287 }
15288
15289 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15290 const1 is DW_OP_lit1 or corresponding typed constant):
15291 const0 DW_OP_swap
15292 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15293 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15294 L2: DW_OP_drop
15295
15296 PARITY is similar:
15297 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15298 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15299 L2: DW_OP_drop */
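/* I.e. the running total is kept below the value on the stack; each pass
   folds the low bit of the value into the total (DW_OP_plus for POPCOUNT,
   DW_OP_xor for PARITY) and halves the value with const1 DW_OP_shr, and
   the final DW_OP_drop discards the exhausted value, leaving the total.  */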
15300
15301 static dw_loc_descr_ref
15302 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15303 machine_mode mem_mode)
15304 {
15305 dw_loc_descr_ref op0, ret, tmp;
15306 dw_loc_descr_ref l1jump, l1label;
15307 dw_loc_descr_ref l2jump, l2label;
15308
15309 if (GET_MODE (XEXP (rtl, 0)) != mode)
15310 return NULL;
15311
15312 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15313 VAR_INIT_STATUS_INITIALIZED);
15314 if (op0 == NULL)
15315 return NULL;
15316 ret = op0;
15317 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15318 VAR_INIT_STATUS_INITIALIZED);
15319 if (tmp == NULL)
15320 return NULL;
15321 add_loc_descr (&ret, tmp);
15322 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15323 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15324 add_loc_descr (&ret, l1label);
15325 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15326 add_loc_descr (&ret, l2jump);
15327 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15328 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15329 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15330 VAR_INIT_STATUS_INITIALIZED);
15331 if (tmp == NULL)
15332 return NULL;
15333 add_loc_descr (&ret, tmp);
15334 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15335 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15336 ? DW_OP_plus : DW_OP_xor, 0, 0));
15337 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15338 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15339 VAR_INIT_STATUS_INITIALIZED);
15340 add_loc_descr (&ret, tmp);
15341 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15342 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15343 add_loc_descr (&ret, l1jump);
15344 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15345 add_loc_descr (&ret, l2label);
15346 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15347 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15348 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15349 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15350 return ret;
15351 }
15352
15353 /* BSWAP (constS is initial shift count, either 56 or 24):
15354 constS const0
15355 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15356 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15357 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15358 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15359 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
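/* I.e. the shift counter S starts at constS (the bit width minus 8) and
   decreases by 8 each pass; each pass extracts (value >> (constS - S)) & 0xff
   and ors it into the result shifted left by S, reassembling the bytes in
   reverse order.  The final DW_OP_drop DW_OP_swap DW_OP_drop discards the
   counter and the original value, leaving the byte-swapped result.  */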
15360
15361 static dw_loc_descr_ref
15362 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15363 machine_mode mem_mode)
15364 {
15365 dw_loc_descr_ref op0, ret, tmp;
15366 dw_loc_descr_ref l1jump, l1label;
15367 dw_loc_descr_ref l2jump, l2label;
15368
15369 if (BITS_PER_UNIT != 8
15370 || (GET_MODE_BITSIZE (mode) != 32
15371 && GET_MODE_BITSIZE (mode) != 64))
15372 return NULL;
15373
15374 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15375 VAR_INIT_STATUS_INITIALIZED);
15376 if (op0 == NULL)
15377 return NULL;
15378
15379 ret = op0;
15380 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15381 mode, mem_mode,
15382 VAR_INIT_STATUS_INITIALIZED);
15383 if (tmp == NULL)
15384 return NULL;
15385 add_loc_descr (&ret, tmp);
15386 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15387 VAR_INIT_STATUS_INITIALIZED);
15388 if (tmp == NULL)
15389 return NULL;
15390 add_loc_descr (&ret, tmp);
15391 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15392 add_loc_descr (&ret, l1label);
15393 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15394 mode, mem_mode,
15395 VAR_INIT_STATUS_INITIALIZED);
15396 add_loc_descr (&ret, tmp);
15397 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15398 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15399 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15400 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15401 VAR_INIT_STATUS_INITIALIZED);
15402 if (tmp == NULL)
15403 return NULL;
15404 add_loc_descr (&ret, tmp);
15405 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15406 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15407 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15408 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15409 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15410 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15411 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15412 VAR_INIT_STATUS_INITIALIZED);
15413 add_loc_descr (&ret, tmp);
15414 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15415 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15416 add_loc_descr (&ret, l2jump);
15417 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15418 VAR_INIT_STATUS_INITIALIZED);
15419 add_loc_descr (&ret, tmp);
15420 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15421 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15422 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15423 add_loc_descr (&ret, l1jump);
15424 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15425 add_loc_descr (&ret, l2label);
15426 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15427 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15428 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15429 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15430 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15431 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15432 return ret;
15433 }
15434
15435 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15436 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15437 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15438 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15439
15440 ROTATERT is similar:
15441 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15442 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15443 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
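/* Both sequences compute (X << N) | (X >> (BITSIZE - N)) (with the roles
   of the two shifts exchanged for ROTATERT); the bracketed constMASK
   DW_OP_and steps only appear when the mode is narrower than the DWARF
   address size and keep the intermediate results within the mode.  */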
15444
15445 static dw_loc_descr_ref
15446 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15447 machine_mode mem_mode)
15448 {
15449 rtx rtlop1 = XEXP (rtl, 1);
15450 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15451 int i;
15452
15453 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15454 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15455 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15456 VAR_INIT_STATUS_INITIALIZED);
15457 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15458 VAR_INIT_STATUS_INITIALIZED);
15459 if (op0 == NULL || op1 == NULL)
15460 return NULL;
15461 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15462 for (i = 0; i < 2; i++)
15463 {
15464 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15465 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15466 mode, mem_mode,
15467 VAR_INIT_STATUS_INITIALIZED);
15468 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15469 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15470 ? DW_OP_const4u
15471 : HOST_BITS_PER_WIDE_INT == 64
15472 ? DW_OP_const8u : DW_OP_constu,
15473 GET_MODE_MASK (mode), 0);
15474 else
15475 mask[i] = NULL;
15476 if (mask[i] == NULL)
15477 return NULL;
15478 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15479 }
15480 ret = op0;
15481 add_loc_descr (&ret, op1);
15482 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15483 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15484 if (GET_CODE (rtl) == ROTATERT)
15485 {
15486 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15487 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15488 GET_MODE_BITSIZE (mode), 0));
15489 }
15490 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15491 if (mask[0] != NULL)
15492 add_loc_descr (&ret, mask[0]);
15493 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15494 if (mask[1] != NULL)
15495 {
15496 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15497 add_loc_descr (&ret, mask[1]);
15498 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15499 }
15500 if (GET_CODE (rtl) == ROTATE)
15501 {
15502 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15503 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15504 GET_MODE_BITSIZE (mode), 0));
15505 }
15506 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15507 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15508 return ret;
15509 }
15510
15511 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15512 for DEBUG_PARAMETER_REF RTL. */
15513
15514 static dw_loc_descr_ref
15515 parameter_ref_descriptor (rtx rtl)
15516 {
15517 dw_loc_descr_ref ret;
15518 dw_die_ref ref;
15519
15520 if (dwarf_strict)
15521 return NULL;
15522 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15523 /* With LTO during LTRANS we get the late DIE that refers to the early
15524 DIE, thus we add another indirection here. This seems to confuse
15525 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15526 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15527 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15528 if (ref)
15529 {
15530 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15531 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15532 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15533 }
15534 else
15535 {
15536 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15537 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15538 }
15539 return ret;
15540 }
15541
15542 /* The following routine converts the RTL for a variable or parameter
15543 (resident in memory) into an equivalent Dwarf representation of a
15544 mechanism for getting the address of that same variable onto the top of a
15545 hypothetical "address evaluation" stack.
15546
15547 When creating memory location descriptors, we are effectively transforming
15548 the RTL for a memory-resident object into its Dwarf postfix expression
15549 equivalent. This routine recursively descends an RTL tree, turning
15550 it into Dwarf postfix code as it goes.
15551
15552 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15553
15554 MEM_MODE is the mode of the memory reference, needed to handle some
15555 autoincrement addressing modes.
15556
15557 Return 0 if we can't represent the location. */
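/* For example, a frame slot address such as (plus (reg fp) (const_int -8))
   is typically turned into DW_OP_fbreg <offset> via based_loc_descr above,
   and a MEM of such an address gets a DW_OP_deref appended on top.  */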
15558
15559 dw_loc_descr_ref
15560 mem_loc_descriptor (rtx rtl, machine_mode mode,
15561 machine_mode mem_mode,
15562 enum var_init_status initialized)
15563 {
15564 dw_loc_descr_ref mem_loc_result = NULL;
15565 enum dwarf_location_atom op;
15566 dw_loc_descr_ref op0, op1;
15567 rtx inner = NULL_RTX;
15568 poly_int64 offset;
15569
15570 if (mode == VOIDmode)
15571 mode = GET_MODE (rtl);
15572
15573 /* Note that for a dynamically sized array, the location we will generate a
15574 description of here will be the lowest numbered location which is
15575 actually within the array. That's *not* necessarily the same as the
15576 zeroth element of the array. */
15577
15578 rtl = targetm.delegitimize_address (rtl);
15579
15580 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15581 return NULL;
15582
15583 scalar_int_mode int_mode = BImode, inner_mode, op1_mode;
15584 switch (GET_CODE (rtl))
15585 {
15586 case POST_INC:
15587 case POST_DEC:
15588 case POST_MODIFY:
15589 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15590
15591 case SUBREG:
15592 /* The case of a subreg may arise when we have a local (register)
15593 variable or a formal (register) parameter which doesn't quite fill
15594 up an entire register. For now, just assume that it is
15595 legitimate to make the Dwarf info refer to the whole register which
15596 contains the given subreg. */
15597 if (!subreg_lowpart_p (rtl))
15598 break;
15599 inner = SUBREG_REG (rtl);
15600 /* FALLTHRU */
15601 case TRUNCATE:
15602 if (inner == NULL_RTX)
15603 inner = XEXP (rtl, 0);
15604 if (is_a <scalar_int_mode> (mode, &int_mode)
15605 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15606 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15607 #ifdef POINTERS_EXTEND_UNSIGNED
15608 || (int_mode == Pmode && mem_mode != VOIDmode)
15609 #endif
15610 )
15611 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15612 {
15613 mem_loc_result = mem_loc_descriptor (inner,
15614 inner_mode,
15615 mem_mode, initialized);
15616 break;
15617 }
15618 if (dwarf_strict && dwarf_version < 5)
15619 break;
15620 if (is_a <scalar_int_mode> (mode, &int_mode)
15621 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15622 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15623 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15624 {
15625 dw_die_ref type_die;
15626 dw_loc_descr_ref cvt;
15627
15628 mem_loc_result = mem_loc_descriptor (inner,
15629 GET_MODE (inner),
15630 mem_mode, initialized);
15631 if (mem_loc_result == NULL)
15632 break;
15633 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15634 if (type_die == NULL)
15635 {
15636 mem_loc_result = NULL;
15637 break;
15638 }
15639 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15640 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15641 else
15642 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15643 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15644 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15645 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15646 add_loc_descr (&mem_loc_result, cvt);
15647 if (is_a <scalar_int_mode> (mode, &int_mode)
15648 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15649 {
15650 /* Convert it to untyped afterwards. */
15651 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15652 add_loc_descr (&mem_loc_result, cvt);
15653 }
15654 }
15655 break;
15656
15657 case REG:
15658 if (!is_a <scalar_int_mode> (mode, &int_mode)
15659 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15660 && rtl != arg_pointer_rtx
15661 && rtl != frame_pointer_rtx
15662 #ifdef POINTERS_EXTEND_UNSIGNED
15663 && (int_mode != Pmode || mem_mode == VOIDmode)
15664 #endif
15665 ))
15666 {
15667 dw_die_ref type_die;
15668 unsigned int dbx_regnum;
15669
15670 if (dwarf_strict && dwarf_version < 5)
15671 break;
15672 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15673 break;
15674 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15675 if (type_die == NULL)
15676 break;
15677
15678 dbx_regnum = dbx_reg_number (rtl);
15679 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15680 break;
15681 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15682 dbx_regnum, 0);
15683 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15684 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15685 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15686 break;
15687 }
15688 /* Whenever a register number forms a part of the description of the
15689 method for calculating the (dynamic) address of a memory resident
15690 object, DWARF rules require the register number be referred to as
15691 a "base register". This distinction is not based in any way upon
15692 what category of register the hardware believes the given register
15693 belongs to. This is strictly DWARF terminology we're dealing with
15694 here. Note that in cases where the location of a memory-resident
15695 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15696 OP_CONST (0)) the actual DWARF location descriptor that we generate
15697 may just be OP_BASEREG (basereg). This may look deceptively like
15698 the object in question was allocated to a register (rather than in
15699 memory) so DWARF consumers need to be aware of the subtle
15700 distinction between OP_REG and OP_BASEREG. */
15701 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15702 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15703 else if (stack_realign_drap
15704 && crtl->drap_reg
15705 && crtl->args.internal_arg_pointer == rtl
15706 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15707 {
15708 /* If RTL is internal_arg_pointer, which has been optimized
15709 out, use DRAP instead. */
15710 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15711 VAR_INIT_STATUS_INITIALIZED);
15712 }
15713 break;
15714
15715 case SIGN_EXTEND:
15716 case ZERO_EXTEND:
15717 if (!is_a <scalar_int_mode> (mode, &int_mode)
15718 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15719 break;
15720 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15721 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15722 if (op0 == 0)
15723 break;
15724 else if (GET_CODE (rtl) == ZERO_EXTEND
15725 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15726 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15727 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15728 to expand zero extend as two shifts instead of
15729 masking. */
15730 && GET_MODE_SIZE (inner_mode) <= 4)
15731 {
15732 mem_loc_result = op0;
15733 add_loc_descr (&mem_loc_result,
15734 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15735 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15736 }
15737 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15738 {
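/* Expand the extension as two shifts: shift the value left so that its
   top bit lands in the most significant bit of a DWARF address-sized
   word, then shift it back down by the same amount, using DW_OP_shra to
   sign-fill for SIGN_EXTEND and DW_OP_shr to zero-fill for ZERO_EXTEND.  */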
15739 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15740 shift *= BITS_PER_UNIT;
15741 if (GET_CODE (rtl) == SIGN_EXTEND)
15742 op = DW_OP_shra;
15743 else
15744 op = DW_OP_shr;
15745 mem_loc_result = op0;
15746 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15747 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15748 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15749 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15750 }
15751 else if (!dwarf_strict || dwarf_version >= 5)
15752 {
15753 dw_die_ref type_die1, type_die2;
15754 dw_loc_descr_ref cvt;
15755
15756 type_die1 = base_type_for_mode (inner_mode,
15757 GET_CODE (rtl) == ZERO_EXTEND);
15758 if (type_die1 == NULL)
15759 break;
15760 type_die2 = base_type_for_mode (int_mode, 1);
15761 if (type_die2 == NULL)
15762 break;
15763 mem_loc_result = op0;
15764 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15765 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15766 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15767 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15768 add_loc_descr (&mem_loc_result, cvt);
15769 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15770 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15771 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15772 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15773 add_loc_descr (&mem_loc_result, cvt);
15774 }
15775 break;
15776
15777 case MEM:
15778 {
15779 rtx new_rtl = avoid_constant_pool_reference (rtl);
15780 if (new_rtl != rtl)
15781 {
15782 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15783 initialized);
15784 if (mem_loc_result != NULL)
15785 return mem_loc_result;
15786 }
15787 }
15788 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15789 get_address_mode (rtl), mode,
15790 VAR_INIT_STATUS_INITIALIZED);
15791 if (mem_loc_result == NULL)
15792 mem_loc_result = tls_mem_loc_descriptor (rtl);
15793 if (mem_loc_result != NULL)
15794 {
15795 if (!is_a <scalar_int_mode> (mode, &int_mode)
15796 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15797 {
15798 dw_die_ref type_die;
15799 dw_loc_descr_ref deref;
15800 HOST_WIDE_INT size;
15801
15802 if (dwarf_strict && dwarf_version < 5)
15803 return NULL;
15804 if (!GET_MODE_SIZE (mode).is_constant (&size))
15805 return NULL;
15806 type_die
15807 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15808 if (type_die == NULL)
15809 return NULL;
15810 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15811 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15812 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15813 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15814 add_loc_descr (&mem_loc_result, deref);
15815 }
15816 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15817 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15818 else
15819 add_loc_descr (&mem_loc_result,
15820 new_loc_descr (DW_OP_deref_size,
15821 GET_MODE_SIZE (int_mode), 0));
15822 }
15823 break;
15824
15825 case LO_SUM:
15826 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15827
15828 case LABEL_REF:
15829 /* Some ports can transform a symbol ref into a label ref, because
15830 the symbol ref is too far away and has to be dumped into a constant
15831 pool. */
15832 case CONST:
15833 case SYMBOL_REF:
15834 case UNSPEC:
15835 if (!is_a <scalar_int_mode> (mode, &int_mode)
15836 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15837 #ifdef POINTERS_EXTEND_UNSIGNED
15838 && (int_mode != Pmode || mem_mode == VOIDmode)
15839 #endif
15840 ))
15841 break;
15842
15843 if (GET_CODE (rtl) == UNSPEC)
15844 {
15845 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15846 can't express it in the debug info. This can happen e.g. with some
15847 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15848 approves. */
15849 bool not_ok = false;
15850 subrtx_var_iterator::array_type array;
15851 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15852 if (*iter != rtl && !CONSTANT_P (*iter))
15853 {
15854 not_ok = true;
15855 break;
15856 }
15857
15858 if (not_ok)
15859 break;
15860
15861 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15862 if (!const_ok_for_output_1 (*iter))
15863 {
15864 not_ok = true;
15865 break;
15866 }
15867
15868 if (not_ok)
15869 break;
15870
15871 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15872 goto symref;
15873 }
15874
15875 if (GET_CODE (rtl) == SYMBOL_REF
15876 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15877 {
15878 dw_loc_descr_ref temp;
15879
15880 /* If this is not defined, we have no way to emit the data. */
15881 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15882 break;
15883
15884 temp = new_addr_loc_descr (rtl, dtprel_true);
15885
15886 /* We check for DWARF 5 here because gdb did not implement
15887 DW_OP_form_tls_address until after 7.12. */
15888 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15889 ? DW_OP_form_tls_address
15890 : DW_OP_GNU_push_tls_address),
15891 0, 0);
15892 add_loc_descr (&mem_loc_result, temp);
15893
15894 break;
15895 }
15896
15897 if (!const_ok_for_output (rtl))
15898 {
15899 if (GET_CODE (rtl) == CONST)
15900 switch (GET_CODE (XEXP (rtl, 0)))
15901 {
15902 case NOT:
15903 op = DW_OP_not;
15904 goto try_const_unop;
15905 case NEG:
15906 op = DW_OP_neg;
15907 goto try_const_unop;
15908 try_const_unop:
15909 rtx arg;
15910 arg = XEXP (XEXP (rtl, 0), 0);
15911 if (!CONSTANT_P (arg))
15912 arg = gen_rtx_CONST (int_mode, arg);
15913 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15914 initialized);
15915 if (op0)
15916 {
15917 mem_loc_result = op0;
15918 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15919 }
15920 break;
15921 default:
15922 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15923 mem_mode, initialized);
15924 break;
15925 }
15926 break;
15927 }
15928
15929 symref:
15930 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15931 vec_safe_push (used_rtx_array, rtl);
15932 break;
15933
15934 case CONCAT:
15935 case CONCATN:
15936 case VAR_LOCATION:
15937 case DEBUG_IMPLICIT_PTR:
15938 expansion_failed (NULL_TREE, rtl,
15939 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15940 return 0;
15941
15942 case ENTRY_VALUE:
15943 if (dwarf_strict && dwarf_version < 5)
15944 return NULL;
15945 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15946 {
15947 if (!is_a <scalar_int_mode> (mode, &int_mode)
15948 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15949 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15950 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15951 else
15952 {
15953 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15954 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15955 return NULL;
15956 op0 = one_reg_loc_descriptor (dbx_regnum,
15957 VAR_INIT_STATUS_INITIALIZED);
15958 }
15959 }
15960 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15961 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15962 {
15963 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15964 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15965 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15966 return NULL;
15967 }
15968 else
15969 gcc_unreachable ();
15970 if (op0 == NULL)
15971 return NULL;
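/* Wrap the sub-expression in DW_OP_entry_value, whose operand is a
   DWARF expression that consumers evaluate as if at the entry of the
   containing function.  */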
15972 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15973 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15974 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15975 break;
15976
15977 case DEBUG_PARAMETER_REF:
15978 mem_loc_result = parameter_ref_descriptor (rtl);
15979 break;
15980
15981 case PRE_MODIFY:
15982 /* Extract the PLUS expression nested inside and fall into
15983 PLUS code below. */
15984 rtl = XEXP (rtl, 1);
15985 goto plus;
15986
15987 case PRE_INC:
15988 case PRE_DEC:
15989 /* Turn these into a PLUS expression and fall into the PLUS code
15990 below. */
15991 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15992 gen_int_mode (GET_CODE (rtl) == PRE_INC
15993 ? GET_MODE_UNIT_SIZE (mem_mode)
15994 : -GET_MODE_UNIT_SIZE (mem_mode),
15995 mode));
15996
15997 /* fall through */
15998
15999 case PLUS:
16000 plus:
16001 if (is_based_loc (rtl)
16002 && is_a <scalar_int_mode> (mode, &int_mode)
16003 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16004 || XEXP (rtl, 0) == arg_pointer_rtx
16005 || XEXP (rtl, 0) == frame_pointer_rtx))
16006 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
16007 INTVAL (XEXP (rtl, 1)),
16008 VAR_INIT_STATUS_INITIALIZED);
16009 else
16010 {
16011 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16012 VAR_INIT_STATUS_INITIALIZED);
16013 if (mem_loc_result == 0)
16014 break;
16015
16016 if (CONST_INT_P (XEXP (rtl, 1))
16017 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
16018 <= DWARF2_ADDR_SIZE))
16019 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
16020 else
16021 {
16022 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16023 VAR_INIT_STATUS_INITIALIZED);
16024 if (op1 == 0)
16025 return NULL;
16026 add_loc_descr (&mem_loc_result, op1);
16027 add_loc_descr (&mem_loc_result,
16028 new_loc_descr (DW_OP_plus, 0, 0));
16029 }
16030 }
16031 break;
16032
16033 /* If a pseudo-reg is optimized away, it is possible for it to
16034 be replaced with a MEM containing a multiply or shift. */
16035 case MINUS:
16036 op = DW_OP_minus;
16037 goto do_binop;
16038
16039 case MULT:
16040 op = DW_OP_mul;
16041 goto do_binop;
16042
16043 case DIV:
16044 if ((!dwarf_strict || dwarf_version >= 5)
16045 && is_a <scalar_int_mode> (mode, &int_mode)
16046 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16047 {
16048 mem_loc_result = typed_binop (DW_OP_div, rtl,
16049 base_type_for_mode (mode, 0),
16050 int_mode, mem_mode);
16051 break;
16052 }
16053 op = DW_OP_div;
16054 goto do_binop;
16055
16056 case UMOD:
16057 op = DW_OP_mod;
16058 goto do_binop;
16059
16060 case ASHIFT:
16061 op = DW_OP_shl;
16062 goto do_shift;
16063
16064 case ASHIFTRT:
16065 op = DW_OP_shra;
16066 goto do_shift;
16067
16068 case LSHIFTRT:
16069 op = DW_OP_shr;
16070 goto do_shift;
16071
16072 do_shift:
16073 if (!is_a <scalar_int_mode> (mode, &int_mode))
16074 break;
16075 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
16076 VAR_INIT_STATUS_INITIALIZED);
16077 {
16078 rtx rtlop1 = XEXP (rtl, 1);
16079 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
16080 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
16081 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
16082 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
16083 VAR_INIT_STATUS_INITIALIZED);
16084 }
16085
16086 if (op0 == 0 || op1 == 0)
16087 break;
16088
16089 mem_loc_result = op0;
16090 add_loc_descr (&mem_loc_result, op1);
16091 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16092 break;
16093
16094 case AND:
16095 op = DW_OP_and;
16096 goto do_binop;
16097
16098 case IOR:
16099 op = DW_OP_or;
16100 goto do_binop;
16101
16102 case XOR:
16103 op = DW_OP_xor;
16104 goto do_binop;
16105
16106 do_binop:
16107 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16108 VAR_INIT_STATUS_INITIALIZED);
16109 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16110 VAR_INIT_STATUS_INITIALIZED);
16111
16112 if (op0 == 0 || op1 == 0)
16113 break;
16114
16115 mem_loc_result = op0;
16116 add_loc_descr (&mem_loc_result, op1);
16117 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16118 break;
16119
16120 case MOD:
16121 if ((!dwarf_strict || dwarf_version >= 5)
16122 && is_a <scalar_int_mode> (mode, &int_mode)
16123 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16124 {
16125 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16126 base_type_for_mode (mode, 0),
16127 int_mode, mem_mode);
16128 break;
16129 }
16130
16131 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16132 VAR_INIT_STATUS_INITIALIZED);
16133 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16134 VAR_INIT_STATUS_INITIALIZED);
16135
16136 if (op0 == 0 || op1 == 0)
16137 break;
16138
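/* Expand MOD as op0 - (op0 / op1) * op1 using the DWARF stack: the two
   DW_OP_over operations duplicate both operands, then divide, multiply
   the quotient back and subtract.  */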
16139 mem_loc_result = op0;
16140 add_loc_descr (&mem_loc_result, op1);
16141 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16142 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16143 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16144 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16145 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
16146 break;
16147
16148 case UDIV:
16149 if ((!dwarf_strict || dwarf_version >= 5)
16150 && is_a <scalar_int_mode> (mode, &int_mode))
16151 {
16152 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16153 {
16154 op = DW_OP_div;
16155 goto do_binop;
16156 }
16157 mem_loc_result = typed_binop (DW_OP_div, rtl,
16158 base_type_for_mode (int_mode, 1),
16159 int_mode, mem_mode);
16160 }
16161 break;
16162
16163 case NOT:
16164 op = DW_OP_not;
16165 goto do_unop;
16166
16167 case ABS:
16168 op = DW_OP_abs;
16169 goto do_unop;
16170
16171 case NEG:
16172 op = DW_OP_neg;
16173 goto do_unop;
16174
16175 do_unop:
16176 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16177 VAR_INIT_STATUS_INITIALIZED);
16178
16179 if (op0 == 0)
16180 break;
16181
16182 mem_loc_result = op0;
16183 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16184 break;
16185
16186 case CONST_INT:
16187 if (!is_a <scalar_int_mode> (mode, &int_mode)
16188 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16189 #ifdef POINTERS_EXTEND_UNSIGNED
16190 || (int_mode == Pmode
16191 && mem_mode != VOIDmode
16192 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16193 #endif
16194 )
16195 {
16196 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16197 break;
16198 }
16199 if ((!dwarf_strict || dwarf_version >= 5)
16200 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16201 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16202 {
16203 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16204 scalar_int_mode amode;
16205 if (type_die == NULL)
16206 return NULL;
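/* For a non-negative constant that fits in an address-sized word, an
   untyped literal followed by DW_OP_convert can be shorter than
   DW_OP_const_type with a full-width operand; compare the encoded
   sizes and use the smaller form.  */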
16207 if (INTVAL (rtl) >= 0
16208 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16209 .exists (&amode))
16210 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16211 /* const DW_OP_convert <XXX> vs.
16212 DW_OP_const_type <XXX, 1, const>. */
16213 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16214 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16215 {
16216 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16217 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16218 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16219 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16220 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16221 add_loc_descr (&mem_loc_result, op0);
16222 return mem_loc_result;
16223 }
16224 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16225 INTVAL (rtl));
16226 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16227 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16228 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16229 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16230 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16231 else
16232 {
16233 mem_loc_result->dw_loc_oprnd2.val_class
16234 = dw_val_class_const_double;
16235 mem_loc_result->dw_loc_oprnd2.v.val_double
16236 = double_int::from_shwi (INTVAL (rtl));
16237 }
16238 }
16239 break;
16240
16241 case CONST_DOUBLE:
16242 if (!dwarf_strict || dwarf_version >= 5)
16243 {
16244 dw_die_ref type_die;
16245
16246 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16247 CONST_DOUBLE rtx could represent either a large integer
16248 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16249 the value is always a floating point constant.
16250
16251 When it is an integer, a CONST_DOUBLE is used whenever
16252 the constant requires 2 HWIs to be adequately represented.
16253 We output CONST_DOUBLEs as blocks. */
16254 if (mode == VOIDmode
16255 || (GET_MODE (rtl) == VOIDmode
16256 && maybe_ne (GET_MODE_BITSIZE (mode),
16257 HOST_BITS_PER_DOUBLE_INT)))
16258 break;
16259 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16260 if (type_die == NULL)
16261 return NULL;
16262 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16263 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16264 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16265 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16266 #if TARGET_SUPPORTS_WIDE_INT == 0
16267 if (!SCALAR_FLOAT_MODE_P (mode))
16268 {
16269 mem_loc_result->dw_loc_oprnd2.val_class
16270 = dw_val_class_const_double;
16271 mem_loc_result->dw_loc_oprnd2.v.val_double
16272 = rtx_to_double_int (rtl);
16273 }
16274 else
16275 #endif
16276 {
16277 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16278 unsigned int length = GET_MODE_SIZE (float_mode);
16279 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16280
16281 insert_float (rtl, array);
16282 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16283 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16284 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16285 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16286 }
16287 }
16288 break;
16289
16290 case CONST_WIDE_INT:
16291 if (!dwarf_strict || dwarf_version >= 5)
16292 {
16293 dw_die_ref type_die;
16294
16295 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16296 if (type_die == NULL)
16297 return NULL;
16298 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16299 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16300 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16301 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16302 mem_loc_result->dw_loc_oprnd2.val_class
16303 = dw_val_class_wide_int;
16304 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16305 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16306 }
16307 break;
16308
16309 case CONST_POLY_INT:
16310 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16311 break;
16312
16313 case EQ:
16314 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16315 break;
16316
16317 case GE:
16318 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16319 break;
16320
16321 case GT:
16322 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16323 break;
16324
16325 case LE:
16326 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16327 break;
16328
16329 case LT:
16330 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16331 break;
16332
16333 case NE:
16334 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16335 break;
16336
16337 case GEU:
16338 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16339 break;
16340
16341 case GTU:
16342 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16343 break;
16344
16345 case LEU:
16346 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16347 break;
16348
16349 case LTU:
16350 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16351 break;
16352
16353 case UMIN:
16354 case UMAX:
16355 if (!SCALAR_INT_MODE_P (mode))
16356 break;
16357 /* FALLTHRU */
16358 case SMIN:
16359 case SMAX:
16360 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16361 break;
16362
16363 case ZERO_EXTRACT:
16364 case SIGN_EXTRACT:
16365 if (CONST_INT_P (XEXP (rtl, 1))
16366 && CONST_INT_P (XEXP (rtl, 2))
16367 && is_a <scalar_int_mode> (mode, &int_mode)
16368 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16369 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16370 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16371 && ((unsigned) INTVAL (XEXP (rtl, 1))
16372 + (unsigned) INTVAL (XEXP (rtl, 2))
16373 <= GET_MODE_BITSIZE (int_mode)))
16374 {
16375 int shift, size;
16376 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16377 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16378 if (op0 == 0)
16379 break;
16380 if (GET_CODE (rtl) == SIGN_EXTRACT)
16381 op = DW_OP_shra;
16382 else
16383 op = DW_OP_shr;
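/* Shift the field up against the top of a DWARF address-sized word and
   then shift it back down, so that the final DW_OP_shr or DW_OP_shra
   zero- or sign-fills the extracted bits.  */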
16384 mem_loc_result = op0;
16385 size = INTVAL (XEXP (rtl, 1));
16386 shift = INTVAL (XEXP (rtl, 2));
16387 if (BITS_BIG_ENDIAN)
16388 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16389 if (shift + size != (int) DWARF2_ADDR_SIZE)
16390 {
16391 add_loc_descr (&mem_loc_result,
16392 int_loc_descriptor (DWARF2_ADDR_SIZE
16393 - shift - size));
16394 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16395 }
16396 if (size != (int) DWARF2_ADDR_SIZE)
16397 {
16398 add_loc_descr (&mem_loc_result,
16399 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16400 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16401 }
16402 }
16403 break;
16404
16405 case IF_THEN_ELSE:
16406 {
16407 dw_loc_descr_ref op2, bra_node, drop_node;
16408 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16409 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16410 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16411 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16412 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16413 VAR_INIT_STATUS_INITIALIZED);
16414 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16415 VAR_INIT_STATUS_INITIALIZED);
16416 if (op0 == NULL || op1 == NULL || op2 == NULL)
16417 break;
16418
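/* Push the "then" value, the "else" value and the condition; DW_OP_bra
   then selects between them: a nonzero condition branches straight to
   the DW_OP_drop that discards the "else" value, while a zero condition
   falls through to DW_OP_swap followed by DW_OP_drop, discarding the
   "then" value instead.  */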
16419 mem_loc_result = op1;
16420 add_loc_descr (&mem_loc_result, op2);
16421 add_loc_descr (&mem_loc_result, op0);
16422 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16423 add_loc_descr (&mem_loc_result, bra_node);
16424 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16425 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16426 add_loc_descr (&mem_loc_result, drop_node);
16427 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16428 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16429 }
16430 break;
16431
16432 case FLOAT_EXTEND:
16433 case FLOAT_TRUNCATE:
16434 case FLOAT:
16435 case UNSIGNED_FLOAT:
16436 case FIX:
16437 case UNSIGNED_FIX:
16438 if (!dwarf_strict || dwarf_version >= 5)
16439 {
16440 dw_die_ref type_die;
16441 dw_loc_descr_ref cvt;
16442
16443 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16444 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16445 if (op0 == NULL)
16446 break;
16447 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16448 && (GET_CODE (rtl) == FLOAT
16449 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16450 {
16451 type_die = base_type_for_mode (int_mode,
16452 GET_CODE (rtl) == UNSIGNED_FLOAT);
16453 if (type_die == NULL)
16454 break;
16455 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16456 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16457 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16458 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16459 add_loc_descr (&op0, cvt);
16460 }
16461 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16462 if (type_die == NULL)
16463 break;
16464 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16465 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16466 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16467 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16468 add_loc_descr (&op0, cvt);
16469 if (is_a <scalar_int_mode> (mode, &int_mode)
16470 && (GET_CODE (rtl) == FIX
16471 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16472 {
16473 op0 = convert_descriptor_to_mode (int_mode, op0);
16474 if (op0 == NULL)
16475 break;
16476 }
16477 mem_loc_result = op0;
16478 }
16479 break;
16480
16481 case CLZ:
16482 case CTZ:
16483 case FFS:
16484 if (is_a <scalar_int_mode> (mode, &int_mode))
16485 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16486 break;
16487
16488 case POPCOUNT:
16489 case PARITY:
16490 if (is_a <scalar_int_mode> (mode, &int_mode))
16491 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16492 break;
16493
16494 case BSWAP:
16495 if (is_a <scalar_int_mode> (mode, &int_mode))
16496 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16497 break;
16498
16499 case ROTATE:
16500 case ROTATERT:
16501 if (is_a <scalar_int_mode> (mode, &int_mode))
16502 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16503 break;
16504
16505 case COMPARE:
16506 /* In theory, we could implement the above. */
16507 /* DWARF cannot represent the unsigned compare operations
16508 natively. */
16509 case SS_MULT:
16510 case US_MULT:
16511 case SS_DIV:
16512 case US_DIV:
16513 case SS_PLUS:
16514 case US_PLUS:
16515 case SS_MINUS:
16516 case US_MINUS:
16517 case SS_NEG:
16518 case US_NEG:
16519 case SS_ABS:
16520 case SS_ASHIFT:
16521 case US_ASHIFT:
16522 case SS_TRUNCATE:
16523 case US_TRUNCATE:
16524 case UNORDERED:
16525 case ORDERED:
16526 case UNEQ:
16527 case UNGE:
16528 case UNGT:
16529 case UNLE:
16530 case UNLT:
16531 case LTGT:
16532 case FRACT_CONVERT:
16533 case UNSIGNED_FRACT_CONVERT:
16534 case SAT_FRACT:
16535 case UNSIGNED_SAT_FRACT:
16536 case SQRT:
16537 case ASM_OPERANDS:
16538 case VEC_MERGE:
16539 case VEC_SELECT:
16540 case VEC_CONCAT:
16541 case VEC_DUPLICATE:
16542 case VEC_SERIES:
16543 case HIGH:
16544 case FMA:
16545 case STRICT_LOW_PART:
16546 case CONST_VECTOR:
16547 case CONST_FIXED:
16548 case CLRSB:
16549 case CLOBBER:
16550 break;
16551
16552 case CONST_STRING:
16553 resolve_one_addr (&rtl);
16554 goto symref;
16555
16556 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16557 the expression. An UNSPEC rtx represents a raw DWARF operation;
16558 new_loc_descr is called for it to build the operation directly.
16559 Otherwise mem_loc_descriptor is called recursively. */
16560 case PARALLEL:
16561 {
16562 int index = 0;
16563 dw_loc_descr_ref exp_result = NULL;
16564
16565 for (; index < XVECLEN (rtl, 0); index++)
16566 {
16567 rtx elem = XVECEXP (rtl, 0, index);
16568 if (GET_CODE (elem) == UNSPEC)
16569 {
16570 /* Each DWARF operation UNSPEC contains two operands; if
16571 an operand is not used for the operation, const0_rtx is
16572 passed. */
16573 gcc_assert (XVECLEN (elem, 0) == 2);
16574
16575 HOST_WIDE_INT dw_op = XINT (elem, 1);
16576 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16577 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16578 exp_result
16579 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16580 oprnd2);
16581 }
16582 else
16583 exp_result
16584 = mem_loc_descriptor (elem, mode, mem_mode,
16585 VAR_INIT_STATUS_INITIALIZED);
16586
16587 if (!mem_loc_result)
16588 mem_loc_result = exp_result;
16589 else
16590 add_loc_descr (&mem_loc_result, exp_result);
16591 }
16592
16593 break;
16594 }
16595
16596 default:
16597 if (flag_checking)
16598 {
16599 print_rtl (stderr, rtl);
16600 gcc_unreachable ();
16601 }
16602 break;
16603 }
16604
16605 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16606 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16607
16608 return mem_loc_result;
16609 }
16610
16611 /* Return a descriptor that describes the concatenation of two locations.
16612 This is typically a complex variable. */
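/* For example, a complex value whose real and imaginary parts live in
   two registers is described as <loc of X0> DW_OP_piece <size of X0>
   <loc of X1> DW_OP_piece <size of X1>.  */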
16613
16614 static dw_loc_descr_ref
16615 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16616 {
16617 /* At present we only track constant-sized pieces. */
16618 unsigned int size0, size1;
16619 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16620 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16621 return 0;
16622
16623 dw_loc_descr_ref cc_loc_result = NULL;
16624 dw_loc_descr_ref x0_ref
16625 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16626 dw_loc_descr_ref x1_ref
16627 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16628
16629 if (x0_ref == 0 || x1_ref == 0)
16630 return 0;
16631
16632 cc_loc_result = x0_ref;
16633 add_loc_descr_op_piece (&cc_loc_result, size0);
16634
16635 add_loc_descr (&cc_loc_result, x1_ref);
16636 add_loc_descr_op_piece (&cc_loc_result, size1);
16637
16638 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16639 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16640
16641 return cc_loc_result;
16642 }
16643
16644 /* Return a descriptor that describes the concatenation of N
16645 locations. */
16646
16647 static dw_loc_descr_ref
16648 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16649 {
16650 unsigned int i;
16651 dw_loc_descr_ref cc_loc_result = NULL;
16652 unsigned int n = XVECLEN (concatn, 0);
16653 unsigned int size;
16654
16655 for (i = 0; i < n; ++i)
16656 {
16657 dw_loc_descr_ref ref;
16658 rtx x = XVECEXP (concatn, 0, i);
16659
16660 /* At present we only track constant-sized pieces. */
16661 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16662 return NULL;
16663
16664 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16665 if (ref == NULL)
16666 return NULL;
16667
16668 add_loc_descr (&cc_loc_result, ref);
16669 add_loc_descr_op_piece (&cc_loc_result, size);
16670 }
16671
16672 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16673 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16674
16675 return cc_loc_result;
16676 }
16677
16678 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16679 for DEBUG_IMPLICIT_PTR RTL. */
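/* DW_OP_implicit_pointer refers to the DIE describing the object that
   the optimized-out pointer would point to, together with a byte offset
   into that object.  */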
16680
16681 static dw_loc_descr_ref
16682 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16683 {
16684 dw_loc_descr_ref ret;
16685 dw_die_ref ref;
16686
16687 if (dwarf_strict && dwarf_version < 5)
16688 return NULL;
16689 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16690 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16691 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16692 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16693 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16694 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16695 if (ref)
16696 {
16697 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16698 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16699 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16700 }
16701 else
16702 {
16703 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16704 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16705 }
16706 return ret;
16707 }
16708
16709 /* Output a proper Dwarf location descriptor for a variable or parameter
16710 which is either allocated in a register or in a memory location. For a
16711 register, we just generate an OP_REG and the register number. For a
16712 memory location we provide a Dwarf postfix expression describing how to
16713 generate the (dynamic) address of the object onto the address stack.
16714
16715 MODE is mode of the decl if this loc_descriptor is going to be used in
16716 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16717 allowed, VOIDmode otherwise.
16718
16719 If we don't know how to describe it, return 0. */
16720
16721 static dw_loc_descr_ref
16722 loc_descriptor (rtx rtl, machine_mode mode,
16723 enum var_init_status initialized)
16724 {
16725 dw_loc_descr_ref loc_result = NULL;
16726 scalar_int_mode int_mode;
16727
16728 switch (GET_CODE (rtl))
16729 {
16730 case SUBREG:
16731 /* The case of a subreg may arise when we have a local (register)
16732 variable or a formal (register) parameter which doesn't quite fill
16733 up an entire register. For now, just assume that it is
16734 legitimate to make the Dwarf info refer to the whole register which
16735 contains the given subreg. */
16736 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16737 loc_result = loc_descriptor (SUBREG_REG (rtl),
16738 GET_MODE (SUBREG_REG (rtl)), initialized);
16739 else
16740 goto do_default;
16741 break;
16742
16743 case REG:
16744 loc_result = reg_loc_descriptor (rtl, initialized);
16745 break;
16746
16747 case MEM:
16748 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16749 GET_MODE (rtl), initialized);
16750 if (loc_result == NULL)
16751 loc_result = tls_mem_loc_descriptor (rtl);
16752 if (loc_result == NULL)
16753 {
16754 rtx new_rtl = avoid_constant_pool_reference (rtl);
16755 if (new_rtl != rtl)
16756 loc_result = loc_descriptor (new_rtl, mode, initialized);
16757 }
16758 break;
16759
16760 case CONCAT:
16761 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16762 initialized);
16763 break;
16764
16765 case CONCATN:
16766 loc_result = concatn_loc_descriptor (rtl, initialized);
16767 break;
16768
16769 case VAR_LOCATION:
16770 /* Single part. */
16771 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16772 {
16773 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16774 if (GET_CODE (loc) == EXPR_LIST)
16775 loc = XEXP (loc, 0);
16776 loc_result = loc_descriptor (loc, mode, initialized);
16777 break;
16778 }
16779
16780 rtl = XEXP (rtl, 1);
16781 /* FALLTHRU */
16782
16783 case PARALLEL:
16784 {
16785 rtvec par_elems = XVEC (rtl, 0);
16786 int num_elem = GET_NUM_ELEM (par_elems);
16787 machine_mode mode;
16788 int i, size;
16789
16790 /* Create the first one, so we have something to add to. */
16791 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16792 VOIDmode, initialized);
16793 if (loc_result == NULL)
16794 return NULL;
16795 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16796 /* At present we only track constant-sized pieces. */
16797 if (!GET_MODE_SIZE (mode).is_constant (&size))
16798 return NULL;
16799 add_loc_descr_op_piece (&loc_result, size);
16800 for (i = 1; i < num_elem; i++)
16801 {
16802 dw_loc_descr_ref temp;
16803
16804 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16805 VOIDmode, initialized);
16806 if (temp == NULL)
16807 return NULL;
16808 add_loc_descr (&loc_result, temp);
16809 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16810 /* At present we only track constant-sized pieces. */
16811 if (!GET_MODE_SIZE (mode).is_constant (&size))
16812 return NULL;
16813 add_loc_descr_op_piece (&loc_result, size);
16814 }
16815 }
16816 break;
16817
16818 case CONST_INT:
16819 if (mode != VOIDmode && mode != BLKmode)
16820 {
16821 int_mode = as_a <scalar_int_mode> (mode);
16822 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16823 INTVAL (rtl));
16824 }
16825 break;
16826
16827 case CONST_DOUBLE:
16828 if (mode == VOIDmode)
16829 mode = GET_MODE (rtl);
16830
16831 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16832 {
16833 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16834
16835 /* Note that a CONST_DOUBLE rtx could represent either an integer
16836 or a floating-point constant. A CONST_DOUBLE is used whenever
16837 the constant requires more than one word in order to be
16838 adequately represented. We output CONST_DOUBLEs as blocks. */
16839 scalar_mode smode = as_a <scalar_mode> (mode);
16840 loc_result = new_loc_descr (DW_OP_implicit_value,
16841 GET_MODE_SIZE (smode), 0);
16842 #if TARGET_SUPPORTS_WIDE_INT == 0
16843 if (!SCALAR_FLOAT_MODE_P (smode))
16844 {
16845 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16846 loc_result->dw_loc_oprnd2.v.val_double
16847 = rtx_to_double_int (rtl);
16848 }
16849 else
16850 #endif
16851 {
16852 unsigned int length = GET_MODE_SIZE (smode);
16853 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16854
16855 insert_float (rtl, array);
16856 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16857 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16858 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16859 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16860 }
16861 }
16862 break;
16863
16864 case CONST_WIDE_INT:
16865 if (mode == VOIDmode)
16866 mode = GET_MODE (rtl);
16867
16868 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16869 {
16870 int_mode = as_a <scalar_int_mode> (mode);
16871 loc_result = new_loc_descr (DW_OP_implicit_value,
16872 GET_MODE_SIZE (int_mode), 0);
16873 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16874 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16875 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16876 }
16877 break;
16878
16879 case CONST_VECTOR:
16880 if (mode == VOIDmode)
16881 mode = GET_MODE (rtl);
16882
16883 if (mode != VOIDmode
16884 /* The combination of a length and byte elt_size doesn't extend
16885 naturally to boolean vectors, where several elements are packed
16886 into the same byte. */
16887 && GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL
16888 && (dwarf_version >= 4 || !dwarf_strict))
16889 {
16890 unsigned int length;
16891 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16892 return NULL;
16893
16894 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16895 unsigned char *array
16896 = ggc_vec_alloc<unsigned char> (length * elt_size);
16897 unsigned int i;
16898 unsigned char *p;
16899 machine_mode imode = GET_MODE_INNER (mode);
16900
16901 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16902 switch (GET_MODE_CLASS (mode))
16903 {
16904 case MODE_VECTOR_INT:
16905 for (i = 0, p = array; i < length; i++, p += elt_size)
16906 {
16907 rtx elt = CONST_VECTOR_ELT (rtl, i);
16908 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16909 }
16910 break;
16911
16912 case MODE_VECTOR_FLOAT:
16913 for (i = 0, p = array; i < length; i++, p += elt_size)
16914 {
16915 rtx elt = CONST_VECTOR_ELT (rtl, i);
16916 insert_float (elt, p);
16917 }
16918 break;
16919
16920 default:
16921 gcc_unreachable ();
16922 }
16923
16924 loc_result = new_loc_descr (DW_OP_implicit_value,
16925 length * elt_size, 0);
16926 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16927 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16928 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16929 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16930 }
16931 break;
16932
16933 case CONST:
16934 if (mode == VOIDmode
16935 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16936 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16937 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16938 {
16939 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16940 break;
16941 }
16942 /* FALLTHROUGH */
16943 case SYMBOL_REF:
16944 if (!const_ok_for_output (rtl))
16945 break;
16946 /* FALLTHROUGH */
16947 case LABEL_REF:
16948 if (is_a <scalar_int_mode> (mode, &int_mode)
16949 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16950 && (dwarf_version >= 4 || !dwarf_strict))
16951 {
16952 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16953 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16954 vec_safe_push (used_rtx_array, rtl);
16955 }
16956 break;
16957
16958 case DEBUG_IMPLICIT_PTR:
16959 loc_result = implicit_ptr_descriptor (rtl, 0);
16960 break;
16961
16962 case PLUS:
16963 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16964 && CONST_INT_P (XEXP (rtl, 1)))
16965 {
16966 loc_result
16967 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16968 break;
16969 }
16970 /* FALLTHRU */
16971 do_default:
16972 default:
16973 if ((is_a <scalar_int_mode> (mode, &int_mode)
16974 && GET_MODE (rtl) == int_mode
16975 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16976 && dwarf_version >= 4)
16977 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16978 {
16979 /* Value expression. */
16980 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16981 if (loc_result)
16982 add_loc_descr (&loc_result,
16983 new_loc_descr (DW_OP_stack_value, 0, 0));
16984 }
16985 break;
16986 }
16987
16988 return loc_result;
16989 }
16990
16991 /* We need to figure out what section we should use as the base for the
16992 address ranges where a given location is valid.
16993 1. If this particular DECL has a section associated with it, use that.
16994 2. If this function has a section associated with it, use that.
16995 3. Otherwise, use the text section.
16996 XXX: If you split a variable across multiple sections, we won't notice. */
16997
16998 static const char *
16999 secname_for_decl (const_tree decl)
17000 {
17001 const char *secname;
17002
17003 if (VAR_OR_FUNCTION_DECL_P (decl)
17004 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
17005 && DECL_SECTION_NAME (decl))
17006 secname = DECL_SECTION_NAME (decl);
17007 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
17008 {
17009 if (in_cold_section_p)
17010 {
17011 section *sec = current_function_section ();
17012 if (sec->common.flags & SECTION_NAMED)
17013 return sec->named.name;
17014 }
17015 secname = DECL_SECTION_NAME (current_function_decl);
17016 }
17017 else if (cfun && in_cold_section_p)
17018 secname = crtl->subsections.cold_section_label;
17019 else
17020 secname = text_section_label;
17021
17022 return secname;
17023 }
17024
17025 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
17026
17027 static bool
17028 decl_by_reference_p (tree decl)
17029 {
17030 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
17031 || VAR_P (decl))
17032 && DECL_BY_REFERENCE (decl));
17033 }
17034
17035 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17036 for VARLOC. */
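/* WANT_ADDRESS is zero when the value of the location is wanted, nonzero
   when its address is wanted; the value 2 additionally allows falling
   back to an implicit value terminated by DW_OP_stack_value when no
   address can be produced.  */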
17037
17038 static dw_loc_descr_ref
17039 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
17040 enum var_init_status initialized)
17041 {
17042 int have_address = 0;
17043 dw_loc_descr_ref descr;
17044 machine_mode mode;
17045
17046 if (want_address != 2)
17047 {
17048 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
17049 /* Single part. */
17050 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17051 {
17052 varloc = PAT_VAR_LOCATION_LOC (varloc);
17053 if (GET_CODE (varloc) == EXPR_LIST)
17054 varloc = XEXP (varloc, 0);
17055 mode = GET_MODE (varloc);
17056 if (MEM_P (varloc))
17057 {
17058 rtx addr = XEXP (varloc, 0);
17059 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
17060 mode, initialized);
17061 if (descr)
17062 have_address = 1;
17063 else
17064 {
17065 rtx x = avoid_constant_pool_reference (varloc);
17066 if (x != varloc)
17067 descr = mem_loc_descriptor (x, mode, VOIDmode,
17068 initialized);
17069 }
17070 }
17071 else
17072 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
17073 }
17074 else
17075 return 0;
17076 }
17077 else
17078 {
17079 if (GET_CODE (varloc) == VAR_LOCATION)
17080 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
17081 else
17082 mode = DECL_MODE (loc);
17083 descr = loc_descriptor (varloc, mode, initialized);
17084 have_address = 1;
17085 }
17086
17087 if (!descr)
17088 return 0;
17089
17090 if (want_address == 2 && !have_address
17091 && (dwarf_version >= 4 || !dwarf_strict))
17092 {
17093 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
17094 {
17095 expansion_failed (loc, NULL_RTX,
17096 "DWARF address size mismatch");
17097 return 0;
17098 }
17099 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
17100 have_address = 1;
17101 }
17102 /* Show if we can't fill the request for an address. */
17103 if (want_address && !have_address)
17104 {
17105 expansion_failed (loc, NULL_RTX,
17106 "Want address and only have value");
17107 return 0;
17108 }
17109
17110 /* If we've got an address and don't want one, dereference. */
17111 if (!want_address && have_address)
17112 {
17113 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
17114 enum dwarf_location_atom op;
17115
17116 if (size > DWARF2_ADDR_SIZE || size == -1)
17117 {
17118 expansion_failed (loc, NULL_RTX,
17119 "DWARF address size mismatch");
17120 return 0;
17121 }
17122 else if (size == DWARF2_ADDR_SIZE)
17123 op = DW_OP_deref;
17124 else
17125 op = DW_OP_deref_size;
17126
17127 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17128 }
17129
17130 return descr;
17131 }
17132
17133 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17134 if it is not possible. */
17135
17136 static dw_loc_descr_ref
17137 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17138 {
17139 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17140 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17141 else if (dwarf_version >= 3 || !dwarf_strict)
17142 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17143 else
17144 return NULL;
17145 }
17146
17147 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17148 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
17149
17150 static dw_loc_descr_ref
17151 dw_sra_loc_expr (tree decl, rtx loc)
17152 {
17153 rtx p;
17154 unsigned HOST_WIDE_INT padsize = 0;
17155 dw_loc_descr_ref descr, *descr_tail;
17156 unsigned HOST_WIDE_INT decl_size;
17157 rtx varloc;
17158 enum var_init_status initialized;
17159
17160 if (DECL_SIZE (decl) == NULL
17161 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17162 return NULL;
17163
17164 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17165 descr = NULL;
17166 descr_tail = &descr;
17167
17168 for (p = loc; p; p = XEXP (p, 1))
17169 {
17170 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17171 rtx loc_note = *decl_piece_varloc_ptr (p);
17172 dw_loc_descr_ref cur_descr;
17173 dw_loc_descr_ref *tail, last = NULL;
17174 unsigned HOST_WIDE_INT opsize = 0;
17175
17176 if (loc_note == NULL_RTX
17177 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17178 {
17179 padsize += bitsize;
17180 continue;
17181 }
17182 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17183 varloc = NOTE_VAR_LOCATION (loc_note);
17184 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17185 if (cur_descr == NULL)
17186 {
17187 padsize += bitsize;
17188 continue;
17189 }
17190
17191 /* Check that cur_descr either doesn't use
17192 DW_OP_*piece operations, or their sum is equal
17193 to bitsize. Otherwise we can't embed it. */
17194 for (tail = &cur_descr; *tail != NULL;
17195 tail = &(*tail)->dw_loc_next)
17196 if ((*tail)->dw_loc_opc == DW_OP_piece)
17197 {
17198 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17199 * BITS_PER_UNIT;
17200 last = *tail;
17201 }
17202 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17203 {
17204 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17205 last = *tail;
17206 }
17207
17208 if (last != NULL && opsize != bitsize)
17209 {
17210 padsize += bitsize;
17211 /* Discard the current piece of the descriptor and release any
17212 addr_table entries it uses. */
17213 remove_loc_list_addr_table_entries (cur_descr);
17214 continue;
17215 }
17216
17217 /* If there is a hole, add DW_OP_*piece after empty DWARF
17218 expression, which means that those bits are optimized out. */
17219 if (padsize)
17220 {
17221 if (padsize > decl_size)
17222 {
17223 remove_loc_list_addr_table_entries (cur_descr);
17224 goto discard_descr;
17225 }
17226 decl_size -= padsize;
17227 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17228 if (*descr_tail == NULL)
17229 {
17230 remove_loc_list_addr_table_entries (cur_descr);
17231 goto discard_descr;
17232 }
17233 descr_tail = &(*descr_tail)->dw_loc_next;
17234 padsize = 0;
17235 }
17236 *descr_tail = cur_descr;
17237 descr_tail = tail;
17238 if (bitsize > decl_size)
17239 goto discard_descr;
17240 decl_size -= bitsize;
17241 if (last == NULL)
17242 {
17243 HOST_WIDE_INT offset = 0;
17244 if (GET_CODE (varloc) == VAR_LOCATION
17245 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17246 {
17247 varloc = PAT_VAR_LOCATION_LOC (varloc);
17248 if (GET_CODE (varloc) == EXPR_LIST)
17249 varloc = XEXP (varloc, 0);
17250 }
17251 do
17252 {
17253 if (GET_CODE (varloc) == CONST
17254 || GET_CODE (varloc) == SIGN_EXTEND
17255 || GET_CODE (varloc) == ZERO_EXTEND)
17256 varloc = XEXP (varloc, 0);
17257 else if (GET_CODE (varloc) == SUBREG)
17258 varloc = SUBREG_REG (varloc);
17259 else
17260 break;
17261 }
17262 while (1);
17263 /* The DW_OP_bit_piece offset should be zero for register
17264 or implicit location descriptions and empty location
17265 descriptions, but for memory addresses it needs big endian
17266 adjustment. */
17267 if (MEM_P (varloc))
17268 {
17269 unsigned HOST_WIDE_INT memsize;
17270 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17271 goto discard_descr;
17272 memsize *= BITS_PER_UNIT;
17273 if (memsize != bitsize)
17274 {
17275 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17276 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17277 goto discard_descr;
17278 if (memsize < bitsize)
17279 goto discard_descr;
17280 if (BITS_BIG_ENDIAN)
17281 offset = memsize - bitsize;
17282 }
17283 }
17284
17285 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17286 if (*descr_tail == NULL)
17287 goto discard_descr;
17288 descr_tail = &(*descr_tail)->dw_loc_next;
17289 }
17290 }
17291
17292 /* If there were any non-empty expressions, add padding till the end of
17293 the decl. */
17294 if (descr != NULL && decl_size != 0)
17295 {
17296 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17297 if (*descr_tail == NULL)
17298 goto discard_descr;
17299 }
17300 return descr;
17301
17302 discard_descr:
17303 /* Discard the descriptor and release any addr_table entries it uses. */
17304 remove_loc_list_addr_table_entries (descr);
17305 return NULL;
17306 }
17307
17308 /* Return the dwarf representation of the location list LOC_LIST of
17309 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17310 function. */
17311
17312 static dw_loc_list_ref
17313 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17314 {
17315 const char *endname, *secname;
17316 var_loc_view endview;
17317 rtx varloc;
17318 enum var_init_status initialized;
17319 struct var_loc_node *node;
17320 dw_loc_descr_ref descr;
17321 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17322 dw_loc_list_ref list = NULL;
17323 dw_loc_list_ref *listp = &list;
17324
17325 /* Now that we know what section we are using for a base,
17326 actually construct the list of locations.
17327 The first location information is what is passed to the
17328 function that creates the location list, and the remaining
17329 locations just get added on to that list.
17330 Note that we only know the start address for a location
17331 (i.e. location changes), so to build the range, we use
17332 the range [current location start, next location start].
17333 This means we have to special case the last node, and generate
17334 a range of [last location start, end of function label]. */
17335
17336 if (cfun && crtl->has_bb_partition)
17337 {
17338 bool save_in_cold_section_p = in_cold_section_p;
17339 in_cold_section_p = first_function_block_is_cold;
17340 if (loc_list->last_before_switch == NULL)
17341 in_cold_section_p = !in_cold_section_p;
17342 secname = secname_for_decl (decl);
17343 in_cold_section_p = save_in_cold_section_p;
17344 }
17345 else
17346 secname = secname_for_decl (decl);
17347
17348 for (node = loc_list->first; node; node = node->next)
17349 {
17350 bool range_across_switch = false;
17351 if (GET_CODE (node->loc) == EXPR_LIST
17352 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17353 {
17354 if (GET_CODE (node->loc) == EXPR_LIST)
17355 {
17356 descr = NULL;
17357 /* This requires DW_OP_{,bit_}piece, which is not usable
17358 inside DWARF expressions. */
17359 if (want_address == 2)
17360 descr = dw_sra_loc_expr (decl, node->loc);
17361 }
17362 else
17363 {
17364 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17365 varloc = NOTE_VAR_LOCATION (node->loc);
17366 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17367 }
17368 if (descr)
17369 {
17370 /* If section switch happens in between node->label
17371 and node->next->label (or end of function) and
17372 we can't emit it as a single entry list,
17373 emit two ranges, first one ending at the end
17374 of first partition and second one starting at the
17375 beginning of second partition. */
17376 if (node == loc_list->last_before_switch
17377 && (node != loc_list->first || loc_list->first->next
17378 /* If we are to emit a view number, we will emit
17379 a loclist rather than a single location
17380 expression for the entire function (see
17381 loc_list_has_views), so we have to split the
17382 range that straddles across partitions. */
17383 || !ZERO_VIEW_P (node->view))
17384 && current_function_decl)
17385 {
17386 endname = cfun->fde->dw_fde_end;
17387 endview = 0;
17388 range_across_switch = true;
17389 }
17390 /* The variable has a location between NODE->LABEL and
17391 NODE->NEXT->LABEL. */
17392 else if (node->next)
17393 endname = node->next->label, endview = node->next->view;
17394 /* If the variable has a location at the last label
17395 it keeps its location until the end of function. */
17396 else if (!current_function_decl)
17397 endname = text_end_label, endview = 0;
17398 else
17399 {
17400 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17401 current_function_funcdef_no);
17402 endname = ggc_strdup (label_id);
17403 endview = 0;
17404 }
17405
17406 *listp = new_loc_list (descr, node->label, node->view,
17407 endname, endview, secname);
17408 if (TREE_CODE (decl) == PARM_DECL
17409 && node == loc_list->first
17410 && NOTE_P (node->loc)
17411 && strcmp (node->label, endname) == 0)
17412 (*listp)->force = true;
17413 listp = &(*listp)->dw_loc_next;
17414 }
17415 }
17416
17417 if (cfun
17418 && crtl->has_bb_partition
17419 && node == loc_list->last_before_switch)
17420 {
17421 bool save_in_cold_section_p = in_cold_section_p;
17422 in_cold_section_p = !first_function_block_is_cold;
17423 secname = secname_for_decl (decl);
17424 in_cold_section_p = save_in_cold_section_p;
17425 }
17426
17427 if (range_across_switch)
17428 {
17429 if (GET_CODE (node->loc) == EXPR_LIST)
17430 descr = dw_sra_loc_expr (decl, node->loc);
17431 else
17432 {
17433 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17434 varloc = NOTE_VAR_LOCATION (node->loc);
17435 descr = dw_loc_list_1 (decl, varloc, want_address,
17436 initialized);
17437 }
17438 gcc_assert (descr);
17439 /* The variable has a location between NODE->LABEL and
17440 NODE->NEXT->LABEL. */
17441 if (node->next)
17442 endname = node->next->label, endview = node->next->view;
17443 else
17444 endname = cfun->fde->dw_fde_second_end, endview = 0;
17445 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17446 endname, endview, secname);
17447 listp = &(*listp)->dw_loc_next;
17448 }
17449 }
17450
17451 /* Try to avoid the overhead of a location list by emitting a location
17452 expression instead, but only if we didn't have more than one
17453 location entry in the first place. If some entries were not
17454 representable, we don't want to pretend that a single entry that was
17455 representable applies to the entire scope in which the variable is
17456 available. */
17457 if (list && loc_list->first->next)
17458 gen_llsym (list);
17459 else
17460 maybe_gen_llsym (list);
17461
17462 return list;
17463 }
17464
17465 /* Return true if the loc_list has only a single element and thus can be
17466 represented as a location description. */
17467
17468 static bool
17469 single_element_loc_list_p (dw_loc_list_ref list)
17470 {
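/* A list with more than one element must already have had a list symbol
   generated for it; conversely, an element that carries a symbol will be
   emitted as a location list even if it is alone.  */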
17471 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17472 return !list->ll_symbol;
17473 }
17474
17475 /* Duplicate a single element of location list. */
17476
17477 static inline dw_loc_descr_ref
17478 copy_loc_descr (dw_loc_descr_ref ref)
17479 {
17480 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17481 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17482 return copy;
17483 }
17484
17485 /* To each location in list LIST append loc descr REF. */
17486
17487 static void
17488 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17489 {
17490 dw_loc_descr_ref copy;
17491 add_loc_descr (&list->expr, ref);
17492 list = list->dw_loc_next;
17493 while (list)
17494 {
17495 copy = copy_loc_descr (ref);
17496 add_loc_descr (&list->expr, copy);
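/* REF may be a chain of several operations: deep-copy the rest of it so
   that each list element gets its own private copy.  */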
17497 while (copy->dw_loc_next)
17498 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17499 list = list->dw_loc_next;
17500 }
17501 }
17502
17503 /* To each location in list LIST prepend loc descr REF. */
17504
17505 static void
17506 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17507 {
17508 dw_loc_descr_ref copy;
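/* REF_END remembers the head of the first element's original expression:
   the REF chain is spliced in front of it below, and for every other list
   element we copy the REF prefix up to (but not including) REF_END and
   chain the copy to that element's own expression.  */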
17509 dw_loc_descr_ref ref_end = list->expr;
17510 add_loc_descr (&ref, list->expr);
17511 list->expr = ref;
17512 list = list->dw_loc_next;
17513 while (list)
17514 {
17515 dw_loc_descr_ref end = list->expr;
17516 list->expr = copy = copy_loc_descr (ref);
17517 while (copy->dw_loc_next != ref_end)
17518 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17519 copy->dw_loc_next = end;
17520 list = list->dw_loc_next;
17521 }
17522 }
17523
17524 /* Given two lists RET and LIST,
17525 produce a location list that is the result of adding the expression in LIST
17526 to the expression in RET at each position in the program.
17527 Might be destructive on both RET and LIST.
17528
17529 TODO: We handle only the simple cases of RET or LIST having at most one
17530 element. The general case would involve sorting the lists in program order
17531 and merging them, which will need some additional work.
17532 Adding that will improve the quality of debug info, especially for SRA-ed
17533 structures. */
17534
17535 static void
17536 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17537 {
17538 if (!list)
17539 return;
17540 if (!*ret)
17541 {
17542 *ret = list;
17543 return;
17544 }
17545 if (!list->dw_loc_next)
17546 {
17547 add_loc_descr_to_each (*ret, list->expr);
17548 return;
17549 }
17550 if (!(*ret)->dw_loc_next)
17551 {
17552 prepend_loc_descr_to_each (list, (*ret)->expr);
17553 *ret = list;
17554 return;
17555 }
17556 expansion_failed (NULL_TREE, NULL_RTX,
17557 "Don't know how to merge two non-trivial"
17558 " location lists.\n");
17559 *ret = NULL;
17560 return;
17561 }
17562
17563 /* LOC is a constant expression. Try our luck: look it up in the constant
17564 pool and return a loc_descr for its address. */
17565
17566 static dw_loc_descr_ref
17567 cst_pool_loc_descr (tree loc)
17568 {
17569 /* Get an RTL for this, if something has been emitted. */
17570 rtx rtl = lookup_constant_def (loc);
17571
17572 if (!rtl || !MEM_P (rtl))
17573 {
17574 gcc_assert (!rtl);
17575 return 0;
17576 }
17577 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17578
17579 /* TODO: We might get more coverage if we were actually delaying expansion
17580 of all expressions till the end of compilation, when constant pools are
17581 fully populated. */
17582 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17583 {
17584 expansion_failed (loc, NULL_RTX,
17585 "CST value in constant pool but not marked.");
17586 return 0;
17587 }
17588 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17589 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17590 }
17591
17592 /* Return a dw_loc_list representing the address of addr_expr LOC
17593 by looking for an inner INDIRECT_REF expression and turning
17594 it into simple arithmetic.
17595
17596 See loc_list_from_tree for the meaning of CONTEXT. */
17597
17598 static dw_loc_list_ref
17599 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17600 loc_descr_context *context)
17601 {
17602 tree obj, offset;
17603 poly_int64 bitsize, bitpos, bytepos;
17604 machine_mode mode;
17605 int unsignedp, reversep, volatilep = 0;
17606 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17607
17608 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17609 &bitsize, &bitpos, &offset, &mode,
17610 &unsignedp, &reversep, &volatilep);
17611 STRIP_NOPS (obj);
17612 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17613 {
17614 expansion_failed (loc, NULL_RTX, "bitfield access");
17615 return 0;
17616 }
17617 if (!INDIRECT_REF_P (obj))
17618 {
17619 expansion_failed (obj,
17620 NULL_RTX, "no indirect ref in inner reference");
17621 return 0;
17622 }
17623 if (!offset && known_eq (bitpos, 0))
17624 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17625 context);
17626 else if (toplev
17627 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17628 && (dwarf_version >= 4 || !dwarf_strict))
17629 {
17630 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17631 if (!list_ret)
17632 return 0;
17633 if (offset)
17634 {
17635 /* Variable offset. */
17636 list_ret1 = loc_list_from_tree (offset, 0, context);
17637 if (list_ret1 == 0)
17638 return 0;
17639 add_loc_list (&list_ret, list_ret1);
17640 if (!list_ret)
17641 return 0;
17642 add_loc_descr_to_each (list_ret,
17643 new_loc_descr (DW_OP_plus, 0, 0));
17644 }
17645 HOST_WIDE_INT value;
17646 if (bytepos.is_constant (&value) && value > 0)
17647 add_loc_descr_to_each (list_ret,
17648 new_loc_descr (DW_OP_plus_uconst, value, 0));
17649 else if (maybe_ne (bytepos, 0))
17650 loc_list_plus_const (list_ret, bytepos);
17651 add_loc_descr_to_each (list_ret,
17652 new_loc_descr (DW_OP_stack_value, 0, 0));
17653 }
17654 return list_ret;
17655 }
17656
17657 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17658 operations from LOC on are nops, move to the last one. Insert into NOPS all
17659 operations that are skipped. */
17660
17661 static void
17662 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17663 hash_set<dw_loc_descr_ref> &nops)
17664 {
17665 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17666 {
17667 nops.add (loc);
17668 loc = loc->dw_loc_next;
17669 }
17670 }
17671
17672 /* Helper for loc_descr_without_nops: free the location description operation
17673 LOC. */
17674
17675 bool
17676 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17677 {
17678 ggc_free (loc);
17679 return true;
17680 }
17681
17682 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17683 finishes LOC. */
17684
17685 static void
17686 loc_descr_without_nops (dw_loc_descr_ref &loc)
17687 {
17688 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17689 return;
17690
17691 /* Set of all DW_OP_nop operations we remove. */
17692 hash_set<dw_loc_descr_ref> nops;
17693
17694 /* First, strip all prefix NOP operations in order to keep the head of the
17695 operations list. */
17696 loc_descr_to_next_no_nop (loc, nops);
17697
17698 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17699 {
17700 /* For control flow operations: strip "prefix" nops in destination
17701 labels. */
17702 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17703 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17704 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17705 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17706
17707 /* Do the same for the operations that follow, then move to the next
17708 iteration. */
17709 if (cur->dw_loc_next != NULL)
17710 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17711 cur = cur->dw_loc_next;
17712 }
17713
17714 nops.traverse<void *, free_loc_descr> (NULL);
17715 }
17716
17717
17718 struct dwarf_procedure_info;
17719
17720 /* Helper structure for location descriptions generation. */
17721 struct loc_descr_context
17722 {
17723 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17724 NULL_TREE if DW_OP_push_object_address is invalid for this location
17725 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17726 tree context_type;
17727 /* The ..._DECL node that should be translated as a
17728 DW_OP_push_object_address operation. */
17729 tree base_decl;
17730 /* Information about the DWARF procedure we are currently generating. NULL if
17731 we are not generating a DWARF procedure. */
17732 struct dwarf_procedure_info *dpi;
17733 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17734 by consumer. Used for DW_TAG_generic_subrange attributes. */
17735 bool placeholder_arg;
17736 /* True if PLACEHOLDER_EXPR has been seen. */
17737 bool placeholder_seen;
17738 };
17739
17740 /* DWARF procedure generation
17741
17742 DWARF expressions (aka. location descriptions) are used to encode variable
17743 quantities such as sizes or offsets. Such computations can have redundant parts
17744 that can be factorized in order to reduce the size of the output debug
17745 information. This is the whole point of DWARF procedures.
17746
17747 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17748 already factorized into functions ("size functions") in order to handle very
17749 big and complex types. Such functions are quite simple: they have integral
17750 arguments, they return an integral result and their body contains only a
17751 return statement with arithmetic expressions. This is the only kind of
17752 function we are interested in translating into DWARF procedures, here.
17753
17754 DWARF expressions and DWARF procedures are executed using a stack, so we have
17755 to define some calling convention for them to interact. Let's say that:
17756
17757 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17758 all arguments in reverse order (right-to-left) so that when the DWARF
17759 procedure execution starts, the first argument is the top of the stack.
17760
17761 - Then, when returning, the DWARF procedure must have consumed all arguments
17762 on the stack, must have pushed the result and touched nothing else.
17763
17764 - Each integral argument and the result have integral types that can be held
17765 in a single stack slot.
17766
17767 - We call "frame offset" the number of stack slots that are "under DWARF
17768 procedure control": it includes the arguments slots, the temporaries and
17769 the result slot. Thus, it is equal to the number of arguments when the
17770 procedure execution starts and must be equal to one (the result) when it
17771 returns. */
17772
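/* For instance, to evaluate a call F (X, Y) under this convention, the
   caller's expression pushes Y, then X (so that X, the first argument, is
   on top of the stack), and then performs a DW_OP_call* referencing F's
   DW_TAG_dwarf_procedure DIE.  On entry the frame offset is 2; when F
   returns, both argument slots have been consumed and only the result
   remains, so the frame offset is back to 1.  */
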
17773 /* Helper structure used when generating operations for a DWARF procedure. */
17774 struct dwarf_procedure_info
17775 {
17776 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17777 currently translated. */
17778 tree fndecl;
17779 /* The number of arguments FNDECL takes. */
17780 unsigned args_count;
17781 };
17782
17783 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17784 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17785 equate it to this DIE. */
17786
17787 static dw_die_ref
17788 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17789 dw_die_ref parent_die)
17790 {
17791 dw_die_ref dwarf_proc_die;
17792
17793 if ((dwarf_version < 3 && dwarf_strict)
17794 || location == NULL)
17795 return NULL;
17796
17797 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17798 if (fndecl)
17799 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17800 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17801 return dwarf_proc_die;
17802 }
17803
17804 /* Return whether TYPE is a supported type as a DWARF procedure argument
17805 type or return type (we handle only scalar types and pointer types that
17806 aren't wider than the DWARF expression evaluation stack). */
17807
17808 static bool
17809 is_handled_procedure_type (tree type)
17810 {
17811 return ((INTEGRAL_TYPE_P (type)
17812 || TREE_CODE (type) == OFFSET_TYPE
17813 || TREE_CODE (type) == POINTER_TYPE)
17814 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17815 }
17816
17817 /* Helper for resolve_args_picking: do the same but stop when coming across
17818 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17819 offset *before* evaluating the corresponding operation. */
17820
17821 static bool
17822 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17823 struct dwarf_procedure_info *dpi,
17824 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17825 {
17826 /* The "frame_offset" identifier is already used to name a macro... */
17827 unsigned frame_offset_ = initial_frame_offset;
17828 dw_loc_descr_ref l;
17829
17830 for (l = loc; l != NULL;)
17831 {
17832 bool existed;
17833 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17834
17835 /* If we already met this node, there is nothing to compute anymore. */
17836 if (existed)
17837 {
17838 /* Make sure that the stack size is consistent wherever the execution
17839 flow comes from. */
17840 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17841 break;
17842 }
17843 l_frame_offset = frame_offset_;
17844
17845 /* If needed, relocate the picking offset with respect to the frame
17846 offset. */
17847 if (l->frame_offset_rel)
17848 {
17849 unsigned HOST_WIDE_INT off;
17850 switch (l->dw_loc_opc)
17851 {
17852 case DW_OP_pick:
17853 off = l->dw_loc_oprnd1.v.val_unsigned;
17854 break;
17855 case DW_OP_dup:
17856 off = 0;
17857 break;
17858 case DW_OP_over:
17859 off = 1;
17860 break;
17861 default:
17862 gcc_unreachable ();
17863 }
17864 /* frame_offset_ is the size of the current stack frame, including
17865 incoming arguments. Besides, the arguments are pushed
17866 right-to-left. Thus, in order to access the Nth argument from
17867 this operation node, the picking has to skip temporaries *plus*
17868 one stack slot per argument (0 for the first one, 1 for the second
17869 one, etc.).
17870
17871 The targeted argument number (N) is already set as the operand,
17872 and the number of temporaries can be computed with:
17873 frame_offset_ - dpi->args_count */
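/* For instance, with dpi->args_count == 2 and frame_offset_ == 3
   (two arguments plus one temporary on the stack), accessing the
   second argument (operand 1) yields off = 1 + (3 - 2) = 2,
   i.e. DW_OP_pick 2: skip the temporary and the first argument.  */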
17874 off += frame_offset_ - dpi->args_count;
17875
17876 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17877 if (off > 255)
17878 return false;
17879
17880 if (off == 0)
17881 {
17882 l->dw_loc_opc = DW_OP_dup;
17883 l->dw_loc_oprnd1.v.val_unsigned = 0;
17884 }
17885 else if (off == 1)
17886 {
17887 l->dw_loc_opc = DW_OP_over;
17888 l->dw_loc_oprnd1.v.val_unsigned = 0;
17889 }
17890 else
17891 {
17892 l->dw_loc_opc = DW_OP_pick;
17893 l->dw_loc_oprnd1.v.val_unsigned = off;
17894 }
17895 }
17896
17897 /* Update frame_offset according to the effect the current operation has
17898 on the stack. */
17899 switch (l->dw_loc_opc)
17900 {
17901 case DW_OP_deref:
17902 case DW_OP_swap:
17903 case DW_OP_rot:
17904 case DW_OP_abs:
17905 case DW_OP_neg:
17906 case DW_OP_not:
17907 case DW_OP_plus_uconst:
17908 case DW_OP_skip:
17909 case DW_OP_reg0:
17910 case DW_OP_reg1:
17911 case DW_OP_reg2:
17912 case DW_OP_reg3:
17913 case DW_OP_reg4:
17914 case DW_OP_reg5:
17915 case DW_OP_reg6:
17916 case DW_OP_reg7:
17917 case DW_OP_reg8:
17918 case DW_OP_reg9:
17919 case DW_OP_reg10:
17920 case DW_OP_reg11:
17921 case DW_OP_reg12:
17922 case DW_OP_reg13:
17923 case DW_OP_reg14:
17924 case DW_OP_reg15:
17925 case DW_OP_reg16:
17926 case DW_OP_reg17:
17927 case DW_OP_reg18:
17928 case DW_OP_reg19:
17929 case DW_OP_reg20:
17930 case DW_OP_reg21:
17931 case DW_OP_reg22:
17932 case DW_OP_reg23:
17933 case DW_OP_reg24:
17934 case DW_OP_reg25:
17935 case DW_OP_reg26:
17936 case DW_OP_reg27:
17937 case DW_OP_reg28:
17938 case DW_OP_reg29:
17939 case DW_OP_reg30:
17940 case DW_OP_reg31:
17941 case DW_OP_bregx:
17942 case DW_OP_piece:
17943 case DW_OP_deref_size:
17944 case DW_OP_nop:
17945 case DW_OP_bit_piece:
17946 case DW_OP_implicit_value:
17947 case DW_OP_stack_value:
17948 break;
17949
17950 case DW_OP_addr:
17951 case DW_OP_const1u:
17952 case DW_OP_const1s:
17953 case DW_OP_const2u:
17954 case DW_OP_const2s:
17955 case DW_OP_const4u:
17956 case DW_OP_const4s:
17957 case DW_OP_const8u:
17958 case DW_OP_const8s:
17959 case DW_OP_constu:
17960 case DW_OP_consts:
17961 case DW_OP_dup:
17962 case DW_OP_over:
17963 case DW_OP_pick:
17964 case DW_OP_lit0:
17965 case DW_OP_lit1:
17966 case DW_OP_lit2:
17967 case DW_OP_lit3:
17968 case DW_OP_lit4:
17969 case DW_OP_lit5:
17970 case DW_OP_lit6:
17971 case DW_OP_lit7:
17972 case DW_OP_lit8:
17973 case DW_OP_lit9:
17974 case DW_OP_lit10:
17975 case DW_OP_lit11:
17976 case DW_OP_lit12:
17977 case DW_OP_lit13:
17978 case DW_OP_lit14:
17979 case DW_OP_lit15:
17980 case DW_OP_lit16:
17981 case DW_OP_lit17:
17982 case DW_OP_lit18:
17983 case DW_OP_lit19:
17984 case DW_OP_lit20:
17985 case DW_OP_lit21:
17986 case DW_OP_lit22:
17987 case DW_OP_lit23:
17988 case DW_OP_lit24:
17989 case DW_OP_lit25:
17990 case DW_OP_lit26:
17991 case DW_OP_lit27:
17992 case DW_OP_lit28:
17993 case DW_OP_lit29:
17994 case DW_OP_lit30:
17995 case DW_OP_lit31:
17996 case DW_OP_breg0:
17997 case DW_OP_breg1:
17998 case DW_OP_breg2:
17999 case DW_OP_breg3:
18000 case DW_OP_breg4:
18001 case DW_OP_breg5:
18002 case DW_OP_breg6:
18003 case DW_OP_breg7:
18004 case DW_OP_breg8:
18005 case DW_OP_breg9:
18006 case DW_OP_breg10:
18007 case DW_OP_breg11:
18008 case DW_OP_breg12:
18009 case DW_OP_breg13:
18010 case DW_OP_breg14:
18011 case DW_OP_breg15:
18012 case DW_OP_breg16:
18013 case DW_OP_breg17:
18014 case DW_OP_breg18:
18015 case DW_OP_breg19:
18016 case DW_OP_breg20:
18017 case DW_OP_breg21:
18018 case DW_OP_breg22:
18019 case DW_OP_breg23:
18020 case DW_OP_breg24:
18021 case DW_OP_breg25:
18022 case DW_OP_breg26:
18023 case DW_OP_breg27:
18024 case DW_OP_breg28:
18025 case DW_OP_breg29:
18026 case DW_OP_breg30:
18027 case DW_OP_breg31:
18028 case DW_OP_fbreg:
18029 case DW_OP_push_object_address:
18030 case DW_OP_call_frame_cfa:
18031 case DW_OP_GNU_variable_value:
18032 case DW_OP_GNU_addr_index:
18033 case DW_OP_GNU_const_index:
18034 ++frame_offset_;
18035 break;
18036
18037 case DW_OP_drop:
18038 case DW_OP_xderef:
18039 case DW_OP_and:
18040 case DW_OP_div:
18041 case DW_OP_minus:
18042 case DW_OP_mod:
18043 case DW_OP_mul:
18044 case DW_OP_or:
18045 case DW_OP_plus:
18046 case DW_OP_shl:
18047 case DW_OP_shr:
18048 case DW_OP_shra:
18049 case DW_OP_xor:
18050 case DW_OP_bra:
18051 case DW_OP_eq:
18052 case DW_OP_ge:
18053 case DW_OP_gt:
18054 case DW_OP_le:
18055 case DW_OP_lt:
18056 case DW_OP_ne:
18057 case DW_OP_regx:
18058 case DW_OP_xderef_size:
18059 --frame_offset_;
18060 break;
18061
18062 case DW_OP_call2:
18063 case DW_OP_call4:
18064 case DW_OP_call_ref:
18065 {
18066 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
18067 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
18068
18069 if (stack_usage == NULL)
18070 return false;
18071 frame_offset_ += *stack_usage;
18072 break;
18073 }
18074
18075 case DW_OP_implicit_pointer:
18076 case DW_OP_entry_value:
18077 case DW_OP_const_type:
18078 case DW_OP_regval_type:
18079 case DW_OP_deref_type:
18080 case DW_OP_convert:
18081 case DW_OP_reinterpret:
18082 case DW_OP_form_tls_address:
18083 case DW_OP_GNU_push_tls_address:
18084 case DW_OP_GNU_uninit:
18085 case DW_OP_GNU_encoded_addr:
18086 case DW_OP_GNU_implicit_pointer:
18087 case DW_OP_GNU_entry_value:
18088 case DW_OP_GNU_const_type:
18089 case DW_OP_GNU_regval_type:
18090 case DW_OP_GNU_deref_type:
18091 case DW_OP_GNU_convert:
18092 case DW_OP_GNU_reinterpret:
18093 case DW_OP_GNU_parameter_ref:
18094 /* loc_list_from_tree will probably not output these operations for
18095 size functions, so assume they will not appear here. */
18096 /* Fall through... */
18097
18098 default:
18099 gcc_unreachable ();
18100 }
18101
18102 /* Now, follow the control flow (except subroutine calls). */
18103 switch (l->dw_loc_opc)
18104 {
18105 case DW_OP_bra:
18106 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
18107 frame_offsets))
18108 return false;
18109 /* Fall through. */
18110
18111 case DW_OP_skip:
18112 l = l->dw_loc_oprnd1.v.val_loc;
18113 break;
18114
18115 case DW_OP_stack_value:
18116 return true;
18117
18118 default:
18119 l = l->dw_loc_next;
18120 break;
18121 }
18122 }
18123
18124 return true;
18125 }
18126
18127 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18128 operations) in order to resolve the operand of DW_OP_pick operations that
18129 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18130 offset *before* LOC is executed. Return whether all relocations were
18131 successful. */
18132
18133 static bool
18134 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18135 struct dwarf_procedure_info *dpi)
18136 {
18137 /* Associate to all visited operations the frame offset *before* evaluating
18138 this operation. */
18139 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18140
18141 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18142 frame_offsets);
18143 }
18144
18145 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18146 Return NULL if it is not possible. */
18147
18148 static dw_die_ref
18149 function_to_dwarf_procedure (tree fndecl)
18150 {
18151 struct loc_descr_context ctx;
18152 struct dwarf_procedure_info dpi;
18153 dw_die_ref dwarf_proc_die;
18154 tree tree_body = DECL_SAVED_TREE (fndecl);
18155 dw_loc_descr_ref loc_body, epilogue;
18156
18157 tree cursor;
18158 unsigned i;
18159
18160 /* Do not generate multiple DWARF procedures for the same function
18161 declaration. */
18162 dwarf_proc_die = lookup_decl_die (fndecl);
18163 if (dwarf_proc_die != NULL)
18164 return dwarf_proc_die;
18165
18166 /* DWARF procedures are available starting with the DWARFv3 standard. */
18167 if (dwarf_version < 3 && dwarf_strict)
18168 return NULL;
18169
18170 /* We handle only functions for which we still have a body, that return a
18171 supported type and that take arguments with supported types. Note that
18172 there is no point translating functions that return nothing. */
18173 if (tree_body == NULL_TREE
18174 || DECL_RESULT (fndecl) == NULL_TREE
18175 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18176 return NULL;
18177
18178 for (cursor = DECL_ARGUMENTS (fndecl);
18179 cursor != NULL_TREE;
18180 cursor = TREE_CHAIN (cursor))
18181 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18182 return NULL;
18183
18184 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18185 if (TREE_CODE (tree_body) != RETURN_EXPR)
18186 return NULL;
18187 tree_body = TREE_OPERAND (tree_body, 0);
18188 if (TREE_CODE (tree_body) != MODIFY_EXPR
18189 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18190 return NULL;
18191 tree_body = TREE_OPERAND (tree_body, 1);
18192
18193 /* Try to translate the body expression itself. Note that this will probably
18194 cause an infinite recursion if its call graph has a cycle. This is very
18195 unlikely for size functions, however, so don't bother with such things at
18196 the moment. */
18197 ctx.context_type = NULL_TREE;
18198 ctx.base_decl = NULL_TREE;
18199 ctx.dpi = &dpi;
18200 ctx.placeholder_arg = false;
18201 ctx.placeholder_seen = false;
18202 dpi.fndecl = fndecl;
18203 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18204 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18205 if (!loc_body)
18206 return NULL;
18207
18208 /* After evaluating all operands in "loc_body", we should still have on the
18209 stack all arguments plus the desired function result (top of the stack).
18210 Generate code in order to keep only the result in our stack frame. */
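/* For example, with two arguments the epilogue built below is
   DW_OP_swap, DW_OP_drop, DW_OP_swap, DW_OP_drop: each swap/drop pair
   removes one argument buried just below the result, leaving only the
   result on the stack.  */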
18211 epilogue = NULL;
18212 for (i = 0; i < dpi.args_count; ++i)
18213 {
18214 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18215 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18216 op_couple->dw_loc_next->dw_loc_next = epilogue;
18217 epilogue = op_couple;
18218 }
18219 add_loc_descr (&loc_body, epilogue);
18220 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18221 return NULL;
18222
18223 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
18224 earlier because they were considered useful. Now that there is an epilogue,
18225 they are not needed anymore, so give it another try. */
18226 loc_descr_without_nops (loc_body);
18227
18228 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18229 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
18230 though, given that size functions do not come from source, so they should
18231 not have a dedicated DW_TAG_subprogram DIE. */
18232 dwarf_proc_die
18233 = new_dwarf_proc_die (loc_body, fndecl,
18234 get_context_die (DECL_CONTEXT (fndecl)));
18235
18236 /* The called DWARF procedure consumes one stack slot per argument and
18237 returns one stack slot. */
18238 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18239
18240 return dwarf_proc_die;
18241 }
18242
18243
18244 /* Generate a DWARF location list representing LOC.
18245 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
18246 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18247 If WANT_ADDRESS is 2, an expression computing an address usable in a
18248 location description is returned (i.e. DW_OP_reg can be used
18249 to refer to register values).
18250
18251 CONTEXT provides information to customize the location descriptions
18252 generation. Its context_type field specifies what type is implicitly
18253 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18254 will not be generated.
18255
18256 Its DPI field determines whether we are generating a DWARF expression for a
18257 DWARF procedure, so PARM_DECL references are processed specifically.
18258
18259 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18260 and dpi fields were null. */
18261
18262 static dw_loc_list_ref
18263 loc_list_from_tree_1 (tree loc, int want_address,
18264 struct loc_descr_context *context)
18265 {
18266 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18267 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18268 int have_address = 0;
18269 enum dwarf_location_atom op;
18270
18271 /* ??? Most of the time we do not take proper care for sign/zero
18272 extending the values properly. Hopefully this won't be a real
18273 problem... */
18274
18275 if (context != NULL
18276 && context->base_decl == loc
18277 && want_address == 0)
18278 {
18279 if (dwarf_version >= 3 || !dwarf_strict)
18280 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18281 NULL, 0, NULL, 0, NULL);
18282 else
18283 return NULL;
18284 }
18285
18286 switch (TREE_CODE (loc))
18287 {
18288 case ERROR_MARK:
18289 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18290 return 0;
18291
18292 case PLACEHOLDER_EXPR:
18293 /* This case involves extracting fields from an object to determine the
18294 position of other fields. It is supposed to appear only as the first
18295 operand of COMPONENT_REF nodes and to reference precisely the type
18296 that the context allows or its enclosing type. */
18297 if (context != NULL
18298 && (TREE_TYPE (loc) == context->context_type
18299 || TREE_TYPE (loc) == TYPE_CONTEXT (context->context_type))
18300 && want_address >= 1)
18301 {
18302 if (dwarf_version >= 3 || !dwarf_strict)
18303 {
18304 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18305 have_address = 1;
18306 break;
18307 }
18308 else
18309 return NULL;
18310 }
18311 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18312 the single argument passed by consumer. */
18313 else if (context != NULL
18314 && context->placeholder_arg
18315 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18316 && want_address == 0)
18317 {
18318 ret = new_loc_descr (DW_OP_pick, 0, 0);
18319 ret->frame_offset_rel = 1;
18320 context->placeholder_seen = true;
18321 break;
18322 }
18323 else
18324 expansion_failed (loc, NULL_RTX,
18325 "PLACEHOLDER_EXPR for an unexpected type");
18326 break;
18327
18328 case CALL_EXPR:
18329 {
18330 const int nargs = call_expr_nargs (loc);
18331 tree callee = get_callee_fndecl (loc);
18332 int i;
18333 dw_die_ref dwarf_proc;
18334
18335 if (callee == NULL_TREE)
18336 goto call_expansion_failed;
18337
18338 /* We handle only functions returning a supported scalar type. */
18339 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18340 goto call_expansion_failed;
18341
18342 dwarf_proc = function_to_dwarf_procedure (callee);
18343 if (dwarf_proc == NULL)
18344 goto call_expansion_failed;
18345
18346 /* Evaluate arguments right-to-left so that the first argument will
18347 be the top-most one on the stack. */
18348 for (i = nargs - 1; i >= 0; --i)
18349 {
18350 dw_loc_descr_ref loc_descr
18351 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18352 context);
18353
18354 if (loc_descr == NULL)
18355 goto call_expansion_failed;
18356
18357 add_loc_descr (&ret, loc_descr);
18358 }
18359
18360 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18361 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18362 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18363 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18364 add_loc_descr (&ret, ret1);
18365 break;
18366
18367 call_expansion_failed:
18368 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18369 /* There are no opcodes for these operations. */
18370 return 0;
18371 }
18372
18373 case PREINCREMENT_EXPR:
18374 case PREDECREMENT_EXPR:
18375 case POSTINCREMENT_EXPR:
18376 case POSTDECREMENT_EXPR:
18377 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18378 /* There are no opcodes for these operations. */
18379 return 0;
18380
18381 case ADDR_EXPR:
18382 /* If we already want an address, see if there is INDIRECT_REF inside
18383 e.g. for &this->field. */
18384 if (want_address)
18385 {
18386 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18387 (loc, want_address == 2, context);
18388 if (list_ret)
18389 have_address = 1;
18390 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18391 && (ret = cst_pool_loc_descr (loc)))
18392 have_address = 1;
18393 }
18394 /* Otherwise, process the argument and look for the address. */
18395 if (!list_ret && !ret)
18396 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18397 else
18398 {
18399 if (want_address)
18400 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18401 return NULL;
18402 }
18403 break;
18404
18405 case VAR_DECL:
18406 if (DECL_THREAD_LOCAL_P (loc))
18407 {
18408 rtx rtl;
18409 enum dwarf_location_atom tls_op;
18410 enum dtprel_bool dtprel = dtprel_false;
18411
18412 if (targetm.have_tls)
18413 {
18414 /* If this is not defined, we have no way to emit the
18415 data. */
18416 if (!targetm.asm_out.output_dwarf_dtprel)
18417 return 0;
18418
18419 /* The way DW_OP_GNU_push_tls_address is specified, we
18420 can only look up addresses of objects in the current
18421 module. We used DW_OP_addr as first op, but that's
18422 wrong, because DW_OP_addr is relocated by the debug
18423 info consumer, while DW_OP_GNU_push_tls_address
18424 operand shouldn't be. */
18425 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18426 return 0;
18427 dtprel = dtprel_true;
18428 /* We check for DWARF 5 here because gdb did not implement
18429 DW_OP_form_tls_address until after 7.12. */
18430 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18431 : DW_OP_GNU_push_tls_address);
18432 }
18433 else
18434 {
18435 if (!targetm.emutls.debug_form_tls_address
18436 || !(dwarf_version >= 3 || !dwarf_strict))
18437 return 0;
18438 /* We stuffed the control variable into the DECL_VALUE_EXPR
18439 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18440 no longer appear in gimple code. We used the control
18441 variable specifically so that we could pick it up here. */
18442 loc = DECL_VALUE_EXPR (loc);
18443 tls_op = DW_OP_form_tls_address;
18444 }
18445
18446 rtl = rtl_for_decl_location (loc);
18447 if (rtl == NULL_RTX)
18448 return 0;
18449
18450 if (!MEM_P (rtl))
18451 return 0;
18452 rtl = XEXP (rtl, 0);
18453 if (! CONSTANT_P (rtl))
18454 return 0;
18455
18456 ret = new_addr_loc_descr (rtl, dtprel);
18457 ret1 = new_loc_descr (tls_op, 0, 0);
18458 add_loc_descr (&ret, ret1);
18459
18460 have_address = 1;
18461 break;
18462 }
18463 /* FALLTHRU */
18464
18465 case PARM_DECL:
18466 if (context != NULL && context->dpi != NULL
18467 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18468 {
18469 /* We are generating code for a DWARF procedure and we want to access
18470 one of its arguments: find the appropriate argument offset and let
18471 the resolve_args_picking pass compute the offset that complies
18472 with the stack frame size. */
18473 unsigned i = 0;
18474 tree cursor;
18475
18476 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18477 cursor != NULL_TREE && cursor != loc;
18478 cursor = TREE_CHAIN (cursor), ++i)
18479 ;
18480 /* If we are translating a DWARF procedure, all referenced parameters
18481 must belong to the current function. */
18482 gcc_assert (cursor != NULL_TREE);
18483
18484 ret = new_loc_descr (DW_OP_pick, i, 0);
18485 ret->frame_offset_rel = 1;
18486 break;
18487 }
18488 /* FALLTHRU */
18489
18490 case RESULT_DECL:
18491 if (DECL_HAS_VALUE_EXPR_P (loc))
18492 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18493 want_address, context);
18494 /* FALLTHRU */
18495
18496 case FUNCTION_DECL:
18497 {
18498 rtx rtl;
18499 var_loc_list *loc_list = lookup_decl_loc (loc);
18500
18501 if (loc_list && loc_list->first)
18502 {
18503 list_ret = dw_loc_list (loc_list, loc, want_address);
18504 have_address = want_address != 0;
18505 break;
18506 }
18507 rtl = rtl_for_decl_location (loc);
18508 if (rtl == NULL_RTX)
18509 {
18510 if (TREE_CODE (loc) != FUNCTION_DECL
18511 && early_dwarf
18512 && current_function_decl
18513 && want_address != 1
18514 && ! DECL_IGNORED_P (loc)
18515 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18516 || POINTER_TYPE_P (TREE_TYPE (loc)))
18517 && DECL_CONTEXT (loc) == current_function_decl
18518 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18519 <= DWARF2_ADDR_SIZE))
18520 {
18521 dw_die_ref ref = lookup_decl_die (loc);
18522 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18523 if (ref)
18524 {
18525 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18526 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18527 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18528 }
18529 else
18530 {
18531 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18532 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18533 }
18534 break;
18535 }
18536 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18537 return 0;
18538 }
18539 else if (CONST_INT_P (rtl))
18540 {
18541 HOST_WIDE_INT val = INTVAL (rtl);
18542 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18543 val &= GET_MODE_MASK (DECL_MODE (loc));
18544 ret = int_loc_descriptor (val);
18545 }
18546 else if (GET_CODE (rtl) == CONST_STRING)
18547 {
18548 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18549 return 0;
18550 }
18551 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18552 ret = new_addr_loc_descr (rtl, dtprel_false);
18553 else
18554 {
18555 machine_mode mode, mem_mode;
18556
18557 /* Certain constructs can only be represented at top-level. */
18558 if (want_address == 2)
18559 {
18560 ret = loc_descriptor (rtl, VOIDmode,
18561 VAR_INIT_STATUS_INITIALIZED);
18562 have_address = 1;
18563 }
18564 else
18565 {
18566 mode = GET_MODE (rtl);
18567 mem_mode = VOIDmode;
18568 if (MEM_P (rtl))
18569 {
18570 mem_mode = mode;
18571 mode = get_address_mode (rtl);
18572 rtl = XEXP (rtl, 0);
18573 have_address = 1;
18574 }
18575 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18576 VAR_INIT_STATUS_INITIALIZED);
18577 }
18578 if (!ret)
18579 expansion_failed (loc, rtl,
18580 "failed to produce loc descriptor for rtl");
18581 }
18582 }
18583 break;
18584
18585 case MEM_REF:
18586 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18587 {
18588 have_address = 1;
18589 goto do_plus;
18590 }
18591 /* Fallthru. */
18592 case INDIRECT_REF:
18593 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18594 have_address = 1;
18595 break;
18596
18597 case TARGET_MEM_REF:
18598 case SSA_NAME:
18599 case DEBUG_EXPR_DECL:
18600 return NULL;
18601
18602 case COMPOUND_EXPR:
18603 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18604 context);
18605
18606 CASE_CONVERT:
18607 case VIEW_CONVERT_EXPR:
18608 case SAVE_EXPR:
18609 case MODIFY_EXPR:
18610 case NON_LVALUE_EXPR:
18611 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18612 context);
18613
18614 case COMPONENT_REF:
18615 case BIT_FIELD_REF:
18616 case ARRAY_REF:
18617 case ARRAY_RANGE_REF:
18618 case REALPART_EXPR:
18619 case IMAGPART_EXPR:
18620 {
18621 tree obj, offset;
18622 poly_int64 bitsize, bitpos, bytepos;
18623 machine_mode mode;
18624 int unsignedp, reversep, volatilep = 0;
18625
18626 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18627 &unsignedp, &reversep, &volatilep);
18628
18629 gcc_assert (obj != loc);
18630
18631 list_ret = loc_list_from_tree_1 (obj,
18632 want_address == 2
18633 && known_eq (bitpos, 0)
18634 && !offset ? 2 : 1,
18635 context);
18636 /* TODO: We can extract the value of a small expression via shifting even
18637 for a nonzero bitpos. */
18638 if (list_ret == 0)
18639 return 0;
18640 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18641 || !multiple_p (bitsize, BITS_PER_UNIT))
18642 {
18643 expansion_failed (loc, NULL_RTX,
18644 "bitfield access");
18645 return 0;
18646 }
18647
18648 if (offset != NULL_TREE)
18649 {
18650 /* Variable offset. */
18651 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18652 if (list_ret1 == 0)
18653 return 0;
18654 add_loc_list (&list_ret, list_ret1);
18655 if (!list_ret)
18656 return 0;
18657 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18658 }
18659
18660 HOST_WIDE_INT value;
18661 if (bytepos.is_constant (&value) && value > 0)
18662 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18663 value, 0));
18664 else if (maybe_ne (bytepos, 0))
18665 loc_list_plus_const (list_ret, bytepos);
18666
18667 have_address = 1;
18668 break;
18669 }
18670
18671 case INTEGER_CST:
18672 if ((want_address || !tree_fits_shwi_p (loc))
18673 && (ret = cst_pool_loc_descr (loc)))
18674 have_address = 1;
18675 else if (want_address == 2
18676 && tree_fits_shwi_p (loc)
18677 && (ret = address_of_int_loc_descriptor
18678 (int_size_in_bytes (TREE_TYPE (loc)),
18679 tree_to_shwi (loc))))
18680 have_address = 1;
18681 else if (tree_fits_shwi_p (loc))
18682 ret = int_loc_descriptor (tree_to_shwi (loc));
18683 else if (tree_fits_uhwi_p (loc))
18684 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18685 else
18686 {
18687 expansion_failed (loc, NULL_RTX,
18688 "Integer operand is not host integer");
18689 return 0;
18690 }
18691 break;
18692
18693 case POLY_INT_CST:
18694 {
18695 if (want_address)
18696 {
18697 expansion_failed (loc, NULL_RTX,
18698 "constant address with a runtime component");
18699 return 0;
18700 }
18701 poly_int64 value;
18702 if (!poly_int_tree_p (loc, &value))
18703 {
18704 expansion_failed (loc, NULL_RTX, "constant too big");
18705 return 0;
18706 }
18707 ret = int_loc_descriptor (value);
18708 }
18709 break;
18710
18711 case CONSTRUCTOR:
18712 case REAL_CST:
18713 case STRING_CST:
18714 case COMPLEX_CST:
18715 if ((ret = cst_pool_loc_descr (loc)))
18716 have_address = 1;
18717 else if (TREE_CODE (loc) == CONSTRUCTOR)
18718 {
18719 tree type = TREE_TYPE (loc);
18720 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18721 unsigned HOST_WIDE_INT offset = 0;
18722 unsigned HOST_WIDE_INT cnt;
18723 constructor_elt *ce;
18724
18725 if (TREE_CODE (type) == RECORD_TYPE)
18726 {
18727 /* This is very limited, but it's enough to output
18728 pointers to member functions, as long as the
18729 referenced function is defined in the current
18730 translation unit. */
18731 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18732 {
18733 tree val = ce->value;
18734
18735 tree field = ce->index;
18736
18737 if (val)
18738 STRIP_NOPS (val);
18739
18740 if (!field || DECL_BIT_FIELD (field))
18741 {
18742 expansion_failed (loc, NULL_RTX,
18743 "bitfield in record type constructor");
18744 size = offset = (unsigned HOST_WIDE_INT)-1;
18745 ret = NULL;
18746 break;
18747 }
18748
18749 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18750 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18751 gcc_assert (pos + fieldsize <= size);
18752 if (pos < offset)
18753 {
18754 expansion_failed (loc, NULL_RTX,
18755 "out-of-order fields in record constructor");
18756 size = offset = (unsigned HOST_WIDE_INT)-1;
18757 ret = NULL;
18758 break;
18759 }
18760 if (pos > offset)
18761 {
18762 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18763 add_loc_descr (&ret, ret1);
18764 offset = pos;
18765 }
18766 if (val && fieldsize != 0)
18767 {
18768 ret1 = loc_descriptor_from_tree (val, want_address, context);
18769 if (!ret1)
18770 {
18771 expansion_failed (loc, NULL_RTX,
18772 "unsupported expression in field");
18773 size = offset = (unsigned HOST_WIDE_INT)-1;
18774 ret = NULL;
18775 break;
18776 }
18777 add_loc_descr (&ret, ret1);
18778 }
18779 if (fieldsize)
18780 {
18781 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18782 add_loc_descr (&ret, ret1);
18783 offset = pos + fieldsize;
18784 }
18785 }
18786
18787 if (offset != size)
18788 {
18789 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18790 add_loc_descr (&ret, ret1);
18791 offset = size;
18792 }
18793
18794 have_address = !!want_address;
18795 }
18796 else
18797 expansion_failed (loc, NULL_RTX,
18798 "constructor of non-record type");
18799 }
18800 else
18801 /* We can construct small constants here using int_loc_descriptor. */
18802 expansion_failed (loc, NULL_RTX,
18803 "constructor or constant not in constant pool");
18804 break;
18805
18806 case TRUTH_AND_EXPR:
18807 case TRUTH_ANDIF_EXPR:
18808 case BIT_AND_EXPR:
18809 op = DW_OP_and;
18810 goto do_binop;
18811
18812 case TRUTH_XOR_EXPR:
18813 case BIT_XOR_EXPR:
18814 op = DW_OP_xor;
18815 goto do_binop;
18816
18817 case TRUTH_OR_EXPR:
18818 case TRUTH_ORIF_EXPR:
18819 case BIT_IOR_EXPR:
18820 op = DW_OP_or;
18821 goto do_binop;
18822
18823 case FLOOR_DIV_EXPR:
18824 case CEIL_DIV_EXPR:
18825 case ROUND_DIV_EXPR:
18826 case TRUNC_DIV_EXPR:
18827 case EXACT_DIV_EXPR:
18828 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18829 return 0;
18830 op = DW_OP_div;
18831 goto do_binop;
18832
18833 case MINUS_EXPR:
18834 op = DW_OP_minus;
18835 goto do_binop;
18836
18837 case FLOOR_MOD_EXPR:
18838 case CEIL_MOD_EXPR:
18839 case ROUND_MOD_EXPR:
18840 case TRUNC_MOD_EXPR:
18841 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18842 {
18843 op = DW_OP_mod;
18844 goto do_binop;
18845 }
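/* For signed operands, compute the remainder as op0 - (op0 / op1) * op1
   using DW_OP_div (a signed division): the DW_OP_over/DW_OP_over pair
   below duplicates both operands before dividing, multiplying and
   subtracting.  */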
18846 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18847 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18848 if (list_ret == 0 || list_ret1 == 0)
18849 return 0;
18850
18851 add_loc_list (&list_ret, list_ret1);
18852 if (list_ret == 0)
18853 return 0;
18854 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18855 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18856 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18857 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18858 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18859 break;
18860
18861 case MULT_EXPR:
18862 op = DW_OP_mul;
18863 goto do_binop;
18864
18865 case LSHIFT_EXPR:
18866 op = DW_OP_shl;
18867 goto do_binop;
18868
18869 case RSHIFT_EXPR:
18870 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18871 goto do_binop;
18872
18873 case POINTER_PLUS_EXPR:
18874 case PLUS_EXPR:
18875 do_plus:
18876 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18877 {
18878 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18879 smarter to encode their opposite. The DW_OP_plus_uconst operation
18880 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18881 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18882 bytes, Y being the size of the operation that pushes the opposite
18883 of the addend. So let's choose the smallest representation. */
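/* For instance, on a target with 8-byte addresses, an addend of -1 would
   need 1 + 10 bytes as DW_OP_plus_uconst (the ULEB128 encoding of
   0xffffffffffffffff takes 10 bytes), whereas "DW_OP_lit1; DW_OP_minus"
   takes only 2 bytes.  */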
18884 const tree tree_addend = TREE_OPERAND (loc, 1);
18885 offset_int wi_addend;
18886 HOST_WIDE_INT shwi_addend;
18887 dw_loc_descr_ref loc_naddend;
18888
18889 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18890 if (list_ret == 0)
18891 return 0;
18892
18893 /* Try to get the literal to push. It is the opposite of the addend,
18894 so as we rely on wrapping during DWARF evaluation, first decode
18895 the literal as a "DWARF-sized" signed number. */
18896 wi_addend = wi::to_offset (tree_addend);
18897 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18898 shwi_addend = wi_addend.to_shwi ();
18899 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18900 ? int_loc_descriptor (-shwi_addend)
18901 : NULL;
18902
18903 if (loc_naddend != NULL
18904 && ((unsigned) size_of_uleb128 (shwi_addend)
18905 > size_of_loc_descr (loc_naddend)))
18906 {
18907 add_loc_descr_to_each (list_ret, loc_naddend);
18908 add_loc_descr_to_each (list_ret,
18909 new_loc_descr (DW_OP_minus, 0, 0));
18910 }
18911 else
18912 {
18913 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18914 {
18915 loc_naddend = loc_cur;
18916 loc_cur = loc_cur->dw_loc_next;
18917 ggc_free (loc_naddend);
18918 }
18919 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18920 }
18921 break;
18922 }
18923
18924 op = DW_OP_plus;
18925 goto do_binop;
18926
18927 case LE_EXPR:
18928 op = DW_OP_le;
18929 goto do_comp_binop;
18930
18931 case GE_EXPR:
18932 op = DW_OP_ge;
18933 goto do_comp_binop;
18934
18935 case LT_EXPR:
18936 op = DW_OP_lt;
18937 goto do_comp_binop;
18938
18939 case GT_EXPR:
18940 op = DW_OP_gt;
18941 goto do_comp_binop;
18942
18943 do_comp_binop:
18944 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18945 {
18946 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18947 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18948 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18949 TREE_CODE (loc));
18950 break;
18951 }
18952 else
18953 goto do_binop;
18954
18955 case EQ_EXPR:
18956 op = DW_OP_eq;
18957 goto do_binop;
18958
18959 case NE_EXPR:
18960 op = DW_OP_ne;
18961 goto do_binop;
18962
18963 do_binop:
18964 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18965 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18966 if (list_ret == 0 || list_ret1 == 0)
18967 return 0;
18968
18969 add_loc_list (&list_ret, list_ret1);
18970 if (list_ret == 0)
18971 return 0;
18972 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18973 break;
18974
18975 case TRUTH_NOT_EXPR:
18976 case BIT_NOT_EXPR:
18977 op = DW_OP_not;
18978 goto do_unop;
18979
18980 case ABS_EXPR:
18981 op = DW_OP_abs;
18982 goto do_unop;
18983
18984 case NEGATE_EXPR:
18985 op = DW_OP_neg;
18986 goto do_unop;
18987
18988 do_unop:
18989 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18990 if (list_ret == 0)
18991 return 0;
18992
18993 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18994 break;
18995
18996 case MIN_EXPR:
18997 case MAX_EXPR:
18998 {
18999 const enum tree_code code =
19000 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
19001
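/* Rewrite MIN_EXPR (a, b) as the equivalent a > b ? b : a (and MAX_EXPR
   as a < b ? b : a) so that the COND_EXPR handling below applies.  */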
19002 loc = build3 (COND_EXPR, TREE_TYPE (loc),
19003 build2 (code, integer_type_node,
19004 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
19005 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
19006 }
19007
19008 /* fall through */
19009
19010 case COND_EXPR:
19011 {
19012 dw_loc_descr_ref lhs
19013 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
19014 dw_loc_list_ref rhs
19015 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
19016 dw_loc_descr_ref bra_node, jump_node, tmp;
19017
19018 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
19019 if (list_ret == 0 || lhs == 0 || rhs == 0)
19020 return 0;
19021
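/* The expression built below has the shape:
     <condition>; DW_OP_bra -> L1; <rhs>; DW_OP_skip -> L2; L1: <lhs>; L2: DW_OP_nop
   i.e. if the condition is nonzero we branch to the LHS expression,
   otherwise we fall through to the RHS and then skip over the LHS.  */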
19022 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
19023 add_loc_descr_to_each (list_ret, bra_node);
19024
19025 add_loc_list (&list_ret, rhs);
19026 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
19027 add_loc_descr_to_each (list_ret, jump_node);
19028
19029 add_loc_descr_to_each (list_ret, lhs);
19030 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
19031 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
19032
19033 /* ??? Need a node to point the skip at. Use a nop. */
19034 tmp = new_loc_descr (DW_OP_nop, 0, 0);
19035 add_loc_descr_to_each (list_ret, tmp);
19036 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
19037 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
19038 }
19039 break;
19040
19041 case FIX_TRUNC_EXPR:
19042 return 0;
19043
19044 default:
19045 /* Leave front-end specific codes as simply unknown. This comes
19046 up, for instance, with the C STMT_EXPR. */
19047 if ((unsigned int) TREE_CODE (loc)
19048 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
19049 {
19050 expansion_failed (loc, NULL_RTX,
19051 "language specific tree node");
19052 return 0;
19053 }
19054
19055 /* Otherwise this is a generic code; we should just list all of
19056 these explicitly. We forgot one. */
19057 if (flag_checking)
19058 gcc_unreachable ();
19059
19060 /* In a release build, we want to degrade gracefully: better to
19061 generate incomplete debugging information than to crash. */
19062 return NULL;
19063 }
19064
19065 if (!ret && !list_ret)
19066 return 0;
19067
19068 if (want_address == 2 && !have_address
19069 && (dwarf_version >= 4 || !dwarf_strict))
19070 {
19071 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
19072 {
19073 expansion_failed (loc, NULL_RTX,
19074 "DWARF address size mismatch");
19075 return 0;
19076 }
19077 if (ret)
19078 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
19079 else
19080 add_loc_descr_to_each (list_ret,
19081 new_loc_descr (DW_OP_stack_value, 0, 0));
19082 have_address = 1;
19083 }
19084 /* Report it if we can't fill the request for an address. */
19085 if (want_address && !have_address)
19086 {
19087 expansion_failed (loc, NULL_RTX,
19088 "Want address and only have value");
19089 return 0;
19090 }
19091
19092 gcc_assert (!ret || !list_ret);
19093
19094 /* If we've got an address and don't want one, dereference. */
19095 if (!want_address && have_address)
19096 {
19097 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
19098
19099 if (size > DWARF2_ADDR_SIZE || size == -1)
19100 {
19101 expansion_failed (loc, NULL_RTX,
19102 "DWARF address size mismatch");
19103 return 0;
19104 }
19105 else if (size == DWARF2_ADDR_SIZE)
19106 op = DW_OP_deref;
19107 else
19108 op = DW_OP_deref_size;
19109
19110 if (ret)
19111 add_loc_descr (&ret, new_loc_descr (op, size, 0));
19112 else
19113 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
19114 }
19115 if (ret)
19116 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
19117
19118 return list_ret;
19119 }
19120
19121 /* Likewise, but strip useless DW_OP_nop operations in the resulting
19122 expressions. */
19123
19124 static dw_loc_list_ref
19125 loc_list_from_tree (tree loc, int want_address,
19126 struct loc_descr_context *context)
19127 {
19128 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
19129
19130 for (dw_loc_list_ref loc_cur = result;
19131 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
19132 loc_descr_without_nops (loc_cur->expr);
19133 return result;
19134 }
19135
19136 /* Same as above but return only single location expression. */
19137 static dw_loc_descr_ref
19138 loc_descriptor_from_tree (tree loc, int want_address,
19139 struct loc_descr_context *context)
19140 {
19141 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
19142 if (!ret)
19143 return NULL;
19144 if (ret->dw_loc_next)
19145 {
19146 expansion_failed (loc, NULL_RTX,
19147 "Location list where only loc descriptor needed");
19148 return NULL;
19149 }
19150 return ret->expr;
19151 }
19152
19153 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19154 pointer to the declared type for the relevant field variable, or return
19155 `integer_type_node' if the given node turns out to be an
19156 ERROR_MARK node. */
19157
19158 static inline tree
19159 field_type (const_tree decl)
19160 {
19161 tree type;
19162
19163 if (TREE_CODE (decl) == ERROR_MARK)
19164 return integer_type_node;
19165
19166 type = DECL_BIT_FIELD_TYPE (decl);
19167 if (type == NULL_TREE)
19168 type = TREE_TYPE (decl);
19169
19170 return type;
19171 }
19172
19173 /* Given a pointer to a tree node, return the alignment in bits for
19174 it, or else return BITS_PER_WORD if the node actually turns out to
19175 be an ERROR_MARK node. */
19176
19177 static inline unsigned
19178 simple_type_align_in_bits (const_tree type)
19179 {
19180 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19181 }
19182
19183 static inline unsigned
19184 simple_decl_align_in_bits (const_tree decl)
19185 {
19186 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19187 }
19188
19189 /* Return the result of rounding T up to ALIGN. */
19190
19191 static inline offset_int
19192 round_up_to_align (const offset_int &t, unsigned int align)
19193 {
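/* E.g. rounding t == 13 up to align == 8 gives (13 + 7) / 8 * 8 == 16.  */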
19194 return wi::udiv_trunc (t + align - 1, align) * align;
19195 }
19196
19197 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19198 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19199 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19200 if we fail to return the size in one of these two forms. */
19201
19202 static dw_loc_descr_ref
19203 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19204 {
19205 tree tree_size;
19206 struct loc_descr_context ctx;
19207
19208 /* Prefer to return a constant integer, if possible. */
19209 *cst_size = int_size_in_bytes (type);
19210 if (*cst_size != -1)
19211 return NULL;
19212
19213 ctx.context_type = const_cast<tree> (type);
19214 ctx.base_decl = NULL_TREE;
19215 ctx.dpi = NULL;
19216 ctx.placeholder_arg = false;
19217 ctx.placeholder_seen = false;
19218
19219 type = TYPE_MAIN_VARIANT (type);
19220 tree_size = TYPE_SIZE_UNIT (type);
19221 return ((tree_size != NULL_TREE)
19222 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19223 : NULL);
19224 }
19225
19226 /* Helper structure for RECORD_TYPE processing. */
19227 struct vlr_context
19228 {
19229 /* Root RECORD_TYPE. It is needed to generate data member location
19230 descriptions in variable-length records (VLR), but also to cope with
19231 variants, which are composed of nested structures multiplexed with
19232 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19233 function processing a FIELD_DECL, it is required to be non null. */
19234 tree struct_type;
19235
19236 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19237 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19238 this variant part as part of the root record (in storage units). For
19239 regular records, it must be NULL_TREE. */
19240 tree variant_part_offset;
19241 };
19242
19243 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19244 addressed byte of the "containing object" for the given FIELD_DECL. If
19245 possible, return a native constant through CST_OFFSET (in which case NULL is
19246 returned); otherwise return a DWARF expression that computes the offset.
19247
19248 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19249 that offset is, either because the argument turns out to be a pointer to an
19250 ERROR_MARK node, or because the offset expression is too complex for us.
19251
19252 CTX is required: see the comment for VLR_CONTEXT. */
19253
19254 static dw_loc_descr_ref
19255 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19256 HOST_WIDE_INT *cst_offset)
19257 {
19258 tree tree_result;
19259 dw_loc_list_ref loc_result;
19260
19261 *cst_offset = 0;
19262
19263 if (TREE_CODE (decl) == ERROR_MARK)
19264 return NULL;
19265 else
19266 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19267
19268 /* We cannot handle variable bit offsets at the moment, so abort if it's the
19269 case. */
19270 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19271 return NULL;
19272
19273 /* We used to handle only constant offsets in all cases. Now, we
19274 properly handle dynamic byte offsets only when PCC bitfield type
19275 doesn't matter. */
19276 if (PCC_BITFIELD_TYPE_MATTERS
19277 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19278 {
19279 offset_int object_offset_in_bits;
19280 offset_int object_offset_in_bytes;
19281 offset_int bitpos_int;
19282 tree type;
19283 tree field_size_tree;
19284 offset_int deepest_bitpos;
19285 offset_int field_size_in_bits;
19286 unsigned int type_align_in_bits;
19287 unsigned int decl_align_in_bits;
19288 offset_int type_size_in_bits;
19289
19290 bitpos_int = wi::to_offset (bit_position (decl));
19291 type = field_type (decl);
19292 type_size_in_bits = offset_int_type_size_in_bits (type);
19293 type_align_in_bits = simple_type_align_in_bits (type);
19294
19295 field_size_tree = DECL_SIZE (decl);
19296
19297 /* The size could be unspecified if there was an error, or for
19298 a flexible array member. */
19299 if (!field_size_tree)
19300 field_size_tree = bitsize_zero_node;
19301
19302 /* If the size of the field is not constant, use the type size. */
19303 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19304 field_size_in_bits = wi::to_offset (field_size_tree);
19305 else
19306 field_size_in_bits = type_size_in_bits;
19307
19308 decl_align_in_bits = simple_decl_align_in_bits (decl);
19309
19310 /* The GCC front-end doesn't make any attempt to keep track of the
19311 starting bit offset (relative to the start of the containing
19312 structure type) of the hypothetical "containing object" for a
19313 bit-field. Thus, when computing the byte offset value for the
19314 start of the "containing object" of a bit-field, we must deduce
19315 this information on our own. This can be rather tricky to do in
19316 some cases. For example, handling the following structure type
19317 definition when compiling for an i386/i486 target (which only
19318 aligns long long's to 32-bit boundaries) can be very tricky:
19319
19320 struct S { int field1; long long field2:31; };
19321
19322 Fortunately, there is a simple rule-of-thumb which can be used
19323 in such cases. When compiling for an i386/i486, GCC will
19324 allocate 8 bytes for the structure shown above. It decides to
19325 do this based upon one simple rule for bit-field allocation.
19326 GCC allocates each "containing object" for each bit-field at
19327 the first (i.e. lowest addressed) legitimate alignment boundary
19328 (based upon the required minimum alignment for the declared
19329 type of the field) which it can possibly use, subject to the
19330 condition that there is still enough available space remaining
19331 in the containing object (when allocated at the selected point)
19332 to fully accommodate all of the bits of the bit-field itself.
19333
19334 This simple rule makes it obvious why GCC allocates 8 bytes for
19335 each object of the structure type shown above. When looking
19336 for a place to allocate the "containing object" for `field2',
19337 the compiler simply tries to allocate a 64-bit "containing
19338 object" at each successive 32-bit boundary (starting at zero)
19339 until it finds a place to allocate that 64-bit field such that
19340 at least 31 contiguous (and previously unallocated) bits remain
19341 within that selected 64-bit field. (As it turns out, for the
19342 example above, the compiler finds it is OK to allocate the
19343 "containing object" 64-bit field at bit-offset zero within the
19344 structure type.)
19345
19346 Here we attempt to work backwards from the limited set of facts
19347 we're given, and we try to deduce from those facts, where GCC
19348 must have believed that the containing object started (within
19349 the structure type). The value we deduce is then used (by the
19350 callers of this routine) to generate DW_AT_location and
19351 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19352 the case of DW_AT_location, regular fields as well). */
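/* Editorial sketch of the deduction below for the struct S example above
   (i386, long long aligned to 32 bits): for `field2' we have
   bitpos_int = 32, field_size_in_bits = 31, type_size_in_bits = 64 and
   type_align_in_bits = 32.  Then deepest_bitpos = 32 + 31 = 63 and
   object_offset_in_bits = 63 - 64 = -1, which rounds up to 0; since 0 is
   not greater than bitpos_int, the containing object is deduced to start
   at bit offset 0 (byte offset 0) within the structure, matching the
   allocation described above.  */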
19353
19354 /* Figure out the bit-distance from the start of the structure to
19355 the "deepest" bit of the bit-field. */
19356 deepest_bitpos = bitpos_int + field_size_in_bits;
19357
19358 /* This is the tricky part. Use some fancy footwork to deduce
19359 where the lowest addressed bit of the containing object must
19360 be. */
19361 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19362
19363 /* Round up to type_align by default. This works best for
19364 bitfields. */
19365 object_offset_in_bits
19366 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19367
19368 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19369 {
19370 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19371
19372 /* Round up to decl_align instead. */
19373 object_offset_in_bits
19374 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19375 }
19376
19377 object_offset_in_bytes
19378 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19379 if (ctx->variant_part_offset == NULL_TREE)
19380 {
19381 *cst_offset = object_offset_in_bytes.to_shwi ();
19382 return NULL;
19383 }
19384 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19385 }
19386 else
19387 tree_result = byte_position (decl);
19388
19389 if (ctx->variant_part_offset != NULL_TREE)
19390 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19391 ctx->variant_part_offset, tree_result);
19392
19393 /* If the byte offset is a constant, it's simpler to handle a native
19394 constant rather than a DWARF expression. */
19395 if (TREE_CODE (tree_result) == INTEGER_CST)
19396 {
19397 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19398 return NULL;
19399 }
19400 struct loc_descr_context loc_ctx = {
19401 ctx->struct_type, /* context_type */
19402 NULL_TREE, /* base_decl */
19403 NULL, /* dpi */
19404 false, /* placeholder_arg */
19405 false /* placeholder_seen */
19406 };
19407 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19408
19409 /* We want a DWARF expression: abort if we only have a location list with
19410 multiple elements. */
19411 if (!loc_result || !single_element_loc_list_p (loc_result))
19412 return NULL;
19413 else
19414 return loc_result->expr;
19415 }
19416 \f
19417 /* The following routines define various Dwarf attributes and any data
19418 associated with them. */
19419
19420 /* Add a location description attribute value to a DIE.
19421
19422 This emits location attributes suitable for whole variables and
19423 whole parameters. Note that the location attributes for struct fields are
19424 generated by the routine `data_member_location_attribute' below. */
19425
19426 static inline void
19427 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19428 dw_loc_list_ref descr)
19429 {
19430 bool check_no_locviews = true;
19431 if (descr == 0)
19432 return;
19433 if (single_element_loc_list_p (descr))
19434 add_AT_loc (die, attr_kind, descr->expr);
19435 else
19436 {
19437 add_AT_loc_list (die, attr_kind, descr);
19438 gcc_assert (descr->ll_symbol);
19439 if (attr_kind == DW_AT_location && descr->vl_symbol
19440 && dwarf2out_locviews_in_attribute ())
19441 {
19442 add_AT_view_list (die, DW_AT_GNU_locviews);
19443 check_no_locviews = false;
19444 }
19445 }
19446
19447 if (check_no_locviews)
19448 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19449 }
19450
19451 /* Add DW_AT_accessibility attribute to DIE if needed. */
19452
19453 static void
19454 add_accessibility_attribute (dw_die_ref die, tree decl)
19455 {
19456 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19457 children, otherwise the default is DW_ACCESS_public. In DWARF2
19458 the default has always been DW_ACCESS_public. */
19459 if (TREE_PROTECTED (decl))
19460 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19461 else if (TREE_PRIVATE (decl))
19462 {
19463 if (dwarf_version == 2
19464 || die->die_parent == NULL
19465 || die->die_parent->die_tag != DW_TAG_class_type)
19466 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19467 }
19468 else if (dwarf_version > 2
19469 && die->die_parent
19470 && die->die_parent->die_tag == DW_TAG_class_type)
19471 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19472 }
19473
19474 /* Attach the specialized form of location attribute used for data members of
19475 struct and union types. In the special case of a FIELD_DECL node which
19476 represents a bit-field, the "offset" part of this special location
19477 descriptor must indicate the distance in bytes from the lowest-addressed
19478 byte of the containing struct or union type to the lowest-addressed byte of
19479 the "containing object" for the bit-field. (See the `field_byte_offset'
19480 function above).
19481
19482 For any given bit-field, the "containing object" is a hypothetical object
19483 (of some integral or enum type) within which the given bit-field lives. The
19484 type of this hypothetical "containing object" is always the same as the
19485 declared type of the individual bit-field itself (for GCC anyway... the
19486 DWARF spec doesn't actually mandate this). Note that it is the size (in
19487 bytes) of the hypothetical "containing object" which will be given in the
19488 DW_AT_byte_size attribute for this bit-field. (See the
19489 `byte_size_attribute' function below.) It is also used when calculating the
19490 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19491 function below.)
19492
19493 CTX is required: see the comment for VLR_CONTEXT. */
19494
19495 static void
19496 add_data_member_location_attribute (dw_die_ref die,
19497 tree decl,
19498 struct vlr_context *ctx)
19499 {
19500 HOST_WIDE_INT offset;
19501 dw_loc_descr_ref loc_descr = 0;
19502
19503 if (TREE_CODE (decl) == TREE_BINFO)
19504 {
19505 /* We're working on the TAG_inheritance for a base class. */
19506 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19507 {
19508 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19509 aren't at a fixed offset from all (sub)objects of the same
19510 type. We need to extract the appropriate offset from our
19511 vtable. The following dwarf expression means
19512
19513 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19514
19515 This is specific to the V3 ABI, of course. */
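/* Editorial note: the code below builds that expression as the opcode
   sequence

     DW_OP_dup, DW_OP_deref, <constant -offset>, DW_OP_minus,
     DW_OP_deref, DW_OP_plus

   where <constant -offset> is whatever form int_loc_descriptor picks for
   the negated (negative) vtable slot offset taken from BINFO_VPTR_FIELD.  */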
19516
19517 dw_loc_descr_ref tmp;
19518
19519 /* Make a copy of the object address. */
19520 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19521 add_loc_descr (&loc_descr, tmp);
19522
19523 /* Extract the vtable address. */
19524 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19525 add_loc_descr (&loc_descr, tmp);
19526
19527 /* Calculate the address of the offset. */
19528 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19529 gcc_assert (offset < 0);
19530
19531 tmp = int_loc_descriptor (-offset);
19532 add_loc_descr (&loc_descr, tmp);
19533 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19534 add_loc_descr (&loc_descr, tmp);
19535
19536 /* Extract the offset. */
19537 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19538 add_loc_descr (&loc_descr, tmp);
19539
19540 /* Add it to the object address. */
19541 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19542 add_loc_descr (&loc_descr, tmp);
19543 }
19544 else
19545 offset = tree_to_shwi (BINFO_OFFSET (decl));
19546 }
19547 else
19548 {
19549 loc_descr = field_byte_offset (decl, ctx, &offset);
19550
19551 /* If loc_descr is available then we know the field offset is dynamic.
19552 However, GDB does not handle dynamic field offsets very well at the
19553 moment. */
19554 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19555 {
19556 loc_descr = NULL;
19557 offset = 0;
19558 }
19559
19560 /* Data member location evaluation starts with the base address on the
19561 stack. Compute the field offset and add it to this base address. */
19562 else if (loc_descr != NULL)
19563 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19564 }
19565
19566 if (! loc_descr)
19567 {
19568 /* While DW_AT_data_bit_offset was already added in DWARF4, consumers
19569 were slow to adopt it; e.g. GDB only added support in November 2016.
19570 For DWARF5 we need newer debug info consumers anyway. We might change
19571 this to dwarf_version >= 4 once most consumers have caught up. */
19572 if (dwarf_version >= 5
19573 && TREE_CODE (decl) == FIELD_DECL
19574 && DECL_BIT_FIELD_TYPE (decl)
19575 && (ctx->variant_part_offset == NULL_TREE
19576 || TREE_CODE (ctx->variant_part_offset) == INTEGER_CST))
19577 {
19578 tree off = bit_position (decl);
19579 if (ctx->variant_part_offset)
19580 off = bit_from_pos (ctx->variant_part_offset, off);
19581 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19582 {
19583 remove_AT (die, DW_AT_byte_size);
19584 remove_AT (die, DW_AT_bit_offset);
19585 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19586 return;
19587 }
19588 }
19589 if (dwarf_version > 2)
19590 {
19591 /* Don't need to output a location expression, just the constant. */
19592 if (offset < 0)
19593 add_AT_int (die, DW_AT_data_member_location, offset);
19594 else
19595 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19596 return;
19597 }
19598 else
19599 {
19600 enum dwarf_location_atom op;
19601
19602 /* The DWARF2 standard says that we should assume that the structure
19603 address is already on the stack, so we can specify a structure
19604 field address by using DW_OP_plus_uconst. */
19605 op = DW_OP_plus_uconst;
19606 loc_descr = new_loc_descr (op, offset, 0);
19607 }
19608 }
19609
19610 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19611 }
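/* Editorial sketch: for a plain field at byte offset 8 with no variant
   part, field_byte_offset returns NULL with *cst_offset == 8, so the code
   above emits DW_AT_data_member_location as the constant 8 for DWARF 3 and
   later, or as the one-operator expression DW_OP_plus_uconst 8 for DWARF 2
   output.  */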
19612
19613 /* Writes integer values to dw_vec_const array. */
19614
19615 static void
19616 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19617 {
19618 while (size != 0)
19619 {
19620 *dest++ = val & 0xff;
19621 val >>= 8;
19622 --size;
19623 }
19624 }
19625
19626 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19627
19628 static HOST_WIDE_INT
19629 extract_int (const unsigned char *src, unsigned int size)
19630 {
19631 HOST_WIDE_INT val = 0;
19632
19633 src += size;
19634 while (size != 0)
19635 {
19636 val <<= 8;
19637 val |= *--src & 0xff;
19638 --size;
19639 }
19640 return val;
19641 }
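/* Editorial example: insert_int (0x0102, 2, dest) stores the bytes in
   little-endian order, dest[0] = 0x02 and dest[1] = 0x01, and
   extract_int (dest, 2) reassembles 0x0102 from them, so the pair
   round-trips regardless of host endianness.  */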
19642
19643 /* Writes wide_int values to dw_vec_const array. */
19644
19645 static void
19646 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19647 {
19648 int i;
19649
19650 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19651 {
19652 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19653 return;
19654 }
19655
19656 /* We'd have to extend this code to support odd sizes. */
19657 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19658
19659 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19660
19661 if (WORDS_BIG_ENDIAN)
19662 for (i = n - 1; i >= 0; i--)
19663 {
19664 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19665 dest += sizeof (HOST_WIDE_INT);
19666 }
19667 else
19668 for (i = 0; i < n; i++)
19669 {
19670 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19671 dest += sizeof (HOST_WIDE_INT);
19672 }
19673 }
19674
19675 /* Writes floating point values to dw_vec_const array. */
19676
19677 static void
19678 insert_float (const_rtx rtl, unsigned char *array)
19679 {
19680 long val[4];
19681 int i;
19682 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19683
19684 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19685
19686 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19687 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19688 {
19689 insert_int (val[i], 4, array);
19690 array += 4;
19691 }
19692 }
19693
19694 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19695 does not have a "location" either in memory or in a register. These
19696 things can arise in GNU C when a constant is passed as an actual parameter
19697 to an inlined function. They can also arise in C++ where declared
19698 constants do not necessarily get memory "homes". */
19699
19700 static bool
19701 add_const_value_attribute (dw_die_ref die, rtx rtl)
19702 {
19703 switch (GET_CODE (rtl))
19704 {
19705 case CONST_INT:
19706 {
19707 HOST_WIDE_INT val = INTVAL (rtl);
19708
19709 if (val < 0)
19710 add_AT_int (die, DW_AT_const_value, val);
19711 else
19712 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19713 }
19714 return true;
19715
19716 case CONST_WIDE_INT:
19717 {
19718 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19719 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19720 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19721 wide_int w = wi::zext (w1, prec);
19722 add_AT_wide (die, DW_AT_const_value, w);
19723 }
19724 return true;
19725
19726 case CONST_DOUBLE:
19727 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19728 floating-point constant. A CONST_DOUBLE is used whenever the
19729 constant requires more than one word in order to be adequately
19730 represented. */
19731 if (TARGET_SUPPORTS_WIDE_INT == 0
19732 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19733 add_AT_double (die, DW_AT_const_value,
19734 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19735 else
19736 {
19737 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19738 unsigned int length = GET_MODE_SIZE (mode);
19739 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19740
19741 insert_float (rtl, array);
19742 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19743 }
19744 return true;
19745
19746 case CONST_VECTOR:
19747 {
19748 unsigned int length;
19749 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19750 return false;
19751
19752 machine_mode mode = GET_MODE (rtl);
19753 /* The combination of a length and byte elt_size doesn't extend
19754 naturally to boolean vectors, where several elements are packed
19755 into the same byte. */
19756 if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
19757 return false;
19758
19759 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19760 unsigned char *array
19761 = ggc_vec_alloc<unsigned char> (length * elt_size);
19762 unsigned int i;
19763 unsigned char *p;
19764 machine_mode imode = GET_MODE_INNER (mode);
19765
19766 switch (GET_MODE_CLASS (mode))
19767 {
19768 case MODE_VECTOR_INT:
19769 for (i = 0, p = array; i < length; i++, p += elt_size)
19770 {
19771 rtx elt = CONST_VECTOR_ELT (rtl, i);
19772 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19773 }
19774 break;
19775
19776 case MODE_VECTOR_FLOAT:
19777 for (i = 0, p = array; i < length; i++, p += elt_size)
19778 {
19779 rtx elt = CONST_VECTOR_ELT (rtl, i);
19780 insert_float (elt, p);
19781 }
19782 break;
19783
19784 default:
19785 gcc_unreachable ();
19786 }
19787
19788 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19789 }
19790 return true;
19791
19792 case CONST_STRING:
19793 if (dwarf_version >= 4 || !dwarf_strict)
19794 {
19795 dw_loc_descr_ref loc_result;
19796 resolve_one_addr (&rtl);
19797 rtl_addr:
19798 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19799 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19800 add_AT_loc (die, DW_AT_location, loc_result);
19801 vec_safe_push (used_rtx_array, rtl);
19802 return true;
19803 }
19804 return false;
19805
19806 case CONST:
19807 if (CONSTANT_P (XEXP (rtl, 0)))
19808 return add_const_value_attribute (die, XEXP (rtl, 0));
19809 /* FALLTHROUGH */
19810 case SYMBOL_REF:
19811 if (!const_ok_for_output (rtl))
19812 return false;
19813 /* FALLTHROUGH */
19814 case LABEL_REF:
19815 if (dwarf_version >= 4 || !dwarf_strict)
19816 goto rtl_addr;
19817 return false;
19818
19819 case PLUS:
19820 /* In cases where an inlined instance of an inline function is passed
19821 the address of an `auto' variable (which is local to the caller) we
19822 can get a situation where the DECL_RTL of the artificial local
19823 variable (for the inlining) which acts as a stand-in for the
19824 corresponding formal parameter (of the inline function) will look
19825 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19826 exactly a compile-time constant expression, but it isn't the address
19827 of the (artificial) local variable either. Rather, it represents the
19828 *value* which the artificial local variable always has during its
19829 lifetime. We currently have no way to represent such quasi-constant
19830 values in Dwarf, so for now we just punt and generate nothing. */
19831 return false;
19832
19833 case HIGH:
19834 case CONST_FIXED:
19835 case MINUS:
19836 case SIGN_EXTEND:
19837 case ZERO_EXTEND:
19838 case CONST_POLY_INT:
19839 return false;
19840
19841 case MEM:
19842 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19843 && MEM_READONLY_P (rtl)
19844 && GET_MODE (rtl) == BLKmode)
19845 {
19846 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19847 return true;
19848 }
19849 return false;
19850
19851 default:
19852 /* No other kinds of rtx should be possible here. */
19853 gcc_unreachable ();
19854 }
19855 return false;
19856 }
19857
19858 /* Determine whether the evaluation of EXPR references any variables
19859 or functions which aren't otherwise used (and therefore may not be
19860 output). */
19861 static tree
19862 reference_to_unused (tree * tp, int * walk_subtrees,
19863 void * data ATTRIBUTE_UNUSED)
19864 {
19865 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19866 *walk_subtrees = 0;
19867
19868 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19869 && ! TREE_ASM_WRITTEN (*tp))
19870 return *tp;
19871 /* ??? The C++ FE emits debug information for using decls, so
19872 putting gcc_unreachable here falls over. See PR31899. For now
19873 be conservative. */
19874 else if (!symtab->global_info_ready && VAR_P (*tp))
19875 return *tp;
19876 else if (VAR_P (*tp))
19877 {
19878 varpool_node *node = varpool_node::get (*tp);
19879 if (!node || !node->definition)
19880 return *tp;
19881 }
19882 else if (TREE_CODE (*tp) == FUNCTION_DECL
19883 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19884 {
19885 /* The call graph machinery must have finished analyzing,
19886 optimizing and gimplifying the CU by now.
19887 So if *TP has no call graph node associated
19888 to it, it means *TP will not be emitted. */
19889 if (!symtab->global_info_ready || !cgraph_node::get (*tp))
19890 return *tp;
19891 }
19892 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19893 return *tp;
19894
19895 return NULL_TREE;
19896 }
19897
19898 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19899 for use in a later add_const_value_attribute call. */
19900
19901 static rtx
19902 rtl_for_decl_init (tree init, tree type)
19903 {
19904 rtx rtl = NULL_RTX;
19905
19906 STRIP_NOPS (init);
19907
19908 /* If a variable is initialized with a string constant without embedded
19909 zeros, build CONST_STRING. */
19910 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19911 {
19912 tree enttype = TREE_TYPE (type);
19913 tree domain = TYPE_DOMAIN (type);
19914 scalar_int_mode mode;
19915
19916 if (is_int_mode (TYPE_MODE (enttype), &mode)
19917 && GET_MODE_SIZE (mode) == 1
19918 && domain
19919 && TYPE_MAX_VALUE (domain)
19920 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19921 && integer_zerop (TYPE_MIN_VALUE (domain))
19922 && compare_tree_int (TYPE_MAX_VALUE (domain),
19923 TREE_STRING_LENGTH (init) - 1) == 0
19924 && ((size_t) TREE_STRING_LENGTH (init)
19925 == strlen (TREE_STRING_POINTER (init)) + 1))
19926 {
19927 rtl = gen_rtx_CONST_STRING (VOIDmode,
19928 ggc_strdup (TREE_STRING_POINTER (init)));
19929 rtl = gen_rtx_MEM (BLKmode, rtl);
19930 MEM_READONLY_P (rtl) = 1;
19931 }
19932 }
19933 /* Other aggregates, and complex values, could be represented using
19934 CONCAT: FIXME! */
19935 else if (AGGREGATE_TYPE_P (type)
19936 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19937 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19938 || TREE_CODE (type) == COMPLEX_TYPE)
19939 ;
19940 /* Vectors only work if their mode is supported by the target.
19941 FIXME: generic vectors ought to work too. */
19942 else if (TREE_CODE (type) == VECTOR_TYPE
19943 && !VECTOR_MODE_P (TYPE_MODE (type)))
19944 ;
19945 /* If the initializer is something that we know will expand into an
19946 immediate RTL constant, expand it now. We must be careful not to
19947 reference variables which won't be output. */
19948 else if (initializer_constant_valid_p (init, type)
19949 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19950 {
19951 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19952 possible. */
19953 if (TREE_CODE (type) == VECTOR_TYPE)
19954 switch (TREE_CODE (init))
19955 {
19956 case VECTOR_CST:
19957 break;
19958 case CONSTRUCTOR:
19959 if (TREE_CONSTANT (init))
19960 {
19961 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19962 bool constant_p = true;
19963 tree value;
19964 unsigned HOST_WIDE_INT ix;
19965
19966 /* Even when ctor is constant, it might contain non-*_CST
19967 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19968 belong in VECTOR_CST nodes. */
19969 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19970 if (!CONSTANT_CLASS_P (value))
19971 {
19972 constant_p = false;
19973 break;
19974 }
19975
19976 if (constant_p)
19977 {
19978 init = build_vector_from_ctor (type, elts);
19979 break;
19980 }
19981 }
19982 /* FALLTHRU */
19983
19984 default:
19985 return NULL;
19986 }
19987
19988 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19989
19990 /* If expand_expr returns a MEM, it wasn't immediate. */
19991 gcc_assert (!rtl || !MEM_P (rtl));
19992 }
19993
19994 return rtl;
19995 }
19996
19997 /* Generate RTL for the variable DECL to represent its location. */
19998
19999 static rtx
20000 rtl_for_decl_location (tree decl)
20001 {
20002 rtx rtl;
20003
20004 /* Here we have to decide where we are going to say the parameter "lives"
20005 (as far as the debugger is concerned). We only have a couple of
20006 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
20007
20008 DECL_RTL normally indicates where the parameter lives during most of the
20009 activation of the function. If optimization is enabled however, this
20010 could be either NULL or else a pseudo-reg. Both of those cases indicate
20011 that the parameter doesn't really live anywhere (as far as the code
20012 generation parts of GCC are concerned) during most of the function's
20013 activation. That will happen (for example) if the parameter is never
20014 referenced within the function.
20015
20016 We could just generate a location descriptor here for all non-NULL
20017 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
20018 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
20019 where DECL_RTL is NULL or is a pseudo-reg.
20020
20021 Note however that we can only get away with using DECL_INCOMING_RTL as
20022 a backup substitute for DECL_RTL in certain limited cases. In cases
20023 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
20024 we can be sure that the parameter was passed using the same type as it is
20025 declared to have within the function, and that its DECL_INCOMING_RTL
20026 points us to a place where a value of that type is passed.
20027
20028 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
20029 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
20030 because in these cases DECL_INCOMING_RTL points us to a value of some
20031 type which is *different* from the type of the parameter itself. Thus,
20032 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
20033 such cases, the debugger would end up (for example) trying to fetch a
20034 `float' from a place which actually contains the first part of a
20035 `double'. That would lead to really incorrect and confusing
20036 output at debug-time.
20037
20038 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
20039 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
20040 are a couple of exceptions however. On little-endian machines we can
20041 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
20042 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
20043 an integral type that is smaller than TREE_TYPE (decl). These cases arise
20044 when (on a little-endian machine) a non-prototyped function has a
20045 parameter declared to be of type `short' or `char'. In such cases,
20046 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
20047 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
20048 passed `int' value. If the debugger then uses that address to fetch
20049 a `short' or a `char' (on a little-endian machine) the result will be
20050 the correct data, so we allow for such exceptional cases below.
20051
20052 Note that our goal here is to describe the place where the given formal
20053 parameter lives during most of the function's activation (i.e. between the
20054 end of the prologue and the start of the epilogue). We'll do that as best
20055 as we can. Note however that if the given formal parameter is modified
20056 sometime during the execution of the function, then a stack backtrace (at
20057 debug-time) will show the function as having been called with the *new*
20058 value rather than the value which was originally passed in. This happens
20059 rarely enough that it is not a major problem, but it *is* a problem, and
20060 I'd like to fix it.
20061
20062 A future version of dwarf2out.c may generate two additional attributes for
20063 any given DW_TAG_formal_parameter DIE which will describe the "passed
20064 type" and the "passed location" for the given formal parameter in addition
20065 to the attributes we now generate to indicate the "declared type" and the
20066 "active location" for each parameter. This additional set of attributes
20067 could be used by debuggers for stack backtraces. Separately, note that
20068 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
20069 This happens (for example) for inlined-instances of inline function formal
20070 parameters which are never referenced. This really shouldn't be
20071 happening. All PARM_DECL nodes should get valid non-NULL
20072 DECL_INCOMING_RTL values. FIXME. */
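/* Editorial sketch of the little-endian exception described above: for a
   non-prototyped function with a parameter declared `char c', TREE_TYPE is
   `char' while DECL_ARG_TYPE is `int'.  On a little-endian target the
   lowest-addressed byte of the passed `int' slot holds the `char' value,
   so DECL_INCOMING_RTL can stand in directly; when BYTES_BIG_ENDIAN, the
   code below instead adjusts the address by
   GET_MODE_SIZE (pmode) - GET_MODE_SIZE (dmode).  */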
20073
20074 /* Use DECL_RTL as the "location" unless we find something better. */
20075 rtl = DECL_RTL_IF_SET (decl);
20076
20077 /* When generating abstract instances, ignore everything except
20078 constants, symbols living in memory, and symbols living in
20079 fixed registers. */
20080 if (! reload_completed)
20081 {
20082 if (rtl
20083 && (CONSTANT_P (rtl)
20084 || (MEM_P (rtl)
20085 && CONSTANT_P (XEXP (rtl, 0)))
20086 || (REG_P (rtl)
20087 && VAR_P (decl)
20088 && TREE_STATIC (decl))))
20089 {
20090 rtl = targetm.delegitimize_address (rtl);
20091 return rtl;
20092 }
20093 rtl = NULL_RTX;
20094 }
20095 else if (TREE_CODE (decl) == PARM_DECL)
20096 {
20097 if (rtl == NULL_RTX
20098 || is_pseudo_reg (rtl)
20099 || (MEM_P (rtl)
20100 && is_pseudo_reg (XEXP (rtl, 0))
20101 && DECL_INCOMING_RTL (decl)
20102 && MEM_P (DECL_INCOMING_RTL (decl))
20103 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
20104 {
20105 tree declared_type = TREE_TYPE (decl);
20106 tree passed_type = DECL_ARG_TYPE (decl);
20107 machine_mode dmode = TYPE_MODE (declared_type);
20108 machine_mode pmode = TYPE_MODE (passed_type);
20109
20110 /* This decl represents a formal parameter which was optimized out.
20111 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
20112 all cases where (rtl == NULL_RTX) just below. */
20113 if (dmode == pmode)
20114 rtl = DECL_INCOMING_RTL (decl);
20115 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
20116 && SCALAR_INT_MODE_P (dmode)
20117 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
20118 && DECL_INCOMING_RTL (decl))
20119 {
20120 rtx inc = DECL_INCOMING_RTL (decl);
20121 if (REG_P (inc))
20122 rtl = inc;
20123 else if (MEM_P (inc))
20124 {
20125 if (BYTES_BIG_ENDIAN)
20126 rtl = adjust_address_nv (inc, dmode,
20127 GET_MODE_SIZE (pmode)
20128 - GET_MODE_SIZE (dmode));
20129 else
20130 rtl = inc;
20131 }
20132 }
20133 }
20134
20135 /* If the parm was passed in registers, but lives on the stack, then
20136 make a big endian correction if the mode of the type of the
20137 parameter is not the same as the mode of the rtl. */
20138 /* ??? This is the same series of checks that are made in dbxout.c before
20139 we reach the big endian correction code there. It isn't clear if all
20140 of these checks are necessary here, but keeping them all is the safe
20141 thing to do. */
20142 else if (MEM_P (rtl)
20143 && XEXP (rtl, 0) != const0_rtx
20144 && ! CONSTANT_P (XEXP (rtl, 0))
20145 /* Not passed in memory. */
20146 && !MEM_P (DECL_INCOMING_RTL (decl))
20147 /* Not passed by invisible reference. */
20148 && (!REG_P (XEXP (rtl, 0))
20149 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
20150 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
20151 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
20152 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
20153 #endif
20154 )
20155 /* Big endian correction check. */
20156 && BYTES_BIG_ENDIAN
20157 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20158 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20159 UNITS_PER_WORD))
20160 {
20161 machine_mode addr_mode = get_address_mode (rtl);
20162 poly_int64 offset = (UNITS_PER_WORD
20163 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20164
20165 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20166 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20167 }
20168 }
20169 else if (VAR_P (decl)
20170 && rtl
20171 && MEM_P (rtl)
20172 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20173 {
20174 machine_mode addr_mode = get_address_mode (rtl);
20175 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20176 GET_MODE (rtl));
20177
20178 /* If a variable is declared "register" yet is smaller than
20179 a register, then if we store the variable to memory, it
20180 looks like we're storing a register-sized value, when in
20181 fact we are not. We need to adjust the offset of the
20182 storage location to reflect the actual value's bytes,
20183 else gdb will not be able to display it. */
20184 if (maybe_ne (offset, 0))
20185 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20186 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20187 }
20188
20189 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20190 and will have been substituted directly into all expressions that use it.
20191 C does not have such a concept, but C++ and other languages do. */
20192 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20193 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20194
20195 if (rtl)
20196 rtl = targetm.delegitimize_address (rtl);
20197
20198 /* If we don't look past the constant pool, we risk emitting a
20199 reference to a constant pool entry that isn't referenced from
20200 code, and thus is not emitted. */
20201 if (rtl)
20202 rtl = avoid_constant_pool_reference (rtl);
20203
20204 /* Try harder to get a rtl. If this symbol ends up not being emitted
20205 in the current CU, resolve_addr will remove the expression referencing
20206 it. */
20207 if (rtl == NULL_RTX
20208 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20209 && VAR_P (decl)
20210 && !DECL_EXTERNAL (decl)
20211 && TREE_STATIC (decl)
20212 && DECL_NAME (decl)
20213 && !DECL_HARD_REGISTER (decl)
20214 && DECL_MODE (decl) != VOIDmode)
20215 {
20216 rtl = make_decl_rtl_for_debug (decl);
20217 if (!MEM_P (rtl)
20218 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20219 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20220 rtl = NULL_RTX;
20221 }
20222
20223 return rtl;
20224 }
20225
20226 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20227 returned. If so, the decl for the COMMON block is returned, and the
20228 value is the offset into the common block for the symbol. */
20229
20230 static tree
20231 fortran_common (tree decl, HOST_WIDE_INT *value)
20232 {
20233 tree val_expr, cvar;
20234 machine_mode mode;
20235 poly_int64 bitsize, bitpos;
20236 tree offset;
20237 HOST_WIDE_INT cbitpos;
20238 int unsignedp, reversep, volatilep = 0;
20239
20240 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20241 it does not have a value (the offset into the common area), or if it
20242 is thread local (as opposed to global) then it isn't common, and shouldn't
20243 be handled as such. */
20244 if (!VAR_P (decl)
20245 || !TREE_STATIC (decl)
20246 || !DECL_HAS_VALUE_EXPR_P (decl)
20247 || !is_fortran ())
20248 return NULL_TREE;
20249
20250 val_expr = DECL_VALUE_EXPR (decl);
20251 if (TREE_CODE (val_expr) != COMPONENT_REF)
20252 return NULL_TREE;
20253
20254 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20255 &unsignedp, &reversep, &volatilep);
20256
20257 if (cvar == NULL_TREE
20258 || !VAR_P (cvar)
20259 || DECL_ARTIFICIAL (cvar)
20260 || !TREE_PUBLIC (cvar)
20261 /* We don't expect to have to cope with variable offsets,
20262 since at present all static data must have a constant size. */
20263 || !bitpos.is_constant (&cbitpos))
20264 return NULL_TREE;
20265
20266 *value = 0;
20267 if (offset != NULL)
20268 {
20269 if (!tree_fits_shwi_p (offset))
20270 return NULL_TREE;
20271 *value = tree_to_shwi (offset);
20272 }
20273 if (cbitpos != 0)
20274 *value += cbitpos / BITS_PER_UNIT;
20275
20276 return cvar;
20277 }
20278
20279 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20280 data attribute for a variable or a parameter. We generate the
20281 DW_AT_const_value attribute only in those cases where the given variable
20282 or parameter does not have a true "location" either in memory or in a
20283 register. This can happen (for example) when a constant is passed as an
20284 actual argument in a call to an inline function. (It's possible that
20285 these things can crop up in other ways also.) Note that one type of
20286 constant value which can be passed into an inlined function is a constant
20287 pointer. This can happen for example if an actual argument in an inlined
20288 function call evaluates to a compile-time constant address.
20289
20290 CACHE_P is true if it is worth caching the location list for DECL,
20291 so that future calls can reuse it rather than regenerate it from scratch.
20292 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20293 since we will need to refer to them each time the function is inlined. */
20294
20295 static bool
20296 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20297 {
20298 rtx rtl;
20299 dw_loc_list_ref list;
20300 var_loc_list *loc_list;
20301 cached_dw_loc_list *cache;
20302
20303 if (early_dwarf)
20304 return false;
20305
20306 if (TREE_CODE (decl) == ERROR_MARK)
20307 return false;
20308
20309 if (get_AT (die, DW_AT_location)
20310 || get_AT (die, DW_AT_const_value))
20311 return true;
20312
20313 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20314 || TREE_CODE (decl) == RESULT_DECL);
20315
20316 /* Try to get some constant RTL for this decl, and use that as the value of
20317 the location. */
20318
20319 rtl = rtl_for_decl_location (decl);
20320 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20321 && add_const_value_attribute (die, rtl))
20322 return true;
20323
20324 /* See if we have a single-element location list that is equivalent to
20325 a constant value. In that case it is better to use add_const_value_attribute
20326 rather than expanding the constant value equivalent. */
20327 loc_list = lookup_decl_loc (decl);
20328 if (loc_list
20329 && loc_list->first
20330 && loc_list->first->next == NULL
20331 && NOTE_P (loc_list->first->loc)
20332 && NOTE_VAR_LOCATION (loc_list->first->loc)
20333 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20334 {
20335 struct var_loc_node *node;
20336
20337 node = loc_list->first;
20338 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20339 if (GET_CODE (rtl) == EXPR_LIST)
20340 rtl = XEXP (rtl, 0);
20341 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20342 && add_const_value_attribute (die, rtl))
20343 return true;
20344 }
20345 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20346 list several times. See if we've already cached the contents. */
20347 list = NULL;
20348 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20349 cache_p = false;
20350 if (cache_p)
20351 {
20352 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20353 if (cache)
20354 list = cache->loc_list;
20355 }
20356 if (list == NULL)
20357 {
20358 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20359 NULL);
20360 /* It is usually worth caching this result if the decl is from
20361 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20362 if (cache_p && list && list->dw_loc_next)
20363 {
20364 cached_dw_loc_list **slot
20365 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20366 DECL_UID (decl),
20367 INSERT);
20368 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20369 cache->decl_id = DECL_UID (decl);
20370 cache->loc_list = list;
20371 *slot = cache;
20372 }
20373 }
20374 if (list)
20375 {
20376 add_AT_location_description (die, DW_AT_location, list);
20377 return true;
20378 }
20379 /* None of that worked, so it must not really have a location;
20380 try adding a constant value attribute from the DECL_INITIAL. */
20381 return tree_add_const_value_attribute_for_decl (die, decl);
20382 }
20383
20384 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20385 attribute is the const value T. */
20386
20387 static bool
20388 tree_add_const_value_attribute (dw_die_ref die, tree t)
20389 {
20390 tree init;
20391 tree type = TREE_TYPE (t);
20392 rtx rtl;
20393
20394 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20395 return false;
20396
20397 init = t;
20398 gcc_assert (!DECL_P (init));
20399
20400 if (TREE_CODE (init) == INTEGER_CST)
20401 {
20402 if (tree_fits_uhwi_p (init))
20403 {
20404 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20405 return true;
20406 }
20407 if (tree_fits_shwi_p (init))
20408 {
20409 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20410 return true;
20411 }
20412 }
20413 /* Generate the RTL even if early_dwarf to force mangling of all referred-to
20414 symbols. */
20415 rtl = rtl_for_decl_init (init, type);
20416 if (rtl && !early_dwarf)
20417 return add_const_value_attribute (die, rtl);
20418 /* If the host and target are sane, try harder. */
20419 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20420 && initializer_constant_valid_p (init, type))
20421 {
20422 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20423 if (size > 0 && (int) size == size)
20424 {
20425 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20426
20427 if (native_encode_initializer (init, array, size) == size)
20428 {
20429 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20430 return true;
20431 }
20432 ggc_free (array);
20433 }
20434 }
20435 return false;
20436 }
20437
20438 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20439 attribute is the const value of T, where T is an integral constant
20440 variable with static storage duration
20441 (so it can't be a PARM_DECL or a RESULT_DECL). */
20442
20443 static bool
20444 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20445 {
20446
20447 if (!decl
20448 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20449 || (VAR_P (decl) && !TREE_STATIC (decl)))
20450 return false;
20451
20452 if (TREE_READONLY (decl)
20453 && ! TREE_THIS_VOLATILE (decl)
20454 && DECL_INITIAL (decl))
20455 /* OK */;
20456 else
20457 return false;
20458
20459 /* Don't add DW_AT_const_value if abstract origin already has one. */
20460 if (get_AT (var_die, DW_AT_const_value))
20461 return false;
20462
20463 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20464 }
20465
20466 /* Convert the CFI instructions for the current function into a
20467 location list. This is used for DW_AT_frame_base when targeting
20468 a dwarf2 consumer that does not support the dwarf3
20469 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20470 expressions. */
20471
20472 static dw_loc_list_ref
20473 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20474 {
20475 int ix;
20476 dw_fde_ref fde;
20477 dw_loc_list_ref list, *list_tail;
20478 dw_cfi_ref cfi;
20479 dw_cfa_location last_cfa, next_cfa;
20480 const char *start_label, *last_label, *section;
20481 dw_cfa_location remember;
20482
20483 fde = cfun->fde;
20484 gcc_assert (fde != NULL);
20485
20486 section = secname_for_decl (current_function_decl);
20487 list_tail = &list;
20488 list = NULL;
20489
20490 memset (&next_cfa, 0, sizeof (next_cfa));
20491 next_cfa.reg = INVALID_REGNUM;
20492 remember = next_cfa;
20493
20494 start_label = fde->dw_fde_begin;
20495
20496 /* ??? Bald assumption that the CIE opcode list does not contain
20497 advance opcodes. */
20498 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20499 lookup_cfa_1 (cfi, &next_cfa, &remember);
20500
20501 last_cfa = next_cfa;
20502 last_label = start_label;
20503
20504 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20505 {
20506 /* If the first partition contained no CFI adjustments, the
20507 CIE opcodes apply to the whole first partition. */
20508 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20509 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20510 list_tail = &(*list_tail)->dw_loc_next;
20511 start_label = last_label = fde->dw_fde_second_begin;
20512 }
20513
20514 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20515 {
20516 switch (cfi->dw_cfi_opc)
20517 {
20518 case DW_CFA_set_loc:
20519 case DW_CFA_advance_loc1:
20520 case DW_CFA_advance_loc2:
20521 case DW_CFA_advance_loc4:
20522 if (!cfa_equal_p (&last_cfa, &next_cfa))
20523 {
20524 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20525 start_label, 0, last_label, 0, section);
20526
20527 list_tail = &(*list_tail)->dw_loc_next;
20528 last_cfa = next_cfa;
20529 start_label = last_label;
20530 }
20531 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20532 break;
20533
20534 case DW_CFA_advance_loc:
20535 /* The encoding is complex enough that we should never emit this. */
20536 gcc_unreachable ();
20537
20538 default:
20539 lookup_cfa_1 (cfi, &next_cfa, &remember);
20540 break;
20541 }
20542 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20543 {
20544 if (!cfa_equal_p (&last_cfa, &next_cfa))
20545 {
20546 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20547 start_label, 0, last_label, 0, section);
20548
20549 list_tail = &(*list_tail)->dw_loc_next;
20550 last_cfa = next_cfa;
20551 start_label = last_label;
20552 }
20553 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20554 start_label, 0, fde->dw_fde_end, 0, section);
20555 list_tail = &(*list_tail)->dw_loc_next;
20556 start_label = last_label = fde->dw_fde_second_begin;
20557 }
20558 }
20559
20560 if (!cfa_equal_p (&last_cfa, &next_cfa))
20561 {
20562 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20563 start_label, 0, last_label, 0, section);
20564 list_tail = &(*list_tail)->dw_loc_next;
20565 start_label = last_label;
20566 }
20567
20568 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20569 start_label, 0,
20570 fde->dw_fde_second_begin
20571 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20572 section);
20573
20574 maybe_gen_llsym (list);
20575
20576 return list;
20577 }
20578
20579 /* Compute a displacement from the "steady-state frame pointer" to the
20580 frame base (often the same as the CFA), and store it in
20581 frame_pointer_fb_offset. OFFSET is added to the displacement
20582 before the latter is negated. */
20583
20584 static void
20585 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20586 {
20587 rtx reg, elim;
20588
20589 #ifdef FRAME_POINTER_CFA_OFFSET
20590 reg = frame_pointer_rtx;
20591 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20592 #else
20593 reg = arg_pointer_rtx;
20594 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20595 #endif
20596
20597 elim = (ira_use_lra_p
20598 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20599 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20600 elim = strip_offset_and_add (elim, &offset);
20601
20602 frame_pointer_fb_offset = -offset;
20603
20604 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20605 in which to eliminate. This is because its stack pointer isn't
20606 directly accessible as a register within the ISA. To work around
20607 this, assume that while we cannot provide a proper value for
20608 frame_pointer_fb_offset, we won't need one either. We can use
20609 hard frame pointer in debug info even if frame pointer isn't used
20610 since hard frame pointer in debug info is encoded with DW_OP_fbreg
20611 which uses the DW_AT_frame_base attribute, not hard frame pointer
20612 directly. */
20613 frame_pointer_fb_offset_valid
20614 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20615 }
20616
20617 /* Generate a DW_AT_name attribute given some string value to be included as
20618 the value of the attribute. */
20619
20620 static void
20621 add_name_attribute (dw_die_ref die, const char *name_string)
20622 {
20623 if (name_string != NULL && *name_string != 0)
20624 {
20625 if (demangle_name_func)
20626 name_string = (*demangle_name_func) (name_string);
20627
20628 add_AT_string (die, DW_AT_name, name_string);
20629 }
20630 }
20631
20632 /* Generate a DW_AT_name attribute given some string value representing a
20633 file or filepath to be included as value of the attribute. */
20634 static void
20635 add_filename_attribute (dw_die_ref die, const char *name_string)
20636 {
20637 if (name_string != NULL && *name_string != 0)
20638 add_filepath_AT_string (die, DW_AT_name, name_string);
20639 }
20640
20641 /* Generate a DW_AT_description attribute given some string value to be included
20642 as the value of the attribute. */
20643
20644 static void
20645 add_desc_attribute (dw_die_ref die, const char *name_string)
20646 {
20647 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20648 return;
20649
20650 if (name_string == NULL || *name_string == 0)
20651 return;
20652
20653 if (demangle_name_func)
20654 name_string = (*demangle_name_func) (name_string);
20655
20656 add_AT_string (die, DW_AT_description, name_string);
20657 }
20658
20659 /* Generate a DW_AT_description attribute given some decl to be included
20660 as the value of the attribute. */
20661
20662 static void
20663 add_desc_attribute (dw_die_ref die, tree decl)
20664 {
20665 tree decl_name;
20666
20667 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20668 return;
20669
20670 if (decl == NULL_TREE || !DECL_P (decl))
20671 return;
20672 decl_name = DECL_NAME (decl);
20673
20674 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20675 {
20676 const char *name = dwarf2_name (decl, 0);
20677 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20678 }
20679 else
20680 {
20681 char *desc = print_generic_expr_to_str (decl);
20682 add_desc_attribute (die, desc);
20683 free (desc);
20684 }
20685 }
20686
20687 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20688 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20689 of TYPE accordingly.
20690
20691 ??? This is a temporary measure until after we're able to generate
20692 regular DWARF for the complex Ada type system. */
20693
20694 static void
20695 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20696 dw_die_ref context_die)
20697 {
20698 tree dtype;
20699 dw_die_ref dtype_die;
20700
20701 if (!lang_hooks.types.descriptive_type)
20702 return;
20703
20704 dtype = lang_hooks.types.descriptive_type (type);
20705 if (!dtype)
20706 return;
20707
20708 dtype_die = lookup_type_die (dtype);
20709 if (!dtype_die)
20710 {
20711 gen_type_die (dtype, context_die);
20712 dtype_die = lookup_type_die (dtype);
20713 gcc_assert (dtype_die);
20714 }
20715
20716 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20717 }
20718
20719 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20720
20721 static const char *
20722 comp_dir_string (void)
20723 {
20724 const char *wd;
20725 char *wd_plus_sep = NULL;
20726 static const char *cached_wd = NULL;
20727
20728 if (cached_wd != NULL)
20729 return cached_wd;
20730
20731 wd = get_src_pwd ();
20732 if (wd == NULL)
20733 return NULL;
20734
20735 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20736 {
20737 size_t wdlen = strlen (wd);
20738 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20739 strcpy (wd_plus_sep, wd);
20740 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20741 wd_plus_sep [wdlen + 1] = 0;
20742 wd = wd_plus_sep;
20743 }
20744
20745 cached_wd = remap_debug_filename (wd);
20746
20747 /* remap_debug_filename can just pass through wd or return a new gc string.
20748 These two types can't both be stored in a GTY(())-tagged string, but since
20749 the cached value lives forever just copy it if needed. */
20750 if (cached_wd != wd)
20751 {
20752 cached_wd = xstrdup (cached_wd);
20753 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20754 free (wd_plus_sep);
20755 }
20756
20757 return cached_wd;
20758 }
20759
20760 /* Generate a DW_AT_comp_dir attribute for DIE. */
20761
20762 static void
20763 add_comp_dir_attribute (dw_die_ref die)
20764 {
20765 const char * wd = comp_dir_string ();
20766 if (wd != NULL)
20767 add_filepath_AT_string (die, DW_AT_comp_dir, wd);
20768 }
20769
20770 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20771 pointer computation, ...), output a representation for that bound according
20772 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20773 loc_list_from_tree for the meaning of CONTEXT. */
20774
20775 static void
20776 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20777 int forms, struct loc_descr_context *context)
20778 {
20779 dw_die_ref context_die, decl_die = NULL;
20780 dw_loc_list_ref list;
20781 bool strip_conversions = true;
20782 bool placeholder_seen = false;
20783
20784 while (strip_conversions)
20785 switch (TREE_CODE (value))
20786 {
20787 case ERROR_MARK:
20788 case SAVE_EXPR:
20789 return;
20790
20791 CASE_CONVERT:
20792 case VIEW_CONVERT_EXPR:
20793 value = TREE_OPERAND (value, 0);
20794 break;
20795
20796 default:
20797 strip_conversions = false;
20798 break;
20799 }
20800
20801 /* If possible and permitted, output the attribute as a constant. */
20802 if ((forms & dw_scalar_form_constant) != 0
20803 && TREE_CODE (value) == INTEGER_CST)
20804 {
20805 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20806
20807 /* If HOST_WIDE_INT is big enough then represent the bound as
20808 a constant value. We need to choose a form based on
20809 whether the type is signed or unsigned. We cannot just
20810 call add_AT_unsigned if the value itself is positive
20811 (add_AT_unsigned might add the unsigned value encoded as
20812 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20813 bounds type and then sign extend any unsigned values found
20814 for signed types. This is needed only for
20815 DW_AT_{lower,upper}_bound, since for most other attributes,
20816 consumers will treat DW_FORM_data[1248] as unsigned values,
20817 regardless of the underlying type. */
20818 if (prec <= HOST_BITS_PER_WIDE_INT
20819 || tree_fits_uhwi_p (value))
20820 {
20821 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20822 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20823 else
20824 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20825 }
20826 else if (dwarf_version >= 5
20827 && TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (value))) == 128)
20828 /* Otherwise represent the bound as an unsigned value with
20829 the precision of its type. The precision and signedness
20830 of the type will be necessary to re-interpret it
20831 unambiguously. */
20832 add_AT_wide (die, attr, wi::to_wide (value));
20833 else
20834 {
20835 rtx v = immed_wide_int_const (wi::to_wide (value),
20836 TYPE_MODE (TREE_TYPE (value)));
20837 dw_loc_descr_ref loc
20838 = loc_descriptor (v, TYPE_MODE (TREE_TYPE (value)),
20839 VAR_INIT_STATUS_INITIALIZED);
20840 if (loc)
20841 add_AT_loc (die, attr, loc);
20842 }
20843 return;
20844 }
20845
20846 /* Otherwise, if it's possible and permitted too, output a reference to
20847 another DIE. */
20848 if ((forms & dw_scalar_form_reference) != 0)
20849 {
20850 tree decl = NULL_TREE;
20851
20852 /* Some type attributes reference an outer type. For instance, the upper
20853 bound of an array may reference an embedding record (this happens in
20854 Ada). */
20855 if (TREE_CODE (value) == COMPONENT_REF
20856 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20857 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20858 decl = TREE_OPERAND (value, 1);
20859
20860 else if (VAR_P (value)
20861 || TREE_CODE (value) == PARM_DECL
20862 || TREE_CODE (value) == RESULT_DECL)
20863 decl = value;
20864
20865 if (decl != NULL_TREE)
20866 {
20867 decl_die = lookup_decl_die (decl);
20868
20869 /* ??? Can this happen, or should the variable have been bound
20870 first? Probably it can, since I imagine that we try to create
20871 the types of parameters in the order in which they exist in
20872 the list, and won't have created a forward reference to a
20873 later parameter. */
20874 if (decl_die != NULL)
20875 {
20876 if (get_AT (decl_die, DW_AT_location)
20877 || get_AT (decl_die, DW_AT_data_member_location)
20878 || get_AT (decl_die, DW_AT_const_value))
20879 {
20880 add_AT_die_ref (die, attr, decl_die);
20881 return;
20882 }
20883 }
20884 }
20885 }
20886
20887 /* Last chance: try to create a stack operation procedure to evaluate the
20888 value. Do nothing if even that is not possible or permitted. */
20889 if ((forms & dw_scalar_form_exprloc) == 0)
20890 return;
20891
20892 list = loc_list_from_tree (value, 2, context);
20893 if (context && context->placeholder_arg)
20894 {
20895 placeholder_seen = context->placeholder_seen;
20896 context->placeholder_seen = false;
20897 }
20898 if (list == NULL || single_element_loc_list_p (list))
20899 {
20900 /* If this attribute is neither a reference nor a constant, it is
20901 a DWARF expression rather than a location description. For that,
20902 loc_list_from_tree (value, 0, &context) is needed. */
20903 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20904 if (list2 && single_element_loc_list_p (list2))
20905 {
20906 if (placeholder_seen)
20907 {
20908 struct dwarf_procedure_info dpi;
20909 dpi.fndecl = NULL_TREE;
20910 dpi.args_count = 1;
20911 if (!resolve_args_picking (list2->expr, 1, &dpi))
20912 return;
20913 }
20914 add_AT_loc (die, attr, list2->expr);
20915 return;
20916 }
20917 }
20918
20919 /* If that failed to give a single element location list, fall back to
20920 outputting this as a reference... still if permitted. */
20921 if (list == NULL
20922 || (forms & dw_scalar_form_reference) == 0
20923 || placeholder_seen)
20924 return;
20925
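/* Either reuse the DIE found above or synthesize an artificial DW_TAG_variable
   in the current scope; attach the location list to it and make ATTR a
   reference to that DIE.  */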
20926 if (!decl_die)
20927 {
20928 if (current_function_decl == 0)
20929 context_die = comp_unit_die ();
20930 else
20931 context_die = lookup_decl_die (current_function_decl);
20932
20933 decl_die = new_die (DW_TAG_variable, context_die, value);
20934 add_AT_flag (decl_die, DW_AT_artificial, 1);
20935 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20936 context_die);
20937 }
20938
20939 add_AT_location_description (decl_die, DW_AT_location, list);
20940 add_AT_die_ref (die, attr, decl_die);
20941 }
20942
20943 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20944 default. */
20945
20946 static int
20947 lower_bound_default (void)
20948 {
20949 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20950 {
20951 case DW_LANG_C:
20952 case DW_LANG_C89:
20953 case DW_LANG_C99:
20954 case DW_LANG_C11:
20955 case DW_LANG_C_plus_plus:
20956 case DW_LANG_C_plus_plus_11:
20957 case DW_LANG_C_plus_plus_14:
20958 case DW_LANG_ObjC:
20959 case DW_LANG_ObjC_plus_plus:
20960 return 0;
20961 case DW_LANG_Fortran77:
20962 case DW_LANG_Fortran90:
20963 case DW_LANG_Fortran95:
20964 case DW_LANG_Fortran03:
20965 case DW_LANG_Fortran08:
20966 return 1;
20967 case DW_LANG_UPC:
20968 case DW_LANG_D:
20969 case DW_LANG_Python:
20970 return dwarf_version >= 4 ? 0 : -1;
20971 case DW_LANG_Ada95:
20972 case DW_LANG_Ada83:
20973 case DW_LANG_Cobol74:
20974 case DW_LANG_Cobol85:
20975 case DW_LANG_Modula2:
20976 case DW_LANG_PLI:
20977 return dwarf_version >= 4 ? 1 : -1;
20978 default:
20979 return -1;
20980 }
20981 }
20982
20983 /* Given a tree node describing an array bound (either lower or upper) output
20984 a representation for that bound. */
20985
20986 static void
20987 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20988 tree bound, struct loc_descr_context *context)
20989 {
20990 int dflt;
20991
20992 while (1)
20993 switch (TREE_CODE (bound))
20994 {
20995 /* Strip all conversions. */
20996 CASE_CONVERT:
20997 case VIEW_CONVERT_EXPR:
20998 bound = TREE_OPERAND (bound, 0);
20999 break;
21000
21001 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
21002 are even omitted when they are the default. */
21003 case INTEGER_CST:
21004 /* If the value for this bound is the default one, we can even omit the
21005 attribute. */
21006 if (bound_attr == DW_AT_lower_bound
21007 && tree_fits_shwi_p (bound)
21008 && (dflt = lower_bound_default ()) != -1
21009 && tree_to_shwi (bound) == dflt)
21010 return;
21011
21012 /* FALLTHRU */
21013
21014 default:
21015 /* Because of the complex interactions there can be with other GNAT
21016 encodings, GDB isn't yet ready to handle a proper DWARF description
21017 for self-referential subrange bounds: let GNAT encodings do the
21018 magic in such a case. */
21019 if (is_ada ()
21020 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
21021 && contains_placeholder_p (bound))
21022 return;
21023
21024 add_scalar_info (subrange_die, bound_attr, bound,
21025 dw_scalar_form_constant
21026 | dw_scalar_form_exprloc
21027 | dw_scalar_form_reference,
21028 context);
21029 return;
21030 }
21031 }
21032
21033 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
21034 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
21035 Note that the block of subscript information for an array type also
21036 includes information about the element type of the given array type.
21037
21038 This function reuses previously set type and bound information if
21039 available. */
21040
21041 static void
21042 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
21043 {
21044 unsigned dimension_number;
21045 tree lower, upper;
21046 dw_die_ref child = type_die->die_child;
21047
21048 for (dimension_number = 0;
21049 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
21050 type = TREE_TYPE (type), dimension_number++)
21051 {
21052 tree domain = TYPE_DOMAIN (type);
21053
21054 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
21055 break;
21056
21057 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
21058 and (in GNU C only) variable bounds. Handle all three forms
21059 here. */
21060
21061 /* Find and reuse a previously generated DW_TAG_subrange_type if
21062 available.
21063
21064 For multi-dimensional arrays, as we iterate through the
21065 various dimensions in the enclosing for loop above, we also
21066 iterate through the DIE children and pick up, at each step, the
21067 DW_TAG_subrange_type previously generated (if available).
21068 Each child DW_TAG_subrange_type DIE describes the range of
21069 the current dimension. At this point we should have as many
21070 DW_TAG_subrange_type's as we have dimensions in the
21071 array. */
21072 dw_die_ref subrange_die = NULL;
21073 if (child)
21074 while (1)
21075 {
21076 child = child->die_sib;
21077 if (child->die_tag == DW_TAG_subrange_type)
21078 subrange_die = child;
21079 if (child == type_die->die_child)
21080 {
21081 /* If we wrapped around, stop looking next time. */
21082 child = NULL;
21083 break;
21084 }
21085 if (child->die_tag == DW_TAG_subrange_type)
21086 break;
21087 }
21088 if (!subrange_die)
21089 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
21090
21091 if (domain)
21092 {
21093 /* We have an array type with specified bounds. */
21094 lower = TYPE_MIN_VALUE (domain);
21095 upper = TYPE_MAX_VALUE (domain);
21096
21097 /* Define the index type. */
21098 if (TREE_TYPE (domain)
21099 && !get_AT (subrange_die, DW_AT_type))
21100 {
21101 /* ??? This is probably an Ada unnamed subrange type. Ignore the
21102 TREE_TYPE field. We can't emit debug info for this
21103 because it is an unnamed integral type. */
21104 if (TREE_CODE (domain) == INTEGER_TYPE
21105 && TYPE_NAME (domain) == NULL_TREE
21106 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
21107 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
21108 ;
21109 else
21110 add_type_attribute (subrange_die, TREE_TYPE (domain),
21111 TYPE_UNQUALIFIED, false, type_die);
21112 }
21113
21114 /* ??? If upper is NULL, the array has unspecified length,
21115 but it does have a lower bound. This happens with Fortran
21116 dimension arr(N:*).
21117 Since the debugger is definitely going to need to know N
21118 to produce useful results, go ahead and output the lower
21119 bound solo, and hope the debugger can cope. */
21120
21121 if (!get_AT (subrange_die, DW_AT_lower_bound))
21122 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21123 if (!get_AT (subrange_die, DW_AT_upper_bound)
21124 && !get_AT (subrange_die, DW_AT_count))
21125 {
21126 if (upper)
21127 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21128 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
21129 /* Zero-length array. */
21130 add_bound_info (subrange_die, DW_AT_count,
21131 build_int_cst (TREE_TYPE (lower), 0), NULL);
21132 }
21133 }
21134
21135 /* Otherwise we have an array type with an unspecified length. The
21136 DWARF-2 spec does not say how to handle this; let's just leave out the
21137 bounds. */
21138 }
21139 }
21140
21141 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21142
21143 static void
21144 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21145 {
21146 dw_die_ref decl_die;
21147 HOST_WIDE_INT size;
21148 dw_loc_descr_ref size_expr = NULL;
21149
21150 switch (TREE_CODE (tree_node))
21151 {
21152 case ERROR_MARK:
21153 size = 0;
21154 break;
21155 case ENUMERAL_TYPE:
21156 case RECORD_TYPE:
21157 case UNION_TYPE:
21158 case QUAL_UNION_TYPE:
21159 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21160 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21161 {
21162 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21163 return;
21164 }
21165 size_expr = type_byte_size (tree_node, &size);
21166 break;
21167 case FIELD_DECL:
21168 /* For a data member of a struct or union, the DW_AT_byte_size is
21169 generally given as the number of bytes normally allocated for an
21170 object of the *declared* type of the member itself. This is true
21171 even for bit-fields. */
21172 size = int_size_in_bytes (field_type (tree_node));
21173 break;
21174 default:
21175 gcc_unreachable ();
21176 }
21177
21178 /* Support for dynamically-sized objects was introduced by DWARFv3.
21179 At the moment, GDB does not handle variable byte sizes very well,
21180 though. */
21181 if ((dwarf_version >= 3 || !dwarf_strict)
21182 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21183 && size_expr != NULL)
21184 add_AT_loc (die, DW_AT_byte_size, size_expr);
21185
21186 /* Note that `size' might be -1 when we get to this point. If it is, that
21187 indicates that the byte size of the entity in question is variable and
21188 that we could not generate a DWARF expression that computes it. */
21189 if (size >= 0)
21190 add_AT_unsigned (die, DW_AT_byte_size, size);
21191 }
21192
21193 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21194 alignment. */
21195
21196 static void
21197 add_alignment_attribute (dw_die_ref die, tree tree_node)
21198 {
21199 if (dwarf_version < 5 && dwarf_strict)
21200 return;
21201
21202 unsigned align;
21203
21204 if (DECL_P (tree_node))
21205 {
21206 if (!DECL_USER_ALIGN (tree_node))
21207 return;
21208
21209 align = DECL_ALIGN_UNIT (tree_node);
21210 }
21211 else if (TYPE_P (tree_node))
21212 {
21213 if (!TYPE_USER_ALIGN (tree_node))
21214 return;
21215
21216 align = TYPE_ALIGN_UNIT (tree_node);
21217 }
21218 else
21219 gcc_unreachable ();
21220
21221 add_AT_unsigned (die, DW_AT_alignment, align);
21222 }
21223
21224 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21225 which specifies the distance in bits from the highest order bit of the
21226 "containing object" for the bit-field to the highest order bit of the
21227 bit-field itself.
21228
21229 For any given bit-field, the "containing object" is a hypothetical object
21230 (of some integral or enum type) within which the given bit-field lives. The
21231 type of this hypothetical "containing object" is always the same as the
21232 declared type of the individual bit-field itself. The determination of the
21233 exact location of the "containing object" for a bit-field is rather
21234 complicated. It's handled by the `field_byte_offset' function (above).
21235
21236 Note that it is the size (in bytes) of the hypothetical "containing object"
21237 which will be given in the DW_AT_byte_size attribute for this bit-field.
21238 (See `add_byte_size_attribute' above.) */
21239
21240 static inline void
21241 add_bit_offset_attribute (dw_die_ref die, tree decl)
21242 {
21243 HOST_WIDE_INT object_offset_in_bytes;
21244 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21245 HOST_WIDE_INT bitpos_int;
21246 HOST_WIDE_INT highest_order_object_bit_offset;
21247 HOST_WIDE_INT highest_order_field_bit_offset;
21248 HOST_WIDE_INT bit_offset;
21249
21250 /* The containing object is within the DECL_CONTEXT. */
21251 struct vlr_context ctx = { DECL_CONTEXT (decl), NULL_TREE };
21252
21253 field_byte_offset (decl, &ctx, &object_offset_in_bytes);
21254
21255 /* Must be a field and a bit field. */
21256 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21257
21258 /* We can't yet handle bit-fields whose offsets are variable, so if we
21259 encounter such things, just return without generating any attribute
21260 whatsoever. Likewise for variable or too large size. */
21261 if (! tree_fits_shwi_p (bit_position (decl))
21262 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21263 return;
21264
21265 bitpos_int = int_bit_position (decl);
21266
21267 /* Note that the bit offset is always the distance (in bits) from the
21268 highest-order bit of the "containing object" to the highest-order bit of
21269 the bit-field itself. Since the "high-order end" of any object or field
21270 is different on big-endian and little-endian machines, the computation
21271 below must take account of these differences. */
21272 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21273 highest_order_field_bit_offset = bitpos_int;
21274
21275 if (! BYTES_BIG_ENDIAN)
21276 {
21277 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21278 highest_order_object_bit_offset +=
21279 simple_type_size_in_bits (original_type);
21280 }
21281
21282 bit_offset
21283 = (! BYTES_BIG_ENDIAN
21284 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21285 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21286
21287 if (bit_offset < 0)
21288 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21289 else
21290 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21291 }
21292
21293 /* For a FIELD_DECL node which represents a bit field, output an attribute
21294 which specifies the length in bits of the given field. */
21295
21296 static inline void
21297 add_bit_size_attribute (dw_die_ref die, tree decl)
21298 {
21299 /* Must be a field and a bit field. */
21300 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21301 && DECL_BIT_FIELD_TYPE (decl));
21302
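/* Omit the attribute when the size is variable or too large to be represented
   as an unsigned HOST_WIDE_INT.  */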
21303 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21304 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21305 }
21306
21307 /* If the compiled language is ANSI C, then add a 'prototyped'
21308 attribute if argument types are given for the parameters of a function. */
21309
21310 static inline void
21311 add_prototyped_attribute (dw_die_ref die, tree func_type)
21312 {
21313 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21314 {
21315 case DW_LANG_C:
21316 case DW_LANG_C89:
21317 case DW_LANG_C99:
21318 case DW_LANG_C11:
21319 case DW_LANG_ObjC:
21320 if (prototype_p (func_type))
21321 add_AT_flag (die, DW_AT_prototyped, 1);
21322 break;
21323 default:
21324 break;
21325 }
21326 }
21327
21328 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21329 by looking in the type declaration, the object declaration equate table or
21330 the block mapping. */
21331
21332 static inline void
21333 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21334 {
21335 dw_die_ref origin_die = NULL;
21336
21337 /* For late LTO debug output we want to refer directly to the abstract
21338 DIE in the early debug rather than to the possibly existing concrete
21339 instance and avoid creating that just for this purpose. */
21340 sym_off_pair *desc;
21341 if (in_lto_p
21342 && external_die_map
21343 && (desc = external_die_map->get (origin)))
21344 {
21345 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21346 desc->sym, desc->off);
21347 return;
21348 }
21349
21350 if (DECL_P (origin))
21351 origin_die = lookup_decl_die (origin);
21352 else if (TYPE_P (origin))
21353 origin_die = lookup_type_die (origin);
21354 else if (TREE_CODE (origin) == BLOCK)
21355 origin_die = lookup_block_die (origin);
21356
21357 /* XXX: Functions that are never lowered don't always have correct block
21358 trees (in the case of Java, they simply have no block tree; likewise in some other
21359 languages). For these functions, there is nothing we can really do to
21360 output correct debug info for inlined functions in all cases. Rather
21361 than die, we'll just produce deficient debug info now, in that we will
21362 have variables without a proper abstract origin. In the future, when all
21363 functions are lowered, we should re-add a gcc_assert (origin_die)
21364 here. */
21365
21366 if (origin_die)
21367 {
21368 dw_attr_node *a;
21369 /* Like above, if we already created a concrete instance DIE
21370 do not use that for the abstract origin but the early DIE
21371 if present. */
21372 if (in_lto_p
21373 && (a = get_AT (origin_die, DW_AT_abstract_origin)))
21374 origin_die = AT_ref (a);
21375 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21376 }
21377 }
21378
21379 /* We do not currently support the pure_virtual attribute. */
21380
21381 static inline void
21382 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21383 {
21384 if (DECL_VINDEX (func_decl))
21385 {
21386 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21387
21388 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21389 add_AT_loc (die, DW_AT_vtable_elem_location,
21390 new_loc_descr (DW_OP_constu,
21391 tree_to_shwi (DECL_VINDEX (func_decl)),
21392 0));
21393
21394 /* GNU extension: Record what type this method came from originally. */
21395 if (debug_info_level > DINFO_LEVEL_TERSE
21396 && DECL_CONTEXT (func_decl))
21397 add_AT_die_ref (die, DW_AT_containing_type,
21398 lookup_type_die (DECL_CONTEXT (func_decl)));
21399 }
21400 }
21401 \f
21402 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21403 given decl. This was a vendor extension until DWARF 4
21404 standardized it. */
21405
21406 static void
21407 add_linkage_attr (dw_die_ref die, tree decl)
21408 {
21409 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21410
21411 /* Mimic what assemble_name_raw does with a leading '*'. */
21412 if (name[0] == '*')
21413 name = &name[1];
21414
21415 if (dwarf_version >= 4)
21416 add_AT_string (die, DW_AT_linkage_name, name);
21417 else
21418 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21419 }
21420
21421 /* Add source coordinate attributes for the given decl. */
21422
21423 static void
21424 add_src_coords_attributes (dw_die_ref die, tree decl)
21425 {
21426 expanded_location s;
21427
21428 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21429 return;
21430 s = expand_location (DECL_SOURCE_LOCATION (decl));
21431 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21432 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21433 if (debug_column_info && s.column)
21434 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21435 }
21436
21437 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21438
21439 static void
21440 add_linkage_name_raw (dw_die_ref die, tree decl)
21441 {
21442 /* Defer until we have an assembler name set. */
21443 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21444 {
21445 limbo_die_node *asm_name;
21446
21447 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21448 asm_name->die = die;
21449 asm_name->created_for = decl;
21450 asm_name->next = deferred_asm_name;
21451 deferred_asm_name = asm_name;
21452 }
21453 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21454 add_linkage_attr (die, decl);
21455 }
21456
21457 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21458
21459 static void
21460 add_linkage_name (dw_die_ref die, tree decl)
21461 {
21462 if (debug_info_level > DINFO_LEVEL_NONE
21463 && VAR_OR_FUNCTION_DECL_P (decl)
21464 && TREE_PUBLIC (decl)
21465 && !(VAR_P (decl) && DECL_REGISTER (decl))
21466 && die->die_tag != DW_TAG_member)
21467 add_linkage_name_raw (die, decl);
21468 }
21469
21470 /* Add a DW_AT_name attribute and source coordinate attribute for the
21471 given decl, but only if it actually has a name. */
21472
21473 static void
21474 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21475 bool no_linkage_name)
21476 {
21477 tree decl_name;
21478
21479 decl_name = DECL_NAME (decl);
21480 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21481 {
21482 const char *name = dwarf2_name (decl, 0);
21483 if (name)
21484 add_name_attribute (die, name);
21485 else
21486 add_desc_attribute (die, decl);
21487
21488 if (! DECL_ARTIFICIAL (decl))
21489 add_src_coords_attributes (die, decl);
21490
21491 if (!no_linkage_name)
21492 add_linkage_name (die, decl);
21493 }
21494 else
21495 add_desc_attribute (die, decl);
21496
21497 #ifdef VMS_DEBUGGING_INFO
21498 /* Get the function's name, as described by its RTL. This may be different
21499 from the DECL_NAME name used in the source file. */
21500 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21501 {
21502 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21503 XEXP (DECL_RTL (decl), 0), false);
21504 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21505 }
21506 #endif /* VMS_DEBUGGING_INFO */
21507 }
21508
21509 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21510
21511 static void
21512 add_discr_value (dw_die_ref die, dw_discr_value *value)
21513 {
21514 dw_attr_node attr;
21515
21516 attr.dw_attr = DW_AT_discr_value;
21517 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21518 attr.dw_attr_val.val_entry = NULL;
21519 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21520 if (value->pos)
21521 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21522 else
21523 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21524 add_dwarf_attr (die, &attr);
21525 }
21526
21527 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21528
21529 static void
21530 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21531 {
21532 dw_attr_node attr;
21533
21534 attr.dw_attr = DW_AT_discr_list;
21535 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21536 attr.dw_attr_val.val_entry = NULL;
21537 attr.dw_attr_val.v.val_discr_list = discr_list;
21538 add_dwarf_attr (die, &attr);
21539 }
21540
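/* Return the discriminant list held by a DW_AT_discr_list attribute node.  */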
21541 static inline dw_discr_list_ref
21542 AT_discr_list (dw_attr_node *attr)
21543 {
21544 return attr->dw_attr_val.v.val_discr_list;
21545 }
21546
21547 #ifdef VMS_DEBUGGING_INFO
21548 /* Output the debug main pointer die for VMS. */
21549
21550 void
21551 dwarf2out_vms_debug_main_pointer (void)
21552 {
21553 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21554 dw_die_ref die;
21555
21556 /* Allocate the VMS debug main subprogram die. */
21557 die = new_die_raw (DW_TAG_subprogram);
21558 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21559 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21560 current_function_funcdef_no);
21561 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21562
21563 /* Make it the first child of comp_unit_die (). */
21564 die->die_parent = comp_unit_die ();
21565 if (comp_unit_die ()->die_child)
21566 {
21567 die->die_sib = comp_unit_die ()->die_child->die_sib;
21568 comp_unit_die ()->die_child->die_sib = die;
21569 }
21570 else
21571 {
21572 die->die_sib = die;
21573 comp_unit_die ()->die_child = die;
21574 }
21575 }
21576 #endif /* VMS_DEBUGGING_INFO */
21577
21578 /* walk_tree helper function for uses_local_type, below. */
21579
21580 static tree
21581 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21582 {
21583 if (!TYPE_P (*tp))
21584 *walk_subtrees = 0;
21585 else
21586 {
21587 tree name = TYPE_NAME (*tp);
21588 if (name && DECL_P (name) && decl_function_context (name))
21589 return *tp;
21590 }
21591 return NULL_TREE;
21592 }
21593
21594 /* If TYPE involves a function-local type (including a local typedef to a
21595 non-local type), returns that type; otherwise returns NULL_TREE. */
21596
21597 static tree
21598 uses_local_type (tree type)
21599 {
21600 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21601 return used;
21602 }
21603
21604 /* Return the DIE for the scope that immediately contains this type.
21605 Non-named types that do not involve a function-local type get global
21606 scope. Named types nested in namespaces or other types get their
21607 containing scope. All other types (i.e. function-local named types) get
21608 the current active scope. */
21609
21610 static dw_die_ref
21611 scope_die_for (tree t, dw_die_ref context_die)
21612 {
21613 dw_die_ref scope_die = NULL;
21614 tree containing_scope;
21615
21616 /* Non-types always go in the current scope. */
21617 gcc_assert (TYPE_P (t));
21618
21619 /* Use the scope of the typedef, rather than the scope of the type
21620 it refers to. */
21621 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21622 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21623 else
21624 containing_scope = TYPE_CONTEXT (t);
21625
21626 /* Use the containing namespace if there is one. */
21627 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21628 {
21629 if (context_die == lookup_decl_die (containing_scope))
21630 /* OK */;
21631 else if (debug_info_level > DINFO_LEVEL_TERSE)
21632 context_die = get_context_die (containing_scope);
21633 else
21634 containing_scope = NULL_TREE;
21635 }
21636
21637 /* Ignore function type "scopes" from the C frontend. They mean that
21638 a tagged type is local to a parmlist of a function declarator, but
21639 that isn't useful to DWARF. */
21640 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21641 containing_scope = NULL_TREE;
21642
21643 if (SCOPE_FILE_SCOPE_P (containing_scope))
21644 {
21645 /* If T uses a local type keep it local as well, to avoid references
21646 to function-local DIEs from outside the function. */
21647 if (current_function_decl && uses_local_type (t))
21648 scope_die = context_die;
21649 else
21650 scope_die = comp_unit_die ();
21651 }
21652 else if (TYPE_P (containing_scope))
21653 {
21654 /* For types, we can just look up the appropriate DIE. */
21655 if (debug_info_level > DINFO_LEVEL_TERSE)
21656 scope_die = get_context_die (containing_scope);
21657 else
21658 {
21659 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21660 if (scope_die == NULL)
21661 scope_die = comp_unit_die ();
21662 }
21663 }
21664 else
21665 scope_die = context_die;
21666
21667 return scope_die;
21668 }
21669
21670 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21671
21672 static inline int
21673 local_scope_p (dw_die_ref context_die)
21674 {
21675 for (; context_die; context_die = context_die->die_parent)
21676 if (context_die->die_tag == DW_TAG_inlined_subroutine
21677 || context_die->die_tag == DW_TAG_subprogram)
21678 return 1;
21679
21680 return 0;
21681 }
21682
21683 /* Returns nonzero if CONTEXT_DIE is a class. */
21684
21685 static inline int
21686 class_scope_p (dw_die_ref context_die)
21687 {
21688 return (context_die
21689 && (context_die->die_tag == DW_TAG_structure_type
21690 || context_die->die_tag == DW_TAG_class_type
21691 || context_die->die_tag == DW_TAG_interface_type
21692 || context_die->die_tag == DW_TAG_union_type));
21693 }
21694
21695 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21696 whether or not to treat a DIE in this context as a declaration. */
21697
21698 static inline int
21699 class_or_namespace_scope_p (dw_die_ref context_die)
21700 {
21701 return (class_scope_p (context_die)
21702 || (context_die && context_die->die_tag == DW_TAG_namespace));
21703 }
21704
21705 /* Many forms of DIEs require a "type description" attribute. This
21706 routine locates the proper "type descriptor" die for the type given
21707 by 'type' plus any additional qualifiers given by 'cv_quals', and
21708 adds a DW_AT_type attribute below the given die. */
21709
21710 static void
21711 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21712 bool reverse, dw_die_ref context_die)
21713 {
21714 enum tree_code code = TREE_CODE (type);
21715 dw_die_ref type_die = NULL;
21716
21717 if (debug_info_level <= DINFO_LEVEL_TERSE)
21718 return;
21719
21720 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21721 or fixed-point type, use the inner type. This is because we have no
21722 support for unnamed types in base_type_die. This can happen if this is
21723 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21724 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21725 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21726 type = TREE_TYPE (type), code = TREE_CODE (type);
21727
21728 if (code == ERROR_MARK
21729 /* Handle a special case. For functions whose return type is void, we
21730 generate *no* type attribute. (Note that no object may have type
21731 `void', so this only applies to function return types). */
21732 || code == VOID_TYPE)
21733 return;
21734
21735 type_die = modified_type_die (type,
21736 cv_quals | TYPE_QUALS (type),
21737 reverse,
21738 context_die);
21739
21740 if (type_die != NULL)
21741 add_AT_die_ref (object_die, DW_AT_type, type_die);
21742 }
21743
21744 /* Given an object die, add the calling convention attribute for the
21745 function call type. */
21746 static void
21747 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21748 {
21749 enum dwarf_calling_convention value = DW_CC_normal;
21750
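/* Ask the target for the DWARF calling convention of this function type;
   DW_CC_normal means there is nothing special to report.  */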
21751 value = ((enum dwarf_calling_convention)
21752 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21753
21754 if (is_fortran ()
21755 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21756 {
21757 /* DWARF 2 doesn't provide a way to identify a program's source-level
21758 entry point. DW_AT_calling_convention attributes are only meant
21759 to describe functions' calling conventions. However, lacking a
21760 better way to signal the Fortran main program, we used this for
21761 a long time, following existing custom. Now, DWARF 4 has
21762 DW_AT_main_subprogram, which we add below, but some tools still
21763 rely on the old way, which we thus keep. */
21764 value = DW_CC_program;
21765
21766 if (dwarf_version >= 4 || !dwarf_strict)
21767 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21768 }
21769
21770 /* Only add the attribute if the backend requests it, and
21771 the value is not DW_CC_normal. */
21772 if (value && (value != DW_CC_normal))
21773 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21774 }
21775
21776 /* Given a tree pointer to a struct, class, union, or enum type node, return
21777 a pointer to the (string) tag name for the given type, or zero if the type
21778 was declared without a tag. */
21779
21780 static const char *
21781 type_tag (const_tree type)
21782 {
21783 const char *name = 0;
21784
21785 if (TYPE_NAME (type) != 0)
21786 {
21787 tree t = 0;
21788
21789 /* Find the IDENTIFIER_NODE for the type name. */
21790 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21791 && !TYPE_NAMELESS (type))
21792 t = TYPE_NAME (type);
21793
21794 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21795 a TYPE_DECL node, regardless of whether or not a `typedef' was
21796 involved. */
21797 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21798 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21799 {
21800 /* We want to be extra verbose. Don't call dwarf_name if
21801 DECL_NAME isn't set. The default hook for decl_printable_name
21802 doesn't like that, and in this context it's correct to return
21803 0, instead of "<anonymous>" or the like. */
21804 if (DECL_NAME (TYPE_NAME (type))
21805 && !DECL_NAMELESS (TYPE_NAME (type)))
21806 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21807 }
21808
21809 /* Now get the name as a string, or invent one. */
21810 if (!name && t != 0)
21811 name = IDENTIFIER_POINTER (t);
21812 }
21813
21814 return (name == 0 || *name == '\0') ? 0 : name;
21815 }
21816
21817 /* Return the type associated with a data member, make a special check
21818 for bit field types. */
21819
21820 static inline tree
21821 member_declared_type (const_tree member)
21822 {
21823 return (DECL_BIT_FIELD_TYPE (member)
21824 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21825 }
21826
21827 /* Get the decl's label, as described by its RTL. This may be different
21828 from the DECL_NAME name used in the source file. */
21829
21830 #if 0
21831 static const char *
21832 decl_start_label (tree decl)
21833 {
21834 rtx x;
21835 const char *fnname;
21836
21837 x = DECL_RTL (decl);
21838 gcc_assert (MEM_P (x));
21839
21840 x = XEXP (x, 0);
21841 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21842
21843 fnname = XSTR (x, 0);
21844 return fnname;
21845 }
21846 #endif
21847 \f
21848 /* For variable-length arrays that have been previously generated, but
21849 may be incomplete due to missing subscript info, fill the subscript
21850 info. Return TRUE if this is one of those cases. */
21851 static bool
21852 fill_variable_array_bounds (tree type)
21853 {
21854 if (TREE_ASM_WRITTEN (type)
21855 && TREE_CODE (type) == ARRAY_TYPE
21856 && variably_modified_type_p (type, NULL))
21857 {
21858 dw_die_ref array_die = lookup_type_die (type);
21859 if (!array_die)
21860 return false;
21861 add_subscript_info (array_die, type, !is_ada ());
21862 return true;
21863 }
21864 return false;
21865 }
21866
21867 /* These routines generate the internal representation of the DIE's for
21868 the compilation unit. Debugging information is collected by walking
21869 the declaration trees passed in from dwarf2out_decl(). */
21870
21871 static void
21872 gen_array_type_die (tree type, dw_die_ref context_die)
21873 {
21874 dw_die_ref array_die;
21875
21876 /* GNU compilers represent multidimensional array types as sequences of one
21877 dimensional array types whose element types are themselves array types.
21878 We sometimes squish that down to a single array_type DIE with multiple
21879 subscripts in the Dwarf debugging info. The draft Dwarf specification
21880 says that we are allowed to do this kind of compression in C, because
21881 there is no difference between an array of arrays and a multidimensional
21882 array. We don't do this for Ada to remain as close as possible to the
21883 actual representation, which is especially important given the language's
21884 flexibility wrt arrays of variable size. */
21885
21886 bool collapse_nested_arrays = !is_ada ();
21887
21888 if (fill_variable_array_bounds (type))
21889 return;
21890
21891 dw_die_ref scope_die = scope_die_for (type, context_die);
21892 tree element_type;
21893
21894 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21895 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21896 if (TREE_CODE (type) == ARRAY_TYPE
21897 && TYPE_STRING_FLAG (type)
21898 && is_fortran ()
21899 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21900 {
21901 HOST_WIDE_INT size;
21902
21903 array_die = new_die (DW_TAG_string_type, scope_die, type);
21904 add_name_attribute (array_die, type_tag (type));
21905 equate_type_number_to_die (type, array_die);
21906 size = int_size_in_bytes (type);
21907 if (size >= 0)
21908 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21909 /* ??? We can't annotate types late, but for LTO we may not
21910 generate a location early either (gfortran.dg/save_6.f90). */
21911 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21912 && TYPE_DOMAIN (type) != NULL_TREE
21913 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21914 {
21915 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21916 tree rszdecl = szdecl;
21917
21918 size = int_size_in_bytes (TREE_TYPE (szdecl));
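/* The string length is normally a DECL; also accept an indirection through an
   address-sized DECL, and otherwise force SIZE to 0 so that no
   DW_AT_string_length is emitted.  */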
21919 if (!DECL_P (szdecl))
21920 {
21921 if (TREE_CODE (szdecl) == INDIRECT_REF
21922 && DECL_P (TREE_OPERAND (szdecl, 0)))
21923 {
21924 rszdecl = TREE_OPERAND (szdecl, 0);
21925 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21926 != DWARF2_ADDR_SIZE)
21927 size = 0;
21928 }
21929 else
21930 size = 0;
21931 }
21932 if (size > 0)
21933 {
21934 dw_loc_list_ref loc
21935 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21936 NULL);
21937 if (loc)
21938 {
21939 add_AT_location_description (array_die, DW_AT_string_length,
21940 loc);
21941 if (size != DWARF2_ADDR_SIZE)
21942 add_AT_unsigned (array_die, dwarf_version >= 5
21943 ? DW_AT_string_length_byte_size
21944 : DW_AT_byte_size, size);
21945 }
21946 }
21947 }
21948 return;
21949 }
21950
21951 array_die = new_die (DW_TAG_array_type, scope_die, type);
21952 add_name_attribute (array_die, type_tag (type));
21953 equate_type_number_to_die (type, array_die);
21954
21955 if (TREE_CODE (type) == VECTOR_TYPE)
21956 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21957
21958 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21959 if (is_fortran ()
21960 && TREE_CODE (type) == ARRAY_TYPE
21961 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21962 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21963 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21964
21965 #if 0
21966 /* We default the array ordering. Debuggers will probably do the right
21967 things even if DW_AT_ordering is not present. It's not even an issue
21968 until we start to get into multidimensional arrays anyway. If a debugger
21969 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21970 then we'll have to put the DW_AT_ordering attribute back in. (But if
21971 and when we find out that we need to put these in, we will only do so
21972 for multidimensional arrays.) */
21973 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21974 #endif
21975
21976 if (TREE_CODE (type) == VECTOR_TYPE)
21977 {
21978 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21979 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21980 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21981 add_bound_info (subrange_die, DW_AT_upper_bound,
21982 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21983 }
21984 else
21985 add_subscript_info (array_die, type, collapse_nested_arrays);
21986
21987 /* Add representation of the type of the elements of this array type and
21988 emit the corresponding DIE if we haven't done it already. */
21989 element_type = TREE_TYPE (type);
21990 if (collapse_nested_arrays)
21991 while (TREE_CODE (element_type) == ARRAY_TYPE)
21992 {
21993 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21994 break;
21995 element_type = TREE_TYPE (element_type);
21996 }
21997
21998 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21999 TREE_CODE (type) == ARRAY_TYPE
22000 && TYPE_REVERSE_STORAGE_ORDER (type),
22001 context_die);
22002
22003 add_gnat_descriptive_type_attribute (array_die, type, context_die);
22004 if (TYPE_ARTIFICIAL (type))
22005 add_AT_flag (array_die, DW_AT_artificial, 1);
22006
22007 if (get_AT (array_die, DW_AT_name))
22008 add_pubtype (type, array_die);
22009
22010 add_alignment_attribute (array_die, type);
22011 }
22012
22013 /* This routine generates a DIE for an array with a hidden descriptor; details
22014 are filled into *info by a langhook. */
22015
22016 static void
22017 gen_descr_array_type_die (tree type, struct array_descr_info *info,
22018 dw_die_ref context_die)
22019 {
22020 const dw_die_ref scope_die = scope_die_for (type, context_die);
22021 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
22022 struct loc_descr_context context = { type, info->base_decl, NULL,
22023 false, false };
22024 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
22025 int dim;
22026
22027 add_name_attribute (array_die, type_tag (type));
22028 equate_type_number_to_die (type, array_die);
22029
22030 if (info->ndimensions > 1)
22031 switch (info->ordering)
22032 {
22033 case array_descr_ordering_row_major:
22034 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
22035 break;
22036 case array_descr_ordering_column_major:
22037 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
22038 break;
22039 default:
22040 break;
22041 }
22042
22043 if (dwarf_version >= 3 || !dwarf_strict)
22044 {
22045 if (info->data_location)
22046 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
22047 dw_scalar_form_exprloc, &context);
22048 if (info->associated)
22049 add_scalar_info (array_die, DW_AT_associated, info->associated,
22050 dw_scalar_form_constant
22051 | dw_scalar_form_exprloc
22052 | dw_scalar_form_reference, &context);
22053 if (info->allocated)
22054 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
22055 dw_scalar_form_constant
22056 | dw_scalar_form_exprloc
22057 | dw_scalar_form_reference, &context);
22058 if (info->stride)
22059 {
22060 const enum dwarf_attribute attr
22061 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
22062 const int forms
22063 = (info->stride_in_bits)
22064 ? dw_scalar_form_constant
22065 : (dw_scalar_form_constant
22066 | dw_scalar_form_exprloc
22067 | dw_scalar_form_reference);
22068
22069 add_scalar_info (array_die, attr, info->stride, forms, &context);
22070 }
22071 }
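/* DWARF 5 additions: if the descriptor supplies a rank expression (as for
   Fortran assumed-rank arrays), emit DW_AT_rank and describe the dimensions
   with DW_TAG_generic_subrange children.  */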
22072 if (dwarf_version >= 5)
22073 {
22074 if (info->rank)
22075 {
22076 add_scalar_info (array_die, DW_AT_rank, info->rank,
22077 dw_scalar_form_constant
22078 | dw_scalar_form_exprloc, &context);
22079 subrange_tag = DW_TAG_generic_subrange;
22080 context.placeholder_arg = true;
22081 }
22082 }
22083
22084 add_gnat_descriptive_type_attribute (array_die, type, context_die);
22085
22086 for (dim = 0; dim < info->ndimensions; dim++)
22087 {
22088 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
22089
22090 if (info->dimen[dim].bounds_type)
22091 add_type_attribute (subrange_die,
22092 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
22093 false, context_die);
22094 if (info->dimen[dim].lower_bound)
22095 add_bound_info (subrange_die, DW_AT_lower_bound,
22096 info->dimen[dim].lower_bound, &context);
22097 if (info->dimen[dim].upper_bound)
22098 add_bound_info (subrange_die, DW_AT_upper_bound,
22099 info->dimen[dim].upper_bound, &context);
22100 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
22101 add_scalar_info (subrange_die, DW_AT_byte_stride,
22102 info->dimen[dim].stride,
22103 dw_scalar_form_constant
22104 | dw_scalar_form_exprloc
22105 | dw_scalar_form_reference,
22106 &context);
22107 }
22108
22109 gen_type_die (info->element_type, context_die);
22110 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
22111 TREE_CODE (type) == ARRAY_TYPE
22112 && TYPE_REVERSE_STORAGE_ORDER (type),
22113 context_die);
22114
22115 if (get_AT (array_die, DW_AT_name))
22116 add_pubtype (type, array_die);
22117
22118 add_alignment_attribute (array_die, type);
22119 }
22120
22121 #if 0
22122 static void
22123 gen_entry_point_die (tree decl, dw_die_ref context_die)
22124 {
22125 tree origin = decl_ultimate_origin (decl);
22126 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
22127
22128 if (origin != NULL)
22129 add_abstract_origin_attribute (decl_die, origin);
22130 else
22131 {
22132 add_name_and_src_coords_attributes (decl_die, decl);
22133 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
22134 TYPE_UNQUALIFIED, false, context_die);
22135 }
22136
22137 if (DECL_ABSTRACT_P (decl))
22138 equate_decl_number_to_die (decl, decl_die);
22139 else
22140 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
22141 }
22142 #endif
22143
22144 /* Walk through the list of incomplete types again, trying once more to
22145 emit full debugging info for them. */
22146
22147 static void
22148 retry_incomplete_types (void)
22149 {
22150 set_early_dwarf s;
22151 int i;
22152
22153 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22154 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22155 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22156 vec_safe_truncate (incomplete_types, 0);
22157 }
22158
22159 /* Determine what tag to use for a record type. */
22160
22161 static enum dwarf_tag
22162 record_type_tag (tree type)
22163 {
22164 if (! lang_hooks.types.classify_record)
22165 return DW_TAG_structure_type;
22166
22167 switch (lang_hooks.types.classify_record (type))
22168 {
22169 case RECORD_IS_STRUCT:
22170 return DW_TAG_structure_type;
22171
22172 case RECORD_IS_CLASS:
22173 return DW_TAG_class_type;
22174
22175 case RECORD_IS_INTERFACE:
22176 if (dwarf_version >= 3 || !dwarf_strict)
22177 return DW_TAG_interface_type;
22178 return DW_TAG_structure_type;
22179
22180 default:
22181 gcc_unreachable ();
22182 }
22183 }
22184
22185 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22186 include all of the information about the enumeration values also. Each
22187 enumerated type name/value is listed as a child of the enumerated type
22188 DIE. */
22189
22190 static dw_die_ref
22191 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22192 {
22193 dw_die_ref type_die = lookup_type_die (type);
22194 dw_die_ref orig_type_die = type_die;
22195
22196 if (type_die == NULL)
22197 {
22198 type_die = new_die (DW_TAG_enumeration_type,
22199 scope_die_for (type, context_die), type);
22200 equate_type_number_to_die (type, type_die);
22201 add_name_attribute (type_die, type_tag (type));
22202 if ((dwarf_version >= 4 || !dwarf_strict)
22203 && ENUM_IS_SCOPED (type))
22204 add_AT_flag (type_die, DW_AT_enum_class, 1);
22205 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22206 add_AT_flag (type_die, DW_AT_declaration, 1);
22207 if (!dwarf_strict)
22208 add_AT_unsigned (type_die, DW_AT_encoding,
22209 TYPE_UNSIGNED (type)
22210 ? DW_ATE_unsigned
22211 : DW_ATE_signed);
22212 }
22213 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22214 return type_die;
22215 else
22216 remove_AT (type_die, DW_AT_declaration);
22217
22218 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22219 given enum type is incomplete, do not generate the DW_AT_byte_size
22220 attribute or the DW_AT_element_list attribute. */
22221 if (TYPE_SIZE (type))
22222 {
22223 tree link;
22224
22225 if (!ENUM_IS_OPAQUE (type))
22226 TREE_ASM_WRITTEN (type) = 1;
22227 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22228 add_byte_size_attribute (type_die, type);
22229 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22230 add_alignment_attribute (type_die, type);
22231 if ((dwarf_version >= 3 || !dwarf_strict)
22232 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22233 {
22234 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22235 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22236 context_die);
22237 }
22238 if (TYPE_STUB_DECL (type) != NULL_TREE)
22239 {
22240 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22241 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22242 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22243 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22244 }
22245
22246 /* If the first reference to this type was as the return type of an
22247 inline function, then it may not have a parent. Fix this now. */
22248 if (type_die->die_parent == NULL)
22249 add_child_die (scope_die_for (type, context_die), type_die);
22250
22251 for (link = TYPE_VALUES (type);
22252 link != NULL; link = TREE_CHAIN (link))
22253 {
22254 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22255 tree value = TREE_VALUE (link);
22256
22257 if (DECL_P (value))
22258 equate_decl_number_to_die (value, enum_die);
22259
22260 gcc_assert (!ENUM_IS_OPAQUE (type));
22261 add_name_attribute (enum_die,
22262 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22263
22264 if (TREE_CODE (value) == CONST_DECL)
22265 value = DECL_INITIAL (value);
22266
22267 if (simple_type_size_in_bits (TREE_TYPE (value))
22268 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22269 {
22270 /* For constant forms created by add_AT_unsigned, DWARF
22271 consumers (GDB, elfutils, etc.) always zero-extend
22272 the value. Only when the actual value is negative
22273 do we need to use add_AT_int to generate a constant
22274 form that can represent negative values. */
22275 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22276 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22277 add_AT_unsigned (enum_die, DW_AT_const_value,
22278 (unsigned HOST_WIDE_INT) val);
22279 else
22280 add_AT_int (enum_die, DW_AT_const_value, val);
22281 }
22282 else
22283 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22284 that here. TODO: This should be re-worked to use correct
22285 signed/unsigned double tags for all cases. */
22286 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22287 }
22288
22289 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22290 if (TYPE_ARTIFICIAL (type)
22291 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22292 add_AT_flag (type_die, DW_AT_artificial, 1);
22293 }
22294 else
22295 add_AT_flag (type_die, DW_AT_declaration, 1);
22296
22297 add_pubtype (type, type_die);
22298
22299 return type_die;
22300 }
22301
22302 /* Generate a DIE to represent either a real live formal parameter decl or to
22303 represent just the type of some formal parameter position in some function
22304 type.
22305
22306 Note that this routine is a bit unusual because its argument may be a
22307 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22308 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22309 node. If it's the former then this function is being called to output a
22310 DIE to represent a formal parameter object (or some inlining thereof). If
22311 it's the latter, then this function is only being called to output a
22312 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22313 argument type of some subprogram type.
22314 If EMIT_NAME_P is true, name and source coordinate attributes
22315 are emitted. */
22316
22317 static dw_die_ref
22318 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22319 dw_die_ref context_die)
22320 {
22321 tree node_or_origin = node ? node : origin;
22322 tree ultimate_origin;
22323 dw_die_ref parm_die = NULL;
22324
22325 if (DECL_P (node_or_origin))
22326 {
22327 parm_die = lookup_decl_die (node);
22328
22329 /* If the contexts differ, we may not be talking about the same
22330 thing.
22331 ??? When in LTO the DIE parent is the "abstract" copy and the
22332 context_die is the specification "copy". */
22333 if (parm_die
22334 && parm_die->die_parent != context_die
22335 && (parm_die->die_parent->die_tag != DW_TAG_GNU_formal_parameter_pack
22336 || parm_die->die_parent->die_parent != context_die)
22337 && !in_lto_p)
22338 {
22339 gcc_assert (!DECL_ABSTRACT_P (node));
22340 /* This can happen when creating a concrete instance, in
22341 which case we need to create a new DIE that will get
22342 annotated with DW_AT_abstract_origin. */
22343 parm_die = NULL;
22344 }
22345
22346 if (parm_die && parm_die->die_parent == NULL)
22347 {
22348 /* Check that parm_die already has the right attributes that
22349 we would have added below. If any attributes are
22350 missing, fall through to add them. */
22351 if (! DECL_ABSTRACT_P (node_or_origin)
22352 && !get_AT (parm_die, DW_AT_location)
22353 && !get_AT (parm_die, DW_AT_const_value))
22354 /* We are missing location info, and are about to add it. */
22355 ;
22356 else
22357 {
22358 add_child_die (context_die, parm_die);
22359 return parm_die;
22360 }
22361 }
22362 }
22363
22364 /* If we have a previously generated DIE, use it, unless this is a
22365 concrete instance (origin != NULL), in which case we need a new
22366 DIE with a corresponding DW_AT_abstract_origin. */
22367 bool reusing_die;
22368 if (parm_die && origin == NULL)
22369 reusing_die = true;
22370 else
22371 {
22372 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22373 reusing_die = false;
22374 }
22375
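/* Fill in the attributes: declarations get an abstract origin or name, a type
   and a location as appropriate, while bare ..._TYPE nodes just get a
   DW_AT_type.  */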
22376 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22377 {
22378 case tcc_declaration:
22379 ultimate_origin = decl_ultimate_origin (node_or_origin);
22380 if (node || ultimate_origin)
22381 origin = ultimate_origin;
22382
22383 if (reusing_die)
22384 goto add_location;
22385
22386 if (origin != NULL)
22387 add_abstract_origin_attribute (parm_die, origin);
22388 else if (emit_name_p)
22389 add_name_and_src_coords_attributes (parm_die, node);
22390 if (origin == NULL
22391 || (! DECL_ABSTRACT_P (node_or_origin)
22392 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22393 decl_function_context
22394 (node_or_origin))))
22395 {
22396 tree type = TREE_TYPE (node_or_origin);
22397 if (decl_by_reference_p (node_or_origin))
22398 add_type_attribute (parm_die, TREE_TYPE (type),
22399 TYPE_UNQUALIFIED,
22400 false, context_die);
22401 else
22402 add_type_attribute (parm_die, type,
22403 decl_quals (node_or_origin),
22404 false, context_die);
22405 }
22406 if (origin == NULL && DECL_ARTIFICIAL (node))
22407 add_AT_flag (parm_die, DW_AT_artificial, 1);
22408 add_location:
22409 if (node && node != origin)
22410 equate_decl_number_to_die (node, parm_die);
22411 if (! DECL_ABSTRACT_P (node_or_origin))
22412 add_location_or_const_value_attribute (parm_die, node_or_origin,
22413 node == NULL);
22414
22415 break;
22416
22417 case tcc_type:
22418 /* We were called with some kind of a ..._TYPE node. */
22419 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22420 context_die);
22421 break;
22422
22423 default:
22424 gcc_unreachable ();
22425 }
22426
22427 return parm_die;
22428 }
22429
22430 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22431 children DW_TAG_formal_parameter DIEs representing the arguments of the
22432 parameter pack.
22433
22434 PARM_PACK must be a function parameter pack.
22435 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22436 must point to the subsequent arguments of the function PACK_ARG belongs to.
22437 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22438 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22439 following the last one for which a DIE was generated. */
22440
22441 static dw_die_ref
22442 gen_formal_parameter_pack_die (tree parm_pack,
22443 tree pack_arg,
22444 dw_die_ref subr_die,
22445 tree *next_arg)
22446 {
22447 tree arg;
22448 dw_die_ref parm_pack_die;
22449
22450 gcc_assert (parm_pack
22451 && lang_hooks.function_parameter_pack_p (parm_pack)
22452 && subr_die);
22453
22454 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22455 add_src_coords_attributes (parm_pack_die, parm_pack);
22456
22457 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22458 {
22459 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22460 parm_pack))
22461 break;
22462 gen_formal_parameter_die (arg, NULL,
22463 false /* Don't emit name attribute. */,
22464 parm_pack_die);
22465 }
22466 if (next_arg)
22467 *next_arg = arg;
22468 return parm_pack_die;
22469 }
22470
22471 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22472 at the end of an (ANSI prototyped) formal parameters list. */
22473
22474 static void
22475 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22476 {
22477 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22478 }
22479
22480 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22481 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22482 parameters as specified in some function type specification (except for
22483 those which appear as part of a function *definition*). */
22484
22485 static void
22486 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22487 {
22488 tree link;
22489 tree formal_type = NULL;
22490 tree first_parm_type;
22491 tree arg;
22492
22493 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22494 {
22495 arg = DECL_ARGUMENTS (function_or_method_type);
22496 function_or_method_type = TREE_TYPE (function_or_method_type);
22497 }
22498 else
22499 arg = NULL_TREE;
22500
22501 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22502
22503 /* Make our first pass over the list of formal parameter types and output a
22504 DW_TAG_formal_parameter DIE for each one. */
22505 for (link = first_parm_type; link; )
22506 {
22507 dw_die_ref parm_die;
22508
22509 formal_type = TREE_VALUE (link);
22510 if (formal_type == void_type_node)
22511 break;
22512
22513 /* Output a (nameless) DIE to represent the formal parameter itself. */
22514 parm_die = gen_formal_parameter_die (formal_type, NULL,
22515 true /* Emit name attribute. */,
22516 context_die);
22517 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22518 && link == first_parm_type)
22519 {
22520 add_AT_flag (parm_die, DW_AT_artificial, 1);
22521 if (dwarf_version >= 3 || !dwarf_strict)
22522 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22523 }
22524 else if (arg && DECL_ARTIFICIAL (arg))
22525 add_AT_flag (parm_die, DW_AT_artificial, 1);
22526
22527 link = TREE_CHAIN (link);
22528 if (arg)
22529 arg = DECL_CHAIN (arg);
22530 }
22531
22532 /* If this function type has an ellipsis, add a
22533 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22534 if (formal_type != void_type_node)
22535 gen_unspecified_parameters_die (function_or_method_type, context_die);
22536
22537 /* Make our second (and final) pass over the list of formal parameter types
22538 and output DIEs to represent those types (as necessary). */
22539 for (link = TYPE_ARG_TYPES (function_or_method_type);
22540 link && TREE_VALUE (link);
22541 link = TREE_CHAIN (link))
22542 gen_type_die (TREE_VALUE (link), context_die);
22543 }
22544
22545 /* We want to generate the DIE for TYPE so that we can generate the
22546 die for MEMBER, which has been defined; we will need to refer back
22547 to the member declaration nested within TYPE. If we're trying to
22548 generate minimal debug info for TYPE, processing TYPE won't do the
22549 trick; we need to attach the member declaration by hand. */
22550
22551 static void
22552 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22553 {
22554 gen_type_die (type, context_die);
22555
22556 /* If we're trying to avoid duplicate debug info, we may not have
22557 emitted the member decl for this function. Emit it now. */
22558 if (TYPE_STUB_DECL (type)
22559 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22560 && ! lookup_decl_die (member))
22561 {
22562 dw_die_ref type_die;
22563 gcc_assert (!decl_ultimate_origin (member));
22564
22565 type_die = lookup_type_die_strip_naming_typedef (type);
22566 if (TREE_CODE (member) == FUNCTION_DECL)
22567 gen_subprogram_die (member, type_die);
22568 else if (TREE_CODE (member) == FIELD_DECL)
22569 {
22570 /* Ignore the nameless fields that are used to skip bits but handle
22571 C++ anonymous unions and structs. */
22572 if (DECL_NAME (member) != NULL_TREE
22573 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22574 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22575 {
22576 struct vlr_context vlr_ctx = {
22577 DECL_CONTEXT (member), /* struct_type */
22578 NULL_TREE /* variant_part_offset */
22579 };
22580 gen_type_die (member_declared_type (member), type_die);
22581 gen_field_die (member, &vlr_ctx, type_die);
22582 }
22583 }
22584 else
22585 gen_variable_die (member, NULL_TREE, type_die);
22586 }
22587 }
22588 \f
22589 /* Forward declare these functions, because they are mutually recursive
22590 with their set_block_* pairing functions. */
22591 static void set_decl_origin_self (tree);
22592
22593 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22594 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22595 that it points to the node itself, thus indicating that the node is its
22596 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22597 the given node is NULL, recursively descend the decl/block tree which
22598 it is the root of, and for each other ..._DECL or BLOCK node contained
22599 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22600 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22601 values to point to themselves. */
22602
22603 static void
22604 set_block_origin_self (tree stmt)
22605 {
22606 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22607 {
22608 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22609
22610 {
22611 tree local_decl;
22612
22613 for (local_decl = BLOCK_VARS (stmt);
22614 local_decl != NULL_TREE;
22615 local_decl = DECL_CHAIN (local_decl))
22616 /* Do not recurse on nested functions since the inlining status
22617 of parent and child can be different as per the DWARF spec. */
22618 if (TREE_CODE (local_decl) != FUNCTION_DECL
22619 && !DECL_EXTERNAL (local_decl))
22620 set_decl_origin_self (local_decl);
22621 }
22622
22623 {
22624 tree subblock;
22625
22626 for (subblock = BLOCK_SUBBLOCKS (stmt);
22627 subblock != NULL_TREE;
22628 subblock = BLOCK_CHAIN (subblock))
22629 set_block_origin_self (subblock); /* Recurse. */
22630 }
22631 }
22632 }
22633
22634 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22635 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22636 node so that it points to the node itself, thus indicating that the
22637 node represents its own (abstract) origin. Additionally, if the
22638 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22639 the decl/block tree of which the given node is the root, and for
22640 each other ..._DECL or BLOCK node contained therein whose
22641 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22642 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22643 point to themselves. */
22644
22645 static void
22646 set_decl_origin_self (tree decl)
22647 {
22648 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22649 {
22650 DECL_ABSTRACT_ORIGIN (decl) = decl;
22651 if (TREE_CODE (decl) == FUNCTION_DECL)
22652 {
22653 tree arg;
22654
22655 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22656 DECL_ABSTRACT_ORIGIN (arg) = arg;
22657 if (DECL_INITIAL (decl) != NULL_TREE
22658 && DECL_INITIAL (decl) != error_mark_node)
22659 set_block_origin_self (DECL_INITIAL (decl));
22660 }
22661 }
22662 }
22663 \f
22664 /* Mark the early DIE for DECL as the abstract instance. */
22665
22666 static void
22667 dwarf2out_abstract_function (tree decl)
22668 {
22669 dw_die_ref old_die;
22670
22671 /* Make sure we have the actual abstract inline, not a clone. */
22672 decl = DECL_ORIGIN (decl);
22673
22674 if (DECL_IGNORED_P (decl))
22675 return;
22676
22677 /* In LTO we're all set. We already created abstract instances
22678 early and we want to avoid creating a concrete instance of that
22679 if we don't output it. */
22680 if (in_lto_p)
22681 return;
22682
22683 old_die = lookup_decl_die (decl);
22684 gcc_assert (old_die != NULL);
22685 if (get_AT (old_die, DW_AT_inline))
22686 /* We've already generated the abstract instance. */
22687 return;
22688
22689 /* Go ahead and put DW_AT_inline on the DIE. */
22690 if (DECL_DECLARED_INLINE_P (decl))
22691 {
22692 if (cgraph_function_possibly_inlined_p (decl))
22693 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22694 else
22695 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22696 }
22697 else
22698 {
22699 if (cgraph_function_possibly_inlined_p (decl))
22700 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22701 else
22702 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22703 }
22704
22705 if (DECL_DECLARED_INLINE_P (decl)
22706 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22707 add_AT_flag (old_die, DW_AT_artificial, 1);
22708
22709 set_decl_origin_self (decl);
22710 }
22711
22712 /* Helper function of premark_used_types() which gets called through
22713 htab_traverse.
22714
22715 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22716 marked as unused by prune_unused_types. */
22717
22718 bool
22719 premark_used_types_helper (tree const &type, void *)
22720 {
22721 dw_die_ref die;
22722
22723 die = lookup_type_die (type);
22724 if (die != NULL)
22725 die->die_perennial_p = 1;
22726 return true;
22727 }
22728
22729 /* Helper function of premark_types_used_by_global_vars which gets called
22730 through htab_traverse.
22731
22732 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22733 marked as unused by prune_unused_types. The DIE of the type is marked
22734 only if the global variable using the type will actually be emitted. */
22735
22736 int
22737 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22738 void *)
22739 {
22740 struct types_used_by_vars_entry *entry;
22741 dw_die_ref die;
22742
22743 entry = (struct types_used_by_vars_entry *) *slot;
22744 gcc_assert (entry->type != NULL
22745 && entry->var_decl != NULL);
22746 die = lookup_type_die (entry->type);
22747 if (die)
22748 {
22749 /* Ask cgraph if the global variable really is to be emitted.
22750 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22751 varpool_node *node = varpool_node::get (entry->var_decl);
22752 if (node && node->definition)
22753 {
22754 die->die_perennial_p = 1;
22755 /* Keep the parent DIEs as well. */
22756 while ((die = die->die_parent) && die->die_perennial_p == 0)
22757 die->die_perennial_p = 1;
22758 }
22759 }
22760 return 1;
22761 }
22762
22763 /* Mark all members of used_types_hash as perennial. */
22764
22765 static void
22766 premark_used_types (struct function *fun)
22767 {
22768 if (fun && fun->used_types_hash)
22769 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22770 }
22771
22772 /* Mark all members of types_used_by_vars_entry as perennial. */
22773
22774 static void
22775 premark_types_used_by_global_vars (void)
22776 {
22777 if (types_used_by_vars_hash)
22778 types_used_by_vars_hash
22779 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22780 }
22781
22782 /* Mark all variables used by the symtab as perennial. */
22783
22784 static void
22785 premark_used_variables (void)
22786 {
22787 /* Mark DIEs in the symtab as used. */
22788 varpool_node *var;
22789 FOR_EACH_VARIABLE (var)
22790 {
22791 dw_die_ref die = lookup_decl_die (var->decl);
22792 if (die)
22793 die->die_perennial_p = 1;
22794 }
22795 }
22796
22797 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22798 for CA_LOC call arg loc node. */
22799
22800 static dw_die_ref
22801 gen_call_site_die (tree decl, dw_die_ref subr_die,
22802 struct call_arg_loc_node *ca_loc)
22803 {
22804 dw_die_ref stmt_die = NULL, die;
22805 tree block = ca_loc->block;
22806
22807 while (block
22808 && block != DECL_INITIAL (decl)
22809 && TREE_CODE (block) == BLOCK)
22810 {
22811 stmt_die = lookup_block_die (block);
22812 if (stmt_die)
22813 break;
22814 block = BLOCK_SUPERCONTEXT (block);
22815 }
22816 if (stmt_die == NULL)
22817 stmt_die = subr_die;
22818 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22819 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22820 if (ca_loc->tail_call_p)
22821 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22822 if (ca_loc->symbol_ref)
22823 {
22824 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22825 if (tdie)
22826 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22827 else
22828 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22829 false);
22830 }
22831 return die;
22832 }
22833
22834 /* Generate a DIE to represent a declared function (either file-scope or
22835 block-local). */
22836
22837 static void
22838 gen_subprogram_die (tree decl, dw_die_ref context_die)
22839 {
22840 tree origin = decl_ultimate_origin (decl);
22841 dw_die_ref subr_die;
22842 dw_die_ref old_die = lookup_decl_die (decl);
22843 bool old_die_had_no_children = false;
22844
22845 /* This function gets called multiple times for different stages of
22846 the debug process. For example, for func() in this code:
22847
22848 namespace S
22849 {
22850 void func() { ... }
22851 }
22852
22853 ...we get called 4 times. Twice in early debug and twice in
22854 late debug:
22855
22856 Early debug
22857 -----------
22858
22859 1. Once while generating func() within the namespace. This is
22860 the declaration. The declaration bit below is set, as the
22861 context is the namespace.
22862
22863 A new DIE will be generated with DW_AT_declaration set.
22864
22865 2. Once for func() itself. This is the specification. The
22866 declaration bit below is clear as the context is the CU.
22867
22868 We will use the cached DIE from (1) to create a new DIE with
22869 DW_AT_specification pointing to the declaration in (1).
22870
22871 Late debug via rest_of_handle_final()
22872 -------------------------------------
22873
22874 3. Once generating func() within the namespace. This is also the
22875 declaration, as in (1), but this time we will early exit below
22876 as we have a cached DIE and a declaration needs no additional
22877 annotations (no locations), as the source declaration line
22878 info is enough.
22879
22880 4. Once for func() itself. As in (2), this is the specification,
22881 but this time we will re-use the cached DIE, and just annotate
22882 it with the location information that should now be available.
22883
22884 For something without namespaces, but with abstract instances, we
22885 are also called multiple times:
22886
22887 class Base
22888 {
22889 public:
22890 Base (); // constructor declaration (1)
22891 };
22892
22893 Base::Base () { } // constructor specification (2)
22894
22895 Early debug
22896 -----------
22897
22898 1. Once for the Base() constructor by virtue of it being a
22899 member of the Base class. This is done via
22900 rest_of_type_compilation.
22901
22902 This is a declaration, so a new DIE will be created with
22903 DW_AT_declaration.
22904
22905 2. Once for the Base() constructor definition, but this time
22906 while generating the abstract instance of the base
22907 constructor (__base_ctor) which is being generated via early
22908 debug of reachable functions.
22909
22910 Even though we have a cached version of the declaration (1),
22911 we will create a DW_AT_specification of the declaration DIE
22912 in (1).
22913
22914 3. Once for the __base_ctor itself, but this time, we generate
22915 a DW_AT_abstract_origin version of the DW_AT_specification in
22916 (2).
22917
22918 Late debug via rest_of_handle_final
22919 -----------------------------------
22920
22921 4. One final time for the __base_ctor (which will have a cached
22922 DIE with DW_AT_abstract_origin created in (3)). This time,
22923 we will just annotate the location information now
22924 available.
22925 */
22926 int declaration = (current_function_decl != decl
22927 || (!DECL_INITIAL (decl) && !origin)
22928 || class_or_namespace_scope_p (context_die));
22929
22930 /* A declaration that has been previously dumped needs no
22931 additional information. */
22932 if (old_die && declaration)
22933 return;
22934
22935 if (in_lto_p && old_die && old_die->die_child == NULL)
22936 old_die_had_no_children = true;
22937
22938 /* Now that the C++ front end lazily declares artificial member fns, we
22939 might need to retrofit the declaration into its class. */
22940 if (!declaration && !origin && !old_die
22941 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22942 && !class_or_namespace_scope_p (context_die)
22943 && debug_info_level > DINFO_LEVEL_TERSE)
22944 old_die = force_decl_die (decl);
22945
22946 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22947 if (origin != NULL)
22948 {
22949 gcc_assert (!declaration || local_scope_p (context_die));
22950
22951 /* Fixup die_parent for the abstract instance of a nested
22952 inline function. */
22953 if (old_die && old_die->die_parent == NULL)
22954 add_child_die (context_die, old_die);
22955
22956 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22957 {
22958 /* If we have a DW_AT_abstract_origin we have a working
22959 cached version. */
22960 subr_die = old_die;
22961 }
22962 else
22963 {
22964 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22965 add_abstract_origin_attribute (subr_die, origin);
22966 /* This is where the actual code for a cloned function is.
22967 Let's emit linkage name attribute for it. This helps
22968 debuggers to e.g, set breakpoints into
22969 constructors/destructors when the user asks "break
22970 K::K". */
22971 add_linkage_name (subr_die, decl);
22972 }
22973 }
22974 /* A cached copy, possibly from early dwarf generation. Reuse as
22975 much as possible. */
22976 else if (old_die)
22977 {
22978 if (!get_AT_flag (old_die, DW_AT_declaration)
22979 /* We can have a normal definition following an inline one in the
22980 case of redefinition of GNU C extern inlines.
22981 It seems reasonable to use AT_specification in this case. */
22982 && !get_AT (old_die, DW_AT_inline))
22983 {
22984 /* Detect and ignore this case, where we are trying to output
22985 something we have already output. */
22986 if (get_AT (old_die, DW_AT_low_pc)
22987 || get_AT (old_die, DW_AT_ranges))
22988 return;
22989
22990 /* If we have no location information, this must be a
22991 partially generated DIE from early dwarf generation.
22992 Fall through and generate it. */
22993 }
22994
22995 /* If the definition comes from the same place as the declaration,
22996 maybe use the old DIE. We always want the DIE for this function
22997 that has the *_pc attributes to be under comp_unit_die so the
22998 debugger can find it. We also need to do this for abstract
22999 instances of inlines, since the spec requires the out-of-line copy
23000 to have the same parent. For local class methods, this doesn't
23001 apply; we just use the old DIE. */
23002 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23003 struct dwarf_file_data * file_index = lookup_filename (s.file);
23004 if (((is_unit_die (old_die->die_parent)
23005 /* This condition fixes the inconsistency/ICE with the
23006 following Fortran test (or some derivative thereof) while
23007 building libgfortran:
23008
23009 module some_m
23010 contains
23011 logical function funky (FLAG)
23012 funky = .true.
23013 end function
23014 end module
23015 */
23016 || (old_die->die_parent
23017 && old_die->die_parent->die_tag == DW_TAG_module)
23018 || local_scope_p (old_die->die_parent)
23019 || context_die == NULL)
23020 && (DECL_ARTIFICIAL (decl)
23021 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
23022 && (get_AT_unsigned (old_die, DW_AT_decl_line)
23023 == (unsigned) s.line)
23024 && (!debug_column_info
23025 || s.column == 0
23026 || (get_AT_unsigned (old_die, DW_AT_decl_column)
23027 == (unsigned) s.column)))))
23028 /* With LTO if there's an abstract instance for
23029 the old DIE, this is a concrete instance and
23030 thus re-use the DIE. */
23031 || get_AT (old_die, DW_AT_abstract_origin))
23032 {
23033 subr_die = old_die;
23034
23035 /* Clear out the declaration attribute, but leave the
23036 parameters so they can be augmented with location
23037 information later. Unless this was a declaration, in
23038 which case, wipe out the nameless parameters and recreate
23039 them further down. */
23040 if (remove_AT (subr_die, DW_AT_declaration))
23041 {
23042
23043 remove_AT (subr_die, DW_AT_object_pointer);
23044 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
23045 }
23046 }
23047 /* Make a specification pointing to the previously built
23048 declaration. */
23049 else
23050 {
23051 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23052 add_AT_specification (subr_die, old_die);
23053 add_pubname (decl, subr_die);
23054 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23055 add_AT_file (subr_die, DW_AT_decl_file, file_index);
23056 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23057 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
23058 if (debug_column_info
23059 && s.column
23060 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23061 != (unsigned) s.column))
23062 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
23063
23064 /* If the prototype had an 'auto' or 'decltype(auto)' in
23065 the return type, emit the real type on the definition die. */
23066 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
23067 {
23068 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
23069 while (die
23070 && (die->die_tag == DW_TAG_reference_type
23071 || die->die_tag == DW_TAG_rvalue_reference_type
23072 || die->die_tag == DW_TAG_pointer_type
23073 || die->die_tag == DW_TAG_const_type
23074 || die->die_tag == DW_TAG_volatile_type
23075 || die->die_tag == DW_TAG_restrict_type
23076 || die->die_tag == DW_TAG_array_type
23077 || die->die_tag == DW_TAG_ptr_to_member_type
23078 || die->die_tag == DW_TAG_subroutine_type))
23079 die = get_AT_ref (die, DW_AT_type);
23080 if (die == auto_die || die == decltype_auto_die)
23081 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23082 TYPE_UNQUALIFIED, false, context_die);
23083 }
23084
23085 /* When we process the method declaration, we haven't seen
23086 the out-of-class defaulted definition yet, so we have to
23087 recheck now. */
23088 if ((dwarf_version >= 5 || ! dwarf_strict)
23089 && !get_AT (subr_die, DW_AT_defaulted))
23090 {
23091 int defaulted
23092 = lang_hooks.decls.decl_dwarf_attribute (decl,
23093 DW_AT_defaulted);
23094 if (defaulted != -1)
23095 {
23096 /* Other values must have been handled before. */
23097 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
23098 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23099 }
23100 }
23101 }
23102 }
23103 /* Create a fresh DIE for anything else. */
23104 else
23105 {
23106 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23107
23108 if (TREE_PUBLIC (decl))
23109 add_AT_flag (subr_die, DW_AT_external, 1);
23110
23111 add_name_and_src_coords_attributes (subr_die, decl);
23112 add_pubname (decl, subr_die);
23113 if (debug_info_level > DINFO_LEVEL_TERSE)
23114 {
23115 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
23116 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23117 TYPE_UNQUALIFIED, false, context_die);
23118 }
23119
23120 add_pure_or_virtual_attribute (subr_die, decl);
23121 if (DECL_ARTIFICIAL (decl))
23122 add_AT_flag (subr_die, DW_AT_artificial, 1);
23123
23124 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
23125 add_AT_flag (subr_die, DW_AT_noreturn, 1);
23126
23127 add_alignment_attribute (subr_die, decl);
23128
23129 add_accessibility_attribute (subr_die, decl);
23130 }
23131
23132 /* Unless we have an existing non-declaration DIE, equate the new
23133 DIE. */
23134 if (!old_die || is_declaration_die (old_die))
23135 equate_decl_number_to_die (decl, subr_die);
23136
23137 if (declaration)
23138 {
23139 if (!old_die || !get_AT (old_die, DW_AT_inline))
23140 {
23141 add_AT_flag (subr_die, DW_AT_declaration, 1);
23142
23143 /* If this is an explicit function declaration then generate
23144 a DW_AT_explicit attribute. */
23145 if ((dwarf_version >= 3 || !dwarf_strict)
23146 && lang_hooks.decls.decl_dwarf_attribute (decl,
23147 DW_AT_explicit) == 1)
23148 add_AT_flag (subr_die, DW_AT_explicit, 1);
23149
23150 /* If this is a C++11 deleted special function member then generate
23151 a DW_AT_deleted attribute. */
23152 if ((dwarf_version >= 5 || !dwarf_strict)
23153 && lang_hooks.decls.decl_dwarf_attribute (decl,
23154 DW_AT_deleted) == 1)
23155 add_AT_flag (subr_die, DW_AT_deleted, 1);
23156
23157 /* If this is a C++11 defaulted special function member then
23158 generate a DW_AT_defaulted attribute. */
23159 if (dwarf_version >= 5 || !dwarf_strict)
23160 {
23161 int defaulted
23162 = lang_hooks.decls.decl_dwarf_attribute (decl,
23163 DW_AT_defaulted);
23164 if (defaulted != -1)
23165 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23166 }
23167
23168 /* If this is a C++11 non-static member function with & ref-qualifier
23169 then generate a DW_AT_reference attribute. */
23170 if ((dwarf_version >= 5 || !dwarf_strict)
23171 && lang_hooks.decls.decl_dwarf_attribute (decl,
23172 DW_AT_reference) == 1)
23173 add_AT_flag (subr_die, DW_AT_reference, 1);
23174
23175 /* If this is a C++11 non-static member function with &&
23176 ref-qualifier then generate a DW_AT_reference attribute. */
23177 if ((dwarf_version >= 5 || !dwarf_strict)
23178 && lang_hooks.decls.decl_dwarf_attribute (decl,
23179 DW_AT_rvalue_reference)
23180 == 1)
23181 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23182 }
23183 }
23184 /* For non DECL_EXTERNALs, if range information is available, fill
23185 the DIE with it. */
23186 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23187 {
23188 HOST_WIDE_INT cfa_fb_offset;
23189
23190 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23191
23192 if (!crtl->has_bb_partition)
23193 {
23194 dw_fde_ref fde = fun->fde;
23195 if (fde->dw_fde_begin)
23196 {
23197 /* We have already generated the labels. */
23198 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23199 fde->dw_fde_end, false);
23200 }
23201 else
23202 {
23203 /* Create start/end labels and add the range. */
23204 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23205 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23206 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23207 current_function_funcdef_no);
23208 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23209 current_function_funcdef_no);
23210 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23211 false);
23212 }
23213
23214 #if VMS_DEBUGGING_INFO
23215 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23216 Section 2.3 Prologue and Epilogue Attributes:
23217 When a breakpoint is set on entry to a function, it is generally
23218 desirable for execution to be suspended, not on the very first
23219 instruction of the function, but rather at a point after the
23220 function's frame has been set up, after any language defined local
23221 declaration processing has been completed, and before execution of
23222 the first statement of the function begins. Debuggers generally
23223 cannot properly determine where this point is. Similarly for a
23224 breakpoint set on exit from a function. The prologue and epilogue
23225 attributes allow a compiler to communicate the location(s) to use. */
23226
23227 {
23228 if (fde->dw_fde_vms_end_prologue)
23229 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23230 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23231
23232 if (fde->dw_fde_vms_begin_epilogue)
23233 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23234 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23235 }
23236 #endif
23237
23238 }
23239 else
23240 {
23241 /* Generate pubnames entries for the split function code ranges. */
23242 dw_fde_ref fde = fun->fde;
23243
23244 if (fde->dw_fde_second_begin)
23245 {
23246 if (dwarf_version >= 3 || !dwarf_strict)
23247 {
23248 /* We should use ranges for non-contiguous code section
23249 addresses. Use the actual code range for the initial
23250 section, since the HOT/COLD labels might precede an
23251 alignment offset. */
23252 bool range_list_added = false;
23253 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23254 fde->dw_fde_end, &range_list_added,
23255 false);
23256 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23257 fde->dw_fde_second_end,
23258 &range_list_added, false);
23259 if (range_list_added)
23260 add_ranges (NULL);
23261 }
23262 else
23263 {
23264 /* There is no real support in DW2 for this, so we make
23265 a work-around. First, emit the pub name for the segment
23266 containing the function label. Then make and emit a
23267 simplified subprogram DIE for the second segment with the
23268 name prefixed by __second_sect_of_. We use the same
23269 linkage name for the second die so that gdb will find both
23270 sections when given "b foo". */
23271 const char *name = NULL;
23272 tree decl_name = DECL_NAME (decl);
23273 dw_die_ref seg_die;
23274
23275 /* Do the 'primary' section. */
23276 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23277 fde->dw_fde_end, false);
23278
23279 /* Build a minimal DIE for the secondary section. */
23280 seg_die = new_die (DW_TAG_subprogram,
23281 subr_die->die_parent, decl);
23282
23283 if (TREE_PUBLIC (decl))
23284 add_AT_flag (seg_die, DW_AT_external, 1);
23285
23286 if (decl_name != NULL
23287 && IDENTIFIER_POINTER (decl_name) != NULL)
23288 {
23289 name = dwarf2_name (decl, 1);
23290 if (! DECL_ARTIFICIAL (decl))
23291 add_src_coords_attributes (seg_die, decl);
23292
23293 add_linkage_name (seg_die, decl);
23294 }
23295 gcc_assert (name != NULL);
23296 add_pure_or_virtual_attribute (seg_die, decl);
23297 if (DECL_ARTIFICIAL (decl))
23298 add_AT_flag (seg_die, DW_AT_artificial, 1);
23299
23300 name = concat ("__second_sect_of_", name, NULL);
23301 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23302 fde->dw_fde_second_end, false);
23303 add_name_attribute (seg_die, name);
23304 if (want_pubnames ())
23305 add_pubname_string (name, seg_die);
23306 }
23307 }
23308 else
23309 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23310 false);
23311 }
23312
23313 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23314
23315 /* We define the "frame base" as the function's CFA. This is more
23316 convenient for several reasons: (1) It's stable across the prologue
23317 and epilogue, which makes it better than just a frame pointer,
23318 (2) With dwarf3, there exists a one-byte encoding that allows us
23319 to reference the .debug_frame data by proxy, but failing that,
23320 (3) We can at least reuse the code inspection and interpretation
23321 code that determines the CFA position at various points in the
23322 function. */
23323 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23324 {
23325 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23326 add_AT_loc (subr_die, DW_AT_frame_base, op);
23327 }
23328 else
23329 {
23330 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23331 if (list->dw_loc_next)
23332 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23333 else
23334 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23335 }
23336
23337 /* Compute a displacement from the "steady-state frame pointer" to
23338 the CFA. The former is what all stack slots and argument slots
23339 will reference in the rtl; the latter is what we've told the
23340 debugger about. We'll need to adjust all frame_base references
23341 by this displacement. */
23342 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23343
23344 if (fun->static_chain_decl)
23345 {
23346 /* DWARF requires here a location expression that computes the
23347 address of the enclosing subprogram's frame base. The machinery
23348 in tree-nested.c is supposed to store this specific address in the
23349 last field of the FRAME record. */
23350 const tree frame_type
23351 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23352 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23353
23354 tree fb_expr
23355 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23356 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23357 fb_expr, fb_decl, NULL_TREE);
23358
23359 add_AT_location_description (subr_die, DW_AT_static_link,
23360 loc_list_from_tree (fb_expr, 0, NULL));
23361 }
23362
23363 resolve_variable_values ();
23364 }
23365
23366 /* Generate child DIEs for template parameters. */
23367 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23368 gen_generic_params_dies (decl);
23369
23370 /* Now output descriptions of the arguments for this function. This gets
23371 (unnecessarily?) complex because the DECL_ARGUMENTS list
23372 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23373 `...' at the end of the formal parameter list. In order to find out if
23374 there was a trailing ellipsis or not, we must instead look at the type
23375 associated with the FUNCTION_DECL. This will be a node of type
23376 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23377 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23378 an ellipsis at the end. */
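/* For illustration (hypothetical declarations, not from the sources):
   for "int f (int a, ...)" the TYPE_ARG_TYPES chain does not end in
   void_type_node, so f gets a trailing DW_TAG_unspecified_parameters
   DIE, whereas for "int g (int a)" the chain is terminated by
   void_type_node and no such DIE is emitted.  */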
23379
23380 /* In the case where we are describing a mere function declaration, all we
23381 need to do here (and all we *can* do here) is to describe the *types* of
23382 its formal parameters. */
23383 if (debug_info_level <= DINFO_LEVEL_TERSE)
23384 ;
23385 else if (declaration)
23386 gen_formal_types_die (decl, subr_die);
23387 else
23388 {
23389 /* Generate DIEs to represent all known formal parameters. */
23390 tree parm = DECL_ARGUMENTS (decl);
23391 tree generic_decl = early_dwarf
23392 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23393 tree generic_decl_parm = generic_decl
23394 ? DECL_ARGUMENTS (generic_decl)
23395 : NULL;
23396
23397 /* Now we want to walk the list of parameters of the function and
23398 emit their relevant DIEs.
23399
23400 We consider the case of DECL being an instance of a generic function
23401 as well as it being a normal function.
23402
23403 If DECL is an instance of a generic function we walk the
23404 parameters of the generic function declaration _and_ the parameters of
23405 DECL itself. This is useful because we want to emit specific DIEs for
23406 function parameter packs and those are declared as part of the
23407 generic function declaration. In that particular case,
23408 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23409 That DIE has children DIEs representing the set of arguments
23410 of the pack. Note that the set of pack arguments can be empty.
23411 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23412 children DIE.
23413
23414 Otherwise, we just consider the parameters of DECL. */
23415 while (generic_decl_parm || parm)
23416 {
23417 if (generic_decl_parm
23418 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23419 gen_formal_parameter_pack_die (generic_decl_parm,
23420 parm, subr_die,
23421 &parm);
23422 else if (parm)
23423 {
23424 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23425
23426 if (early_dwarf
23427 && parm == DECL_ARGUMENTS (decl)
23428 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23429 && parm_die
23430 && (dwarf_version >= 3 || !dwarf_strict))
23431 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23432
23433 parm = DECL_CHAIN (parm);
23434 }
23435
23436 if (generic_decl_parm)
23437 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23438 }
23439
23440 /* Decide whether we need an unspecified_parameters DIE at the end.
23441 There are 2 more cases to do this for: 1) the ANSI ... declaration -
23442 this is detectable when the end of the arg list is not a
23443 void_type_node; 2) an unprototyped function declaration (not a
23444 definition). This just means that we have no info about the
23445 parameters at all. */
23446 if (early_dwarf)
23447 {
23448 if (prototype_p (TREE_TYPE (decl)))
23449 {
23450 /* This is the prototyped case, check for a trailing ellipsis. */
23451 if (stdarg_p (TREE_TYPE (decl)))
23452 gen_unspecified_parameters_die (decl, subr_die);
23453 }
23454 else if (DECL_INITIAL (decl) == NULL_TREE)
23455 gen_unspecified_parameters_die (decl, subr_die);
23456 }
23457 else if ((subr_die != old_die || old_die_had_no_children)
23458 && prototype_p (TREE_TYPE (decl))
23459 && stdarg_p (TREE_TYPE (decl)))
23460 gen_unspecified_parameters_die (decl, subr_die);
23461 }
23462
23463 if (subr_die != old_die)
23464 /* Add the calling convention attribute if requested. */
23465 add_calling_convention_attribute (subr_die, decl);
23466
23467 /* Output Dwarf info for all of the stuff within the body of the function
23468 (if it has one - it may be just a declaration).
23469
23470 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23471 a function. This BLOCK actually represents the outermost binding contour
23472 for the function, i.e. the contour in which the function's formal
23473 parameters and labels get declared. Curiously, it appears that the front
23474 end doesn't actually put the PARM_DECL nodes for the current function onto
23475 the BLOCK_VARS list for this outer scope; they are strung off of the
23476 DECL_ARGUMENTS list for the function instead.
23477
23478 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23479 the LABEL_DECL nodes for the function however, and we output DWARF info
23480 for those in decls_for_scope. Just within the `outer_scope' there will be
23481 a BLOCK node representing the function's outermost pair of curly braces,
23482 and any blocks used for the base and member initializers of a C++
23483 constructor function. */
23484 tree outer_scope = DECL_INITIAL (decl);
23485 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23486 {
23487 int call_site_note_count = 0;
23488 int tail_call_site_note_count = 0;
23489
23490 /* Emit a DW_TAG_variable DIE for a named return value. */
23491 if (DECL_NAME (DECL_RESULT (decl)))
23492 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23493
23494 /* The first time through decls_for_scope we will generate the
23495 DIEs for the locals. The second time, we fill in the
23496 location info. */
23497 decls_for_scope (outer_scope, subr_die);
23498
23499 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23500 {
23501 struct call_arg_loc_node *ca_loc;
23502 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23503 {
23504 dw_die_ref die = NULL;
23505 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23506 rtx arg, next_arg;
23507 tree arg_decl = NULL_TREE;
23508
23509 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23510 ? XEXP (ca_loc->call_arg_loc_note, 0)
23511 : NULL_RTX);
23512 arg; arg = next_arg)
23513 {
23514 dw_loc_descr_ref reg, val;
23515 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23516 dw_die_ref cdie, tdie = NULL;
23517
23518 next_arg = XEXP (arg, 1);
23519 if (REG_P (XEXP (XEXP (arg, 0), 0))
23520 && next_arg
23521 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23522 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23523 && REGNO (XEXP (XEXP (arg, 0), 0))
23524 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23525 next_arg = XEXP (next_arg, 1);
23526 if (mode == VOIDmode)
23527 {
23528 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23529 if (mode == VOIDmode)
23530 mode = GET_MODE (XEXP (arg, 0));
23531 }
23532 if (mode == VOIDmode || mode == BLKmode)
23533 continue;
23534 /* Get dynamic information about call target only if we
23535 have no static information: we cannot generate both
23536 DW_AT_call_origin and DW_AT_call_target
23537 attributes. */
23538 if (ca_loc->symbol_ref == NULL_RTX)
23539 {
23540 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23541 {
23542 tloc = XEXP (XEXP (arg, 0), 1);
23543 continue;
23544 }
23545 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23546 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23547 {
23548 tlocc = XEXP (XEXP (arg, 0), 1);
23549 continue;
23550 }
23551 }
23552 reg = NULL;
23553 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23554 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23555 VAR_INIT_STATUS_INITIALIZED);
23556 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23557 {
23558 rtx mem = XEXP (XEXP (arg, 0), 0);
23559 reg = mem_loc_descriptor (XEXP (mem, 0),
23560 get_address_mode (mem),
23561 GET_MODE (mem),
23562 VAR_INIT_STATUS_INITIALIZED);
23563 }
23564 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23565 == DEBUG_PARAMETER_REF)
23566 {
23567 tree tdecl
23568 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23569 tdie = lookup_decl_die (tdecl);
23570 if (tdie == NULL)
23571 continue;
23572 arg_decl = tdecl;
23573 }
23574 else
23575 continue;
23576 if (reg == NULL
23577 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23578 != DEBUG_PARAMETER_REF)
23579 continue;
23580 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23581 VOIDmode,
23582 VAR_INIT_STATUS_INITIALIZED);
23583 if (val == NULL)
23584 continue;
23585 if (die == NULL)
23586 die = gen_call_site_die (decl, subr_die, ca_loc);
23587 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23588 NULL_TREE);
23589 add_desc_attribute (cdie, arg_decl);
23590 if (reg != NULL)
23591 add_AT_loc (cdie, DW_AT_location, reg);
23592 else if (tdie != NULL)
23593 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23594 tdie);
23595 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23596 if (next_arg != XEXP (arg, 1))
23597 {
23598 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23599 if (mode == VOIDmode)
23600 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23601 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23602 0), 1),
23603 mode, VOIDmode,
23604 VAR_INIT_STATUS_INITIALIZED);
23605 if (val != NULL)
23606 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23607 val);
23608 }
23609 }
23610 if (die == NULL
23611 && (ca_loc->symbol_ref || tloc))
23612 die = gen_call_site_die (decl, subr_die, ca_loc);
23613 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23614 {
23615 dw_loc_descr_ref tval = NULL;
23616
23617 if (tloc != NULL_RTX)
23618 tval = mem_loc_descriptor (tloc,
23619 GET_MODE (tloc) == VOIDmode
23620 ? Pmode : GET_MODE (tloc),
23621 VOIDmode,
23622 VAR_INIT_STATUS_INITIALIZED);
23623 if (tval)
23624 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23625 else if (tlocc != NULL_RTX)
23626 {
23627 tval = mem_loc_descriptor (tlocc,
23628 GET_MODE (tlocc) == VOIDmode
23629 ? Pmode : GET_MODE (tlocc),
23630 VOIDmode,
23631 VAR_INIT_STATUS_INITIALIZED);
23632 if (tval)
23633 add_AT_loc (die,
23634 dwarf_AT (DW_AT_call_target_clobbered),
23635 tval);
23636 }
23637 }
23638 if (die != NULL)
23639 {
23640 call_site_note_count++;
23641 if (ca_loc->tail_call_p)
23642 tail_call_site_note_count++;
23643 }
23644 }
23645 }
23646 call_arg_locations = NULL;
23647 call_arg_loc_last = NULL;
23648 if (tail_call_site_count >= 0
23649 && tail_call_site_count == tail_call_site_note_count
23650 && (!dwarf_strict || dwarf_version >= 5))
23651 {
23652 if (call_site_count >= 0
23653 && call_site_count == call_site_note_count)
23654 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23655 else
23656 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23657 }
23658 call_site_count = -1;
23659 tail_call_site_count = -1;
23660 }
23661
23662 /* Mark used types after we have created DIEs for the functions scopes. */
23663 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23664 }
23665
23666 /* Returns a hash value for X (which really is a die_struct). */
23667
23668 hashval_t
23669 block_die_hasher::hash (die_struct *d)
23670 {
23671 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23672 }
23673
23674 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23675 as decl_id and die_parent of die_struct Y. */
23676
23677 bool
23678 block_die_hasher::equal (die_struct *x, die_struct *y)
23679 {
23680 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23681 }
23682
23683 /* Hold information about markers for inlined entry points. */
23684 struct GTY ((for_user)) inline_entry_data
23685 {
23686 /* The block that's the inlined_function_outer_scope for an inlined
23687 function. */
23688 tree block;
23689
23690 /* The label at the inlined entry point. */
23691 const char *label_pfx;
23692 unsigned int label_num;
23693
23694 /* The view number to be used as the inlined entry point. */
23695 var_loc_view view;
23696 };
23697
23698 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23699 {
23700 typedef tree compare_type;
23701 static inline hashval_t hash (const inline_entry_data *);
23702 static inline bool equal (const inline_entry_data *, const_tree);
23703 };
23704
23705 /* Hash table routines for inline_entry_data. */
23706
23707 inline hashval_t
23708 inline_entry_data_hasher::hash (const inline_entry_data *data)
23709 {
23710 return htab_hash_pointer (data->block);
23711 }
23712
23713 inline bool
23714 inline_entry_data_hasher::equal (const inline_entry_data *data,
23715 const_tree block)
23716 {
23717 return data->block == block;
23718 }
23719
23720 /* Inlined entry points pending DIE creation in this compilation unit. */
23721
23722 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23723
23724
23725 /* Return TRUE if DECL, which may have been previously generated as
23726 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23727 true if decl (or its origin) is either an extern declaration or a
23728 class/namespace scoped declaration.
23729
23730 The declare_in_namespace support causes us to get two DIEs for one
23731 variable, both of which are declarations. We want to avoid
23732 considering one to be a specification, so we must test for
23733 DECLARATION and DW_AT_declaration. */
23734 static inline bool
23735 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23736 {
23737 return (old_die && TREE_STATIC (decl) && !declaration
23738 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23739 }
23740
23741 /* Return true if DECL is a local static. */
23742
23743 static inline bool
23744 local_function_static (tree decl)
23745 {
23746 gcc_assert (VAR_P (decl));
23747 return TREE_STATIC (decl)
23748 && DECL_CONTEXT (decl)
23749 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23750 }
23751
23752 /* Return true iff DECL overrides (presumably completes) the type of
23753 OLD_DIE within CONTEXT_DIE. */
23754
23755 static bool
23756 override_type_for_decl_p (tree decl, dw_die_ref old_die,
23757 dw_die_ref context_die)
23758 {
23759 tree type = TREE_TYPE (decl);
23760 int cv_quals;
23761
23762 if (decl_by_reference_p (decl))
23763 {
23764 type = TREE_TYPE (type);
23765 cv_quals = TYPE_UNQUALIFIED;
23766 }
23767 else
23768 cv_quals = decl_quals (decl);
23769
23770 dw_die_ref type_die = modified_type_die (type,
23771 cv_quals | TYPE_QUALS (type),
23772 false,
23773 context_die);
23774
23775 dw_die_ref old_type_die = get_AT_ref (old_die, DW_AT_type);
23776
23777 return type_die != old_type_die;
23778 }
23779
23780 /* Generate a DIE to represent a declared data object.
23781 Either DECL or ORIGIN must be non-null. */
23782
23783 static void
23784 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23785 {
23786 HOST_WIDE_INT off = 0;
23787 tree com_decl;
23788 tree decl_or_origin = decl ? decl : origin;
23789 tree ultimate_origin;
23790 dw_die_ref var_die;
23791 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23792 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23793 || class_or_namespace_scope_p (context_die));
23794 bool specialization_p = false;
23795 bool no_linkage_name = false;
23796
23797 /* While C++ inline static data members have definitions inside of the
23798 class, force the first DIE to be a declaration, then let gen_member_die
23799 reparent it to the class context and call gen_variable_die again
23800 to create the outside of the class DIE for the definition. */
23801 if (!declaration
23802 && old_die == NULL
23803 && decl
23804 && DECL_CONTEXT (decl)
23805 && TYPE_P (DECL_CONTEXT (decl))
23806 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23807 {
23808 declaration = true;
23809 if (dwarf_version < 5)
23810 no_linkage_name = true;
23811 }
23812
23813 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23814 if (decl || ultimate_origin)
23815 origin = ultimate_origin;
23816 com_decl = fortran_common (decl_or_origin, &off);
23817
23818 /* Symbol in common gets emitted as a child of the common block, in the form
23819 of a data member. */
23820 if (com_decl)
23821 {
23822 dw_die_ref com_die;
23823 dw_loc_list_ref loc = NULL;
23824 die_node com_die_arg;
23825
23826 var_die = lookup_decl_die (decl_or_origin);
23827 if (var_die)
23828 {
23829 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23830 {
23831 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23832 if (loc)
23833 {
23834 if (off)
23835 {
23836 /* Optimize the common case. */
23837 if (single_element_loc_list_p (loc)
23838 && loc->expr->dw_loc_opc == DW_OP_addr
23839 && loc->expr->dw_loc_next == NULL
23840 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23841 == SYMBOL_REF)
23842 {
23843 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23844 loc->expr->dw_loc_oprnd1.v.val_addr
23845 = plus_constant (GET_MODE (x), x , off);
23846 }
23847 else
23848 loc_list_plus_const (loc, off);
23849 }
23850 add_AT_location_description (var_die, DW_AT_location, loc);
23851 remove_AT (var_die, DW_AT_declaration);
23852 }
23853 }
23854 return;
23855 }
23856
23857 if (common_block_die_table == NULL)
23858 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23859
23860 com_die_arg.decl_id = DECL_UID (com_decl);
23861 com_die_arg.die_parent = context_die;
23862 com_die = common_block_die_table->find (&com_die_arg);
23863 if (! early_dwarf)
23864 loc = loc_list_from_tree (com_decl, 2, NULL);
23865 if (com_die == NULL)
23866 {
23867 const char *cnam
23868 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23869 die_node **slot;
23870
23871 com_die = new_die (DW_TAG_common_block, context_die, decl);
23872 add_name_and_src_coords_attributes (com_die, com_decl);
23873 if (loc)
23874 {
23875 add_AT_location_description (com_die, DW_AT_location, loc);
23876 /* Avoid sharing the same loc descriptor between
23877 DW_TAG_common_block and DW_TAG_variable. */
23878 loc = loc_list_from_tree (com_decl, 2, NULL);
23879 }
23880 else if (DECL_EXTERNAL (decl_or_origin))
23881 add_AT_flag (com_die, DW_AT_declaration, 1);
23882 if (want_pubnames ())
23883 add_pubname_string (cnam, com_die); /* ??? needed? */
23884 com_die->decl_id = DECL_UID (com_decl);
23885 slot = common_block_die_table->find_slot (com_die, INSERT);
23886 *slot = com_die;
23887 }
23888 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23889 {
23890 add_AT_location_description (com_die, DW_AT_location, loc);
23891 loc = loc_list_from_tree (com_decl, 2, NULL);
23892 remove_AT (com_die, DW_AT_declaration);
23893 }
23894 var_die = new_die (DW_TAG_variable, com_die, decl);
23895 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23896 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23897 decl_quals (decl_or_origin), false,
23898 context_die);
23899 add_alignment_attribute (var_die, decl);
23900 add_AT_flag (var_die, DW_AT_external, 1);
23901 if (loc)
23902 {
23903 if (off)
23904 {
23905 /* Optimize the common case. */
23906 if (single_element_loc_list_p (loc)
23907 && loc->expr->dw_loc_opc == DW_OP_addr
23908 && loc->expr->dw_loc_next == NULL
23909 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23910 {
23911 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23912 loc->expr->dw_loc_oprnd1.v.val_addr
23913 = plus_constant (GET_MODE (x), x, off);
23914 }
23915 else
23916 loc_list_plus_const (loc, off);
23917 }
23918 add_AT_location_description (var_die, DW_AT_location, loc);
23919 }
23920 else if (DECL_EXTERNAL (decl_or_origin))
23921 add_AT_flag (var_die, DW_AT_declaration, 1);
23922 if (decl)
23923 equate_decl_number_to_die (decl, var_die);
23924 return;
23925 }
23926
23927 if (old_die)
23928 {
23929 if (declaration)
23930 {
23931 /* A declaration that has been previously dumped needs no
23932 further annotations, since it doesn't need location on
23933 the second pass. */
23934 return;
23935 }
23936 else if (decl_will_get_specification_p (old_die, decl, declaration)
23937 && !get_AT (old_die, DW_AT_specification))
23938 {
23939 /* Fall-thru so we can make a new variable die along with a
23940 DW_AT_specification. */
23941 }
23942 else if (origin && old_die->die_parent != context_die)
23943 {
23944 /* If we will be creating an inlined instance, we need a
23945 new DIE that will get annotated with
23946 DW_AT_abstract_origin. */
23947 gcc_assert (!DECL_ABSTRACT_P (decl));
23948 }
23949 else
23950 {
23951 /* If a DIE was dumped early, it still needs location info.
23952 Skip to where we fill the location bits. */
23953 var_die = old_die;
23954
23955 /* ??? In LTRANS we cannot annotate early created variably
23956 modified type DIEs without copying them and adjusting all
23957 references to them. Thus we dumped them again. Also add a
23958 reference to them but beware of -g0 compile and -g link
23959 in which case the reference will be already present. */
23960 tree type = TREE_TYPE (decl_or_origin);
23961 if (in_lto_p
23962 && ! get_AT (var_die, DW_AT_type)
23963 && variably_modified_type_p
23964 (type, decl_function_context (decl_or_origin)))
23965 {
23966 if (decl_by_reference_p (decl_or_origin))
23967 add_type_attribute (var_die, TREE_TYPE (type),
23968 TYPE_UNQUALIFIED, false, context_die);
23969 else
23970 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23971 false, context_die);
23972 }
23973
23974 goto gen_variable_die_location;
23975 }
23976 }
23977
23978 /* For static data members, the declaration in the class is supposed
23979 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23980 also in DWARF2; the specification should still be DW_TAG_variable
23981 referencing the DW_TAG_member DIE. */
23982 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23983 var_die = new_die (DW_TAG_member, context_die, decl);
23984 else
23985 var_die = new_die (DW_TAG_variable, context_die, decl);
23986
23987 if (origin != NULL)
23988 add_abstract_origin_attribute (var_die, origin);
23989
23990 /* Loop unrolling can create multiple blocks that refer to the same
23991 static variable, so we must test for the DW_AT_declaration flag.
23992
23993 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23994 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23995 sharing them.
23996
23997 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23998 else if (decl_will_get_specification_p (old_die, decl, declaration))
23999 {
24000 /* This is a definition of a C++ class level static. */
24001 add_AT_specification (var_die, old_die);
24002 specialization_p = true;
24003 if (DECL_NAME (decl))
24004 {
24005 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
24006 struct dwarf_file_data * file_index = lookup_filename (s.file);
24007
24008 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
24009 add_AT_file (var_die, DW_AT_decl_file, file_index);
24010
24011 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
24012 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
24013
24014 if (debug_column_info
24015 && s.column
24016 && (get_AT_unsigned (old_die, DW_AT_decl_column)
24017 != (unsigned) s.column))
24018 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
24019
24020 if (old_die->die_tag == DW_TAG_member)
24021 add_linkage_name (var_die, decl);
24022 }
24023 }
24024 else
24025 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
24026
24027 if ((origin == NULL && !specialization_p)
24028 || (origin != NULL
24029 && !DECL_ABSTRACT_P (decl_or_origin)
24030 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
24031 decl_function_context
24032 (decl_or_origin)))
24033 || (old_die && specialization_p
24034 && override_type_for_decl_p (decl_or_origin, old_die, context_die)))
24035 {
24036 tree type = TREE_TYPE (decl_or_origin);
24037
24038 if (decl_by_reference_p (decl_or_origin))
24039 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24040 context_die);
24041 else
24042 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
24043 context_die);
24044 }
24045
24046 if (origin == NULL && !specialization_p)
24047 {
24048 if (TREE_PUBLIC (decl))
24049 add_AT_flag (var_die, DW_AT_external, 1);
24050
24051 if (DECL_ARTIFICIAL (decl))
24052 add_AT_flag (var_die, DW_AT_artificial, 1);
24053
24054 add_alignment_attribute (var_die, decl);
24055
24056 add_accessibility_attribute (var_die, decl);
24057 }
24058
24059 if (declaration)
24060 add_AT_flag (var_die, DW_AT_declaration, 1);
24061
24062 if (decl && (DECL_ABSTRACT_P (decl)
24063 || !old_die || is_declaration_die (old_die)))
24064 equate_decl_number_to_die (decl, var_die);
24065
24066 gen_variable_die_location:
24067 if (! declaration
24068 && (! DECL_ABSTRACT_P (decl_or_origin)
24069 /* Local static vars are shared between all clones/inlines,
24070 so emit DW_AT_location on the abstract DIE if DECL_RTL is
24071 already set. */
24072 || (VAR_P (decl_or_origin)
24073 && TREE_STATIC (decl_or_origin)
24074 && DECL_RTL_SET_P (decl_or_origin))))
24075 {
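      /* Descriptive note: during early DWARF generation variable locations
	 are not known yet, so only record the name in the pubname table
	 here; the location or constant value is filled in during late
	 annotation.  */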
24076 if (early_dwarf)
24077 add_pubname (decl_or_origin, var_die);
24078 else
24079 add_location_or_const_value_attribute (var_die, decl_or_origin,
24080 decl == NULL);
24081 }
24082 else
24083 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
24084
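  /* Descriptive note: DW_AT_const_expr is a DWARF 4 addition; the language
     hook reports whether the declaration was a constant expression (e.g. a
     C++11 constexpr variable), so it is omitted for strict pre-DWARF 4
     output.  */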
24085 if ((dwarf_version >= 4 || !dwarf_strict)
24086 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
24087 DW_AT_const_expr) == 1
24088 && !get_AT (var_die, DW_AT_const_expr)
24089 && !specialization_p)
24090 add_AT_flag (var_die, DW_AT_const_expr, 1);
24091
24092 if (!dwarf_strict)
24093 {
24094 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
24095 DW_AT_inline);
24096 if (inl != -1
24097 && !get_AT (var_die, DW_AT_inline)
24098 && !specialization_p)
24099 add_AT_unsigned (var_die, DW_AT_inline, inl);
24100 }
24101 }
24102
24103 /* Generate a DIE to represent a named constant. */
24104
24105 static void
24106 gen_const_die (tree decl, dw_die_ref context_die)
24107 {
24108 dw_die_ref const_die;
24109 tree type = TREE_TYPE (decl);
24110
24111 const_die = lookup_decl_die (decl);
24112 if (const_die)
24113 return;
24114
24115 const_die = new_die (DW_TAG_constant, context_die, decl);
24116 equate_decl_number_to_die (decl, const_die);
24117 add_name_and_src_coords_attributes (const_die, decl);
24118 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
24119 if (TREE_PUBLIC (decl))
24120 add_AT_flag (const_die, DW_AT_external, 1);
24121 if (DECL_ARTIFICIAL (decl))
24122 add_AT_flag (const_die, DW_AT_artificial, 1);
24123 tree_add_const_value_attribute_for_decl (const_die, decl);
24124 }
24125
24126 /* Generate a DIE to represent a label identifier. */
24127
24128 static void
24129 gen_label_die (tree decl, dw_die_ref context_die)
24130 {
24131 tree origin = decl_ultimate_origin (decl);
24132 dw_die_ref lbl_die = lookup_decl_die (decl);
24133 rtx insn;
24134 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24135
24136 if (!lbl_die)
24137 {
24138 lbl_die = new_die (DW_TAG_label, context_die, decl);
24139 equate_decl_number_to_die (decl, lbl_die);
24140
24141 if (origin != NULL)
24142 add_abstract_origin_attribute (lbl_die, origin);
24143 else
24144 add_name_and_src_coords_attributes (lbl_die, decl);
24145 }
24146
24147 if (DECL_ABSTRACT_P (decl))
24148 equate_decl_number_to_die (decl, lbl_die);
24149 else if (! early_dwarf)
24150 {
24151 insn = DECL_RTL_IF_SET (decl);
24152
24153 /* Deleted labels are programmer specified labels which have been
24154 eliminated because of various optimizations. We still emit them
24155 here so that it is possible to put breakpoints on them. */
24156 if (insn
24157 && (LABEL_P (insn)
24158 || ((NOTE_P (insn)
24159 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
24160 {
24161 /* When optimization is enabled (via -O) some parts of the compiler
24162 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
24163 represent source-level labels which were explicitly declared by
24164 the user. This really shouldn't be happening though, so catch
24165 it if it ever does happen. */
24166 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
24167
24168 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
24169 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24170 }
24171 else if (insn
24172 && NOTE_P (insn)
24173 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
24174 && CODE_LABEL_NUMBER (insn) != -1)
24175 {
24176 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24177 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24178 }
24179 }
24180 }
24181
24182 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24183 attributes to the DIE for a block STMT, to describe where the inlined
24184 function was called from. This is similar to add_src_coords_attributes. */
24185
24186 static inline void
24187 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24188 {
24189 /* We can end up with BUILTINS_LOCATION here. */
24190 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24191 return;
24192
24193 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24194
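  /* DW_AT_call_file, DW_AT_call_line and DW_AT_call_column were introduced
     in DWARF 3, so they are omitted for strict DWARF 2 output.  */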
24195 if (dwarf_version >= 3 || !dwarf_strict)
24196 {
24197 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24198 add_AT_unsigned (die, DW_AT_call_line, s.line);
24199 if (debug_column_info && s.column)
24200 add_AT_unsigned (die, DW_AT_call_column, s.column);
24201 }
24202 }
24203
24204
24205 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24206 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24207
24208 static inline void
24209 add_high_low_attributes (tree stmt, dw_die_ref die)
24210 {
24211 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24212
24213 if (inline_entry_data **iedp
24214 = !inline_entry_data_table ? NULL
24215 : inline_entry_data_table->find_slot_with_hash (stmt,
24216 htab_hash_pointer (stmt),
24217 NO_INSERT))
24218 {
24219 inline_entry_data *ied = *iedp;
24220 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24221 gcc_assert (debug_inline_points);
24222 gcc_assert (inlined_function_outer_scope_p (stmt));
24223
24224 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24225 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24226
24227 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24228 && !dwarf_strict)
24229 {
24230 if (!output_asm_line_debug_info ())
24231 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24232 else
24233 {
24234 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24235 /* FIXME: this will resolve to a small number. Could we
24236 possibly emit smaller data? Ideally we'd emit a
24237 uleb128, but that would make the size of DIEs
24238 impossible for the compiler to compute, since it's
24239 the assembler that computes the value of the view
24240 label in this case. Ideally, we'd have a single form
24241 encompassing both the address and the view, and
24242 indirecting them through a table might make things
24243 easier, but even that would be more wasteful,
24244 space-wise, than what we have now. */
24245 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24246 }
24247 }
24248
24249 inline_entry_data_table->clear_slot (iedp);
24250 }
24251
24252 if (BLOCK_FRAGMENT_CHAIN (stmt)
24253 && (dwarf_version >= 3 || !dwarf_strict))
24254 {
24255 tree chain, superblock = NULL_TREE;
24256 dw_die_ref pdie;
24257 dw_attr_node *attr = NULL;
24258
24259 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24260 {
24261 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24262 BLOCK_NUMBER (stmt));
24263 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24264 }
24265
24266 /* Optimize duplicate .debug_ranges lists or even tails of
24267 	 lists.  If this BLOCK has the same ranges as its supercontext,
24268 	 look up the DW_AT_ranges attribute in the supercontext (and
24269 	 recursively so), verify that the ranges_table contains the right
24270 	 values and use it instead of adding a new .debug_ranges entry.  */
24271 for (chain = stmt, pdie = die;
24272 BLOCK_SAME_RANGE (chain);
24273 chain = BLOCK_SUPERCONTEXT (chain))
24274 {
24275 dw_attr_node *new_attr;
24276
24277 pdie = pdie->die_parent;
24278 if (pdie == NULL)
24279 break;
24280 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24281 break;
24282 new_attr = get_AT (pdie, DW_AT_ranges);
24283 if (new_attr == NULL
24284 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24285 break;
24286 attr = new_attr;
24287 superblock = BLOCK_SUPERCONTEXT (chain);
24288 }
24289 if (attr != NULL
24290 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24291 == (int)BLOCK_NUMBER (superblock))
24292 && BLOCK_FRAGMENT_CHAIN (superblock))
24293 {
24294 unsigned long off = attr->dw_attr_val.v.val_offset;
24295 unsigned long supercnt = 0, thiscnt = 0;
24296 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24297 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24298 {
24299 ++supercnt;
24300 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24301 == (int)BLOCK_NUMBER (chain));
24302 }
24303 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24304 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24305 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24306 ++thiscnt;
24307 gcc_assert (supercnt >= thiscnt);
24308 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24309 false);
24310 note_rnglist_head (off + supercnt - thiscnt);
24311 return;
24312 }
24313
24314 unsigned int offset = add_ranges (stmt, true);
24315 add_AT_range_list (die, DW_AT_ranges, offset, false);
24316 note_rnglist_head (offset);
24317
24318 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24319 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24320 do
24321 {
24322 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24323 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24324 chain = BLOCK_FRAGMENT_CHAIN (chain);
24325 }
24326 while (chain);
24327 add_ranges (NULL);
24328 }
24329 else
24330 {
24331 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24332 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24333 BLOCK_NUMBER (stmt));
24334 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24335 BLOCK_NUMBER (stmt));
24336 add_AT_low_high_pc (die, label, label_high, false);
24337 }
24338 }
24339
24340 /* Generate a DIE for a lexical block. */
24341
24342 static void
24343 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24344 {
24345 dw_die_ref old_die = lookup_block_die (stmt);
24346 dw_die_ref stmt_die = NULL;
24347 if (!old_die)
24348 {
24349 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24350 equate_block_to_die (stmt, stmt_die);
24351 }
24352
24353 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24354 {
24355 	  /* If this is an inlined or concrete instance, create a new lexical
24356 	     block DIE for anything below to attach DW_AT_abstract_origin to.  */
24357 if (old_die)
24358 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24359
24360 tree origin = block_ultimate_origin (stmt);
24361 if (origin != NULL_TREE && (origin != stmt || old_die))
24362 add_abstract_origin_attribute (stmt_die, origin);
24363
24364 old_die = NULL;
24365 }
24366
24367 if (old_die)
24368 stmt_die = old_die;
24369
24370 	/* A non-abstract block whose blocks have already been reordered
24371 should have the instruction range for this block. If so, set the
24372 high/low attributes. */
24373 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24374 {
24375 gcc_assert (stmt_die);
24376 add_high_low_attributes (stmt, stmt_die);
24377 }
24378
24379 decls_for_scope (stmt, stmt_die);
24380 }
24381
24382 /* Generate a DIE for an inlined subprogram. */
24383
24384 static void
24385 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24386 {
24387 tree decl = block_ultimate_origin (stmt);
24388
24389 /* Make sure any inlined functions are known to be inlineable. */
24390 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24391 || cgraph_function_possibly_inlined_p (decl));
24392
24393 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24394
24395 if (call_arg_locations || debug_inline_points)
24396 equate_block_to_die (stmt, subr_die);
24397 add_abstract_origin_attribute (subr_die, decl);
24398 if (TREE_ASM_WRITTEN (stmt))
24399 add_high_low_attributes (stmt, subr_die);
24400 add_call_src_coords_attributes (stmt, subr_die);
24401
24402 	  /* The inliner creates an extra BLOCK for the parameter setup;
24403 we want to merge that with the actual outermost BLOCK of the
24404 inlined function to avoid duplicate locals in consumers.
24405 Do that by doing the recursion to subblocks on the single subblock
24406 of STMT. */
24407 bool unwrap_one = false;
24408 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24409 {
24410 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24411 if (origin
24412 && TREE_CODE (origin) == BLOCK
24413 && BLOCK_SUPERCONTEXT (origin) == decl)
24414 unwrap_one = true;
24415 }
24416 decls_for_scope (stmt, subr_die, !unwrap_one);
24417 if (unwrap_one)
24418 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24419 }
24420
24421 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24422 the comment for VLR_CONTEXT. */
24423
24424 static void
24425 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24426 {
24427 dw_die_ref decl_die;
24428
24429 if (TREE_TYPE (decl) == error_mark_node)
24430 return;
24431
24432 decl_die = new_die (DW_TAG_member, context_die, decl);
24433 add_name_and_src_coords_attributes (decl_die, decl);
24434 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24435 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24436 context_die);
24437
24438 if (DECL_BIT_FIELD_TYPE (decl))
24439 {
24440 add_byte_size_attribute (decl_die, decl);
24441 add_bit_size_attribute (decl_die, decl);
24442 add_bit_offset_attribute (decl_die, decl);
24443 }
24444
24445 add_alignment_attribute (decl_die, decl);
24446
24447 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24448 add_data_member_location_attribute (decl_die, decl, ctx);
24449
24450 if (DECL_ARTIFICIAL (decl))
24451 add_AT_flag (decl_die, DW_AT_artificial, 1);
24452
24453 add_accessibility_attribute (decl_die, decl);
24454
24455 /* Equate decl number to die, so that we can look up this decl later on. */
24456 equate_decl_number_to_die (decl, decl_die);
24457 }
24458
24459 /* Generate a DIE for a pointer to a member type. TYPE can be an
24460 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24461 pointer to member function. */
24462
24463 static void
24464 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24465 {
24466 if (lookup_type_die (type))
24467 return;
24468
24469 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24470 scope_die_for (type, context_die), type);
24471
24472 equate_type_number_to_die (type, ptr_die);
24473 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24474 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24475 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24476 context_die);
24477 add_alignment_attribute (ptr_die, type);
24478
24479 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24480 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24481 {
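      /* Descriptive note (per the DWARF pointer-to-member rules): for a
	 pointer to data member, DW_AT_use_location is evaluated with the
	 pointer value (the member offset) and the object address pushed on
	 the DWARF stack, so a single DW_OP_plus yields the member's
	 address.  */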
24482 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24483 add_AT_loc (ptr_die, DW_AT_use_location, op);
24484 }
24485 }
24486
24487 static char *producer_string;
24488
24489 /* Given a C and/or C++ language/version string, return the "highest".
24490 C++ is assumed to be "higher" than C in this case. Used for merging
24491 LTO translation unit languages. */
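/* For instance, highest_c_language ("GNU C11", "GNU C++14") returns
   "GNU C++14", and highest_c_language ("GNU C99", "GNU C89") returns
   "GNU C99".  */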
24492 static const char *
24493 highest_c_language (const char *lang1, const char *lang2)
24494 {
24495 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24496 return "GNU C++17";
24497 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24498 return "GNU C++14";
24499 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24500 return "GNU C++11";
24501 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24502 return "GNU C++98";
24503
24504 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24505 return "GNU C2X";
24506 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24507 return "GNU C17";
24508 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24509 return "GNU C11";
24510 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24511 return "GNU C99";
24512 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24513 return "GNU C89";
24514
24515 gcc_unreachable ();
24516 }
24517
24518
24519 /* Generate the DIE for the compilation unit. */
24520
24521 static dw_die_ref
24522 gen_compile_unit_die (const char *filename)
24523 {
24524 dw_die_ref die;
24525 const char *language_string = lang_hooks.name;
24526 int language;
24527
24528 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24529
24530 if (filename)
24531 {
24532 add_filename_attribute (die, filename);
24533 /* Don't add cwd for <built-in>. */
24534 if (filename[0] != '<')
24535 add_comp_dir_attribute (die);
24536 }
24537
24538 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24539
24540 	  /* If our producer is LTO, try to figure out a common language to use
24541 from the global list of translation units. */
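  /* For instance, when a C99 TU and a C++14 TU are linked with LTO, the
     merged language is "GNU C++14", yielding DW_AT_language
     DW_LANG_C_plus_plus_14 for DWARF 5 (DW_LANG_C_plus_plus otherwise).  */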
24542 if (strcmp (language_string, "GNU GIMPLE") == 0)
24543 {
24544 unsigned i;
24545 tree t;
24546 const char *common_lang = NULL;
24547
24548 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24549 {
24550 if (!TRANSLATION_UNIT_LANGUAGE (t))
24551 continue;
24552 if (!common_lang)
24553 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24554 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24555 ;
24556 else if (strncmp (common_lang, "GNU C", 5) == 0
24557 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24558 /* Mixing C and C++ is ok, use C++ in that case. */
24559 common_lang = highest_c_language (common_lang,
24560 TRANSLATION_UNIT_LANGUAGE (t));
24561 else
24562 {
24563 /* Fall back to C. */
24564 common_lang = NULL;
24565 break;
24566 }
24567 }
24568
24569 if (common_lang)
24570 language_string = common_lang;
24571 }
24572
24573 language = DW_LANG_C;
24574 if (strncmp (language_string, "GNU C", 5) == 0
24575 && ISDIGIT (language_string[5]))
24576 {
24577 language = DW_LANG_C89;
24578 if (dwarf_version >= 3 || !dwarf_strict)
24579 {
24580 if (strcmp (language_string, "GNU C89") != 0)
24581 language = DW_LANG_C99;
24582
24583 if (dwarf_version >= 5 /* || !dwarf_strict */)
24584 if (strcmp (language_string, "GNU C11") == 0
24585 || strcmp (language_string, "GNU C17") == 0
24586 || strcmp (language_string, "GNU C2X") == 0)
24587 language = DW_LANG_C11;
24588 }
24589 }
24590 else if (strncmp (language_string, "GNU C++", 7) == 0)
24591 {
24592 language = DW_LANG_C_plus_plus;
24593 if (dwarf_version >= 5 /* || !dwarf_strict */)
24594 {
24595 if (strcmp (language_string, "GNU C++11") == 0)
24596 language = DW_LANG_C_plus_plus_11;
24597 else if (strcmp (language_string, "GNU C++14") == 0)
24598 language = DW_LANG_C_plus_plus_14;
24599 else if (strcmp (language_string, "GNU C++17") == 0
24600 || strcmp (language_string, "GNU C++20") == 0)
24601 /* For now. */
24602 language = DW_LANG_C_plus_plus_14;
24603 }
24604 }
24605 else if (strcmp (language_string, "GNU F77") == 0)
24606 language = DW_LANG_Fortran77;
24607 else if (dwarf_version >= 3 || !dwarf_strict)
24608 {
24609 if (strcmp (language_string, "GNU Ada") == 0)
24610 language = DW_LANG_Ada95;
24611 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24612 {
24613 language = DW_LANG_Fortran95;
24614 if (dwarf_version >= 5 /* || !dwarf_strict */)
24615 {
24616 if (strcmp (language_string, "GNU Fortran2003") == 0)
24617 language = DW_LANG_Fortran03;
24618 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24619 language = DW_LANG_Fortran08;
24620 }
24621 }
24622 else if (strcmp (language_string, "GNU Objective-C") == 0)
24623 language = DW_LANG_ObjC;
24624 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24625 language = DW_LANG_ObjC_plus_plus;
24626 else if (strcmp (language_string, "GNU D") == 0)
24627 language = DW_LANG_D;
24628 else if (dwarf_version >= 5 || !dwarf_strict)
24629 {
24630 if (strcmp (language_string, "GNU Go") == 0)
24631 language = DW_LANG_Go;
24632 }
24633 }
24634 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24635 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24636 language = DW_LANG_Fortran90;
24637 /* Likewise for Ada. */
24638 else if (strcmp (language_string, "GNU Ada") == 0)
24639 language = DW_LANG_Ada83;
24640
24641 add_AT_unsigned (die, DW_AT_language, language);
24642
24643 switch (language)
24644 {
24645 case DW_LANG_Fortran77:
24646 case DW_LANG_Fortran90:
24647 case DW_LANG_Fortran95:
24648 case DW_LANG_Fortran03:
24649 case DW_LANG_Fortran08:
24650 /* Fortran has case insensitive identifiers and the front-end
24651 lowercases everything. */
24652 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24653 break;
24654 default:
24655 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24656 break;
24657 }
24658 return die;
24659 }
24660
24661 /* Generate the DIE for a base class. */
24662
24663 static void
24664 gen_inheritance_die (tree binfo, tree access, tree type,
24665 dw_die_ref context_die)
24666 {
24667 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24668 struct vlr_context ctx = { type, NULL };
24669
24670 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24671 context_die);
24672 add_data_member_location_attribute (die, binfo, &ctx);
24673
24674 if (BINFO_VIRTUAL_P (binfo))
24675 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24676
24677 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24678 children, otherwise the default is DW_ACCESS_public. In DWARF2
24679 the default has always been DW_ACCESS_private. */
24680 if (access == access_public_node)
24681 {
24682 if (dwarf_version == 2
24683 || context_die->die_tag == DW_TAG_class_type)
24684 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24685 }
24686 else if (access == access_protected_node)
24687 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24688 else if (dwarf_version > 2
24689 && context_die->die_tag != DW_TAG_class_type)
24690 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24691 }
24692
24693 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24694 structure. */
24695
24696 static bool
24697 is_variant_part (tree decl)
24698 {
24699 return (TREE_CODE (decl) == FIELD_DECL
24700 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24701 }
24702
24703 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24704 return the FIELD_DECL. Return NULL_TREE otherwise. */
24705
24706 static tree
24707 analyze_discr_in_predicate (tree operand, tree struct_type)
24708 {
24709 while (CONVERT_EXPR_P (operand))
24710 operand = TREE_OPERAND (operand, 0);
24711
24712 /* Match field access to members of struct_type only. */
24713 if (TREE_CODE (operand) == COMPONENT_REF
24714 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24715 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24716 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24717 return TREE_OPERAND (operand, 1);
24718 else
24719 return NULL_TREE;
24720 }
24721
24722 /* Check that SRC is a constant integer that can be represented as a native
24723 integer constant (either signed or unsigned). If so, store it into DEST and
24724 return true. Return false otherwise. */
24725
24726 static bool
24727 get_discr_value (tree src, dw_discr_value *dest)
24728 {
24729 tree discr_type = TREE_TYPE (src);
24730
24731 if (lang_hooks.types.get_debug_type)
24732 {
24733 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24734 if (debug_type != NULL)
24735 discr_type = debug_type;
24736 }
24737
24738 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24739 return false;
24740
24741 /* Signedness can vary between the original type and the debug type. This
24742 can happen for character types in Ada for instance: the character type
24743 used for code generation can be signed, to be compatible with the C one,
24744 but from a debugger point of view, it must be unsigned. */
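  /* For instance, if the code generation type is a signed 8-bit character
     holding -56 while the debug type is unsigned, the value is re-folded
     below and recorded as the unsigned value 200.  */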
24745 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24746 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24747
24748 if (is_orig_unsigned != is_debug_unsigned)
24749 src = fold_convert (discr_type, src);
24750
24751 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24752 return false;
24753
24754 dest->pos = is_debug_unsigned;
24755 if (is_debug_unsigned)
24756 dest->v.uval = tree_to_uhwi (src);
24757 else
24758 dest->v.sval = tree_to_shwi (src);
24759
24760 return true;
24761 }
24762
24763 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24764 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24765 store NULL_TREE in DISCR_DECL. Otherwise:
24766
24767 - store the discriminant field in STRUCT_TYPE that controls the variant
24768 part to *DISCR_DECL
24769
24770 - put in *DISCR_LISTS_P an array where for each variant, the item
24771 represents the corresponding matching list of discriminant values.
24772
24773 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24774 the above array.
24775
24776 Note that when the array is allocated (i.e. when the analysis is
24777 successful), it is up to the caller to free the array. */
24778
24779 static void
24780 analyze_variants_discr (tree variant_part_decl,
24781 tree struct_type,
24782 tree *discr_decl,
24783 dw_discr_list_ref **discr_lists_p,
24784 unsigned *discr_lists_length)
24785 {
24786 tree variant_part_type = TREE_TYPE (variant_part_decl);
24787 tree variant;
24788 dw_discr_list_ref *discr_lists;
24789 unsigned i;
24790
24791 /* Compute how many variants there are in this variant part. */
24792 *discr_lists_length = 0;
24793 for (variant = TYPE_FIELDS (variant_part_type);
24794 variant != NULL_TREE;
24795 variant = DECL_CHAIN (variant))
24796 ++*discr_lists_length;
24797
24798 *discr_decl = NULL_TREE;
24799 *discr_lists_p
24800 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24801 sizeof (**discr_lists_p));
24802 discr_lists = *discr_lists_p;
24803
24804 /* And then analyze all variants to extract discriminant information for all
24805 of them. This analysis is conservative: as soon as we detect something we
24806 do not support, abort everything and pretend we found nothing. */
24807 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24808 variant != NULL_TREE;
24809 variant = DECL_CHAIN (variant), ++i)
24810 {
24811 tree match_expr = DECL_QUALIFIER (variant);
24812
24813 /* Now, try to analyze the predicate and deduce a discriminant for
24814 it. */
24815 if (match_expr == boolean_true_node)
24816 /* Typically happens for the default variant: it matches all cases that
24817 previous variants rejected. Don't output any matching value for
24818 this one. */
24819 continue;
24820
24821 /* The following loop tries to iterate over each discriminant
24822 possibility: single values or ranges. */
24823 while (match_expr != NULL_TREE)
24824 {
24825 tree next_round_match_expr;
24826 tree candidate_discr = NULL_TREE;
24827 dw_discr_list_ref new_node = NULL;
24828
24829 /* Possibilities are matched one after the other by nested
24830 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24831 continue with the rest at next iteration. */
24832 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24833 {
24834 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24835 match_expr = TREE_OPERAND (match_expr, 1);
24836 }
24837 else
24838 next_round_match_expr = NULL_TREE;
24839
24840 if (match_expr == boolean_false_node)
24841 /* This sub-expression matches nothing: just wait for the next
24842 one. */
24843 ;
24844
24845 else if (TREE_CODE (match_expr) == EQ_EXPR)
24846 {
24847 /* We are matching: <discr_field> == <integer_cst>
24848 This sub-expression matches a single value. */
24849 tree integer_cst = TREE_OPERAND (match_expr, 1);
24850
24851 candidate_discr
24852 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24853 struct_type);
24854
24855 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24856 if (!get_discr_value (integer_cst,
24857 &new_node->dw_discr_lower_bound))
24858 goto abort;
24859 new_node->dw_discr_range = false;
24860 }
24861
24862 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24863 {
24864 /* We are matching:
24865 <discr_field> > <integer_cst>
24866 && <discr_field> < <integer_cst>.
24867 This sub-expression matches the range of values between the
24868 two matched integer constants. Note that comparisons can be
24869 inclusive or exclusive. */
24870 tree candidate_discr_1, candidate_discr_2;
24871 tree lower_cst, upper_cst;
24872 bool lower_cst_included, upper_cst_included;
24873 tree lower_op = TREE_OPERAND (match_expr, 0);
24874 tree upper_op = TREE_OPERAND (match_expr, 1);
24875
24876 /* When the comparison is exclusive, the integer constant is not
24877 the discriminant range bound we are looking for: we will have
24878 to increment or decrement it. */
24879 if (TREE_CODE (lower_op) == GE_EXPR)
24880 lower_cst_included = true;
24881 else if (TREE_CODE (lower_op) == GT_EXPR)
24882 lower_cst_included = false;
24883 else
24884 goto abort;
24885
24886 if (TREE_CODE (upper_op) == LE_EXPR)
24887 upper_cst_included = true;
24888 else if (TREE_CODE (upper_op) == LT_EXPR)
24889 upper_cst_included = false;
24890 else
24891 goto abort;
24892
24893 /* Extract the discriminant from the first operand and check it
24894 		 is consistent with the same analysis in the second
24895 operand. */
24896 candidate_discr_1
24897 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24898 struct_type);
24899 candidate_discr_2
24900 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24901 struct_type);
24902 if (candidate_discr_1 == candidate_discr_2)
24903 candidate_discr = candidate_discr_1;
24904 else
24905 goto abort;
24906
24907 /* Extract bounds from both. */
24908 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24909 lower_cst = TREE_OPERAND (lower_op, 1);
24910 upper_cst = TREE_OPERAND (upper_op, 1);
24911
24912 if (!lower_cst_included)
24913 lower_cst
24914 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24915 build_int_cst (TREE_TYPE (lower_cst), 1));
24916 if (!upper_cst_included)
24917 upper_cst
24918 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24919 build_int_cst (TREE_TYPE (upper_cst), 1));
24920
24921 if (!get_discr_value (lower_cst,
24922 &new_node->dw_discr_lower_bound)
24923 || !get_discr_value (upper_cst,
24924 &new_node->dw_discr_upper_bound))
24925 goto abort;
24926
24927 new_node->dw_discr_range = true;
24928 }
24929
24930 else if ((candidate_discr
24931 = analyze_discr_in_predicate (match_expr, struct_type))
24932 && (TREE_TYPE (candidate_discr) == boolean_type_node
24933 || TREE_TYPE (TREE_TYPE (candidate_discr))
24934 == boolean_type_node))
24935 {
24936 /* We are matching: <discr_field> for a boolean discriminant.
24937 This sub-expression matches boolean_true_node. */
24938 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24939 if (!get_discr_value (boolean_true_node,
24940 &new_node->dw_discr_lower_bound))
24941 goto abort;
24942 new_node->dw_discr_range = false;
24943 }
24944
24945 else
24946 /* Unsupported sub-expression: we cannot determine the set of
24947 matching discriminant values. Abort everything. */
24948 goto abort;
24949
24950 	      /* If the discriminant info is not consistent with what we saw so
24951 far, consider the analysis failed and abort everything. */
24952 if (candidate_discr == NULL_TREE
24953 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24954 goto abort;
24955 else
24956 *discr_decl = candidate_discr;
24957
24958 if (new_node != NULL)
24959 {
24960 new_node->dw_discr_next = discr_lists[i];
24961 discr_lists[i] = new_node;
24962 }
24963 match_expr = next_round_match_expr;
24964 }
24965 }
24966
24967 /* If we reach this point, we could match everything we were interested
24968 in. */
24969 return;
24970
24971 abort:
24972   /* Clean all data structures and return no result. */
24973 free (*discr_lists_p);
24974 *discr_lists_p = NULL;
24975 *discr_decl = NULL_TREE;
24976 }
24977
24978 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24979 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24980 under CONTEXT_DIE.
24981
24982 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24983 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24984 this type, which are record types, represent the available variants and each
24985 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24986 values are inferred from these attributes.
24987
24988 In trees, the offsets for the fields inside these sub-records are relative
24989 to the variant part itself, whereas the corresponding DIEs should have
24990 offset attributes that are relative to the embedding record base address.
24991 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24992 must be an expression that computes the offset of the variant part to
24993 describe in DWARF. */
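/* As a rough sketch, for a record with a discriminant member D and two
   variants, the emitted DWARF looks like:

       DW_TAG_variant_part
	 DW_AT_discr		reference to the DIE of member D
	 DW_TAG_variant
	   DW_AT_discr_value	0
	   DW_TAG_member	...
	 DW_TAG_variant		(default variant: no discr value/list)
	   DW_TAG_member	...  */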
24994
24995 static void
24996 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24997 dw_die_ref context_die)
24998 {
24999 const tree variant_part_type = TREE_TYPE (variant_part_decl);
25000 tree variant_part_offset = vlr_ctx->variant_part_offset;
25001 struct loc_descr_context ctx = {
25002 vlr_ctx->struct_type, /* context_type */
25003 NULL_TREE, /* base_decl */
25004 NULL, /* dpi */
25005 false, /* placeholder_arg */
25006 false /* placeholder_seen */
25007 };
25008
25009 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
25010 NULL_TREE if there is no such field. */
25011 tree discr_decl = NULL_TREE;
25012 dw_discr_list_ref *discr_lists;
25013 unsigned discr_lists_length = 0;
25014 unsigned i;
25015
25016 dw_die_ref dwarf_proc_die = NULL;
25017 dw_die_ref variant_part_die
25018 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
25019
25020 equate_decl_number_to_die (variant_part_decl, variant_part_die);
25021
25022 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
25023 &discr_decl, &discr_lists, &discr_lists_length);
25024
25025 if (discr_decl != NULL_TREE)
25026 {
25027 dw_die_ref discr_die = lookup_decl_die (discr_decl);
25028
25029 if (discr_die)
25030 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
25031 else
25032 /* We have no DIE for the discriminant, so just discard all
25033 	       discriminant information in the output. */
25034 discr_decl = NULL_TREE;
25035 }
25036
25037 /* If the offset for this variant part is more complex than a constant,
25038 create a DWARF procedure for it so that we will not have to generate DWARF
25039 expressions for it for each member. */
25040 if (TREE_CODE (variant_part_offset) != INTEGER_CST
25041 && (dwarf_version >= 3 || !dwarf_strict))
25042 {
25043 const tree dwarf_proc_fndecl
25044 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
25045 build_function_type (TREE_TYPE (variant_part_offset),
25046 NULL_TREE));
25047 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
25048 const dw_loc_descr_ref dwarf_proc_body
25049 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
25050
25051 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
25052 dwarf_proc_fndecl, context_die);
25053 if (dwarf_proc_die != NULL)
25054 variant_part_offset = dwarf_proc_call;
25055 }
25056
25057 /* Output DIEs for all variants. */
25058 i = 0;
25059 for (tree variant = TYPE_FIELDS (variant_part_type);
25060 variant != NULL_TREE;
25061 variant = DECL_CHAIN (variant), ++i)
25062 {
25063 tree variant_type = TREE_TYPE (variant);
25064 dw_die_ref variant_die;
25065
25066 /* All variants (i.e. members of a variant part) are supposed to be
25067 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25068 under these records. */
25069 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25070
25071 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25072 equate_decl_number_to_die (variant, variant_die);
25073
25074 /* Output discriminant values this variant matches, if any. */
25075 if (discr_decl == NULL || discr_lists[i] == NULL)
25076 	    /* If there is no discriminant information at all, or this variant
25077 	       has no matching values, this is probably the default variant: as
25078 	       the standard says, don't output any discriminant value/list attribute.  */
25079 ;
25080 else if (discr_lists[i]->dw_discr_next == NULL
25081 && !discr_lists[i]->dw_discr_range)
25082 /* If there is only one accepted value, don't bother outputting a
25083 list. */
25084 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25085 else
25086 add_discr_list (variant_die, discr_lists[i]);
25087
25088 for (tree member = TYPE_FIELDS (variant_type);
25089 member != NULL_TREE;
25090 member = DECL_CHAIN (member))
25091 {
25092 struct vlr_context vlr_sub_ctx = {
25093 vlr_ctx->struct_type, /* struct_type */
25094 NULL /* variant_part_offset */
25095 };
25096 if (is_variant_part (member))
25097 {
25098 /* All offsets for fields inside variant parts are relative to
25099 the top-level embedding RECORD_TYPE's base address. On the
25100 other hand, offsets in GCC's types are relative to the
25101 nested-most variant part. So we have to sum offsets each time
25102 we recurse. */
25103
25104 vlr_sub_ctx.variant_part_offset
25105 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25106 variant_part_offset, byte_position (member));
25107 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25108 }
25109 else
25110 {
25111 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25112 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25113 }
25114 }
25115 }
25116
25117 free (discr_lists);
25118 }
25119
25120 /* Generate a DIE for a class member. */
25121
25122 static void
25123 gen_member_die (tree type, dw_die_ref context_die)
25124 {
25125 tree member;
25126 tree binfo = TYPE_BINFO (type);
25127
25128 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25129
25130 /* If this is not an incomplete type, output descriptions of each of its
25131 members. Note that as we output the DIEs necessary to represent the
25132 members of this record or union type, we will also be trying to output
25133 DIEs to represent the *types* of those members. However the `type'
25134 function (above) will specifically avoid generating type DIEs for member
25135 types *within* the list of member DIEs for this (containing) type except
25136 for those types (of members) which are explicitly marked as also being
25137 	 members of this (containing) type themselves.  The g++ front end can
25138 force any given type to be treated as a member of some other (containing)
25139 type by setting the TYPE_CONTEXT of the given (member) type to point to
25140 the TREE node representing the appropriate (containing) type. */
25141
25142 /* First output info about the base classes. */
25143 if (binfo && early_dwarf)
25144 {
25145 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25146 int i;
25147 tree base;
25148
25149 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25150 gen_inheritance_die (base,
25151 (accesses ? (*accesses)[i] : access_public_node),
25152 type,
25153 context_die);
25154 }
25155
25156 /* Now output info about the members. */
25157 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25158 {
25159 /* Ignore clones. */
25160 if (DECL_ABSTRACT_ORIGIN (member))
25161 continue;
25162
25163 struct vlr_context vlr_ctx = { type, NULL_TREE };
25164 bool static_inline_p
25165 = (VAR_P (member)
25166 && TREE_STATIC (member)
25167 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25168 != -1));
25169
25170 /* If we thought we were generating minimal debug info for TYPE
25171 and then changed our minds, some of the member declarations
25172 may have already been defined. Don't define them again, but
25173 do put them in the right order. */
25174
25175 if (dw_die_ref child = lookup_decl_die (member))
25176 {
25177 /* Handle inline static data members, which only have in-class
25178 declarations. */
25179 bool splice = true;
25180
25181 dw_die_ref ref = NULL;
25182 if (child->die_tag == DW_TAG_variable
25183 && child->die_parent == comp_unit_die ())
25184 {
25185 ref = get_AT_ref (child, DW_AT_specification);
25186
25187 /* For C++17 inline static data members followed by redundant
25188 out of class redeclaration, we might get here with
25189 child being the DIE created for the out of class
25190 redeclaration and with its DW_AT_specification being
25191 the DIE created for in-class definition. We want to
25192 reparent the latter, and don't want to create another
25193 DIE with DW_AT_specification in that case, because
25194 we already have one. */
25195 if (ref
25196 && static_inline_p
25197 && ref->die_tag == DW_TAG_variable
25198 && ref->die_parent == comp_unit_die ()
25199 && get_AT (ref, DW_AT_specification) == NULL)
25200 {
25201 child = ref;
25202 ref = NULL;
25203 static_inline_p = false;
25204 }
25205
25206 if (!ref)
25207 {
25208 reparent_child (child, context_die);
25209 if (dwarf_version < 5)
25210 child->die_tag = DW_TAG_member;
25211 splice = false;
25212 }
25213 }
25214 else if (child->die_tag == DW_TAG_enumerator)
25215 /* Enumerators remain under their enumeration even if
25216 their names are introduced in the enclosing scope. */
25217 splice = false;
25218
25219 if (splice)
25220 splice_child_die (context_die, child);
25221 }
25222
25223 /* Do not generate standard DWARF for variant parts if we are generating
25224 the corresponding GNAT encodings: DIEs generated for both would
25225 conflict in our mappings. */
25226 else if (is_variant_part (member)
25227 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25228 {
25229 vlr_ctx.variant_part_offset = byte_position (member);
25230 gen_variant_part (member, &vlr_ctx, context_die);
25231 }
25232 else
25233 {
25234 vlr_ctx.variant_part_offset = NULL_TREE;
25235 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25236 }
25237
25238 /* For C++ inline static data members emit immediately a DW_TAG_variable
25239 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25240 DW_AT_specification. */
25241 if (static_inline_p)
25242 {
25243 int old_extern = DECL_EXTERNAL (member);
25244 DECL_EXTERNAL (member) = 0;
25245 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25246 DECL_EXTERNAL (member) = old_extern;
25247 }
25248 }
25249 }
25250
25251 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25252 is set, we pretend that the type was never defined, so we only get the
25253 member DIEs needed by later specification DIEs. */
25254
25255 static void
25256 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25257 enum debug_info_usage usage)
25258 {
25259 if (TREE_ASM_WRITTEN (type))
25260 {
25261 	/* Fill in the bounds of variable-length fields in late dwarf if
25262 still incomplete. */
25263 if (!early_dwarf && variably_modified_type_p (type, NULL))
25264 for (tree member = TYPE_FIELDS (type);
25265 member;
25266 member = DECL_CHAIN (member))
25267 fill_variable_array_bounds (TREE_TYPE (member));
25268 return;
25269 }
25270
25271 dw_die_ref type_die = lookup_type_die (type);
25272 dw_die_ref scope_die = 0;
25273 int nested = 0;
25274 int complete = (TYPE_SIZE (type)
25275 && (! TYPE_STUB_DECL (type)
25276 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25277 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25278 complete = complete && should_emit_struct_debug (type, usage);
25279
25280 if (type_die && ! complete)
25281 return;
25282
25283 if (TYPE_CONTEXT (type) != NULL_TREE
25284 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25285 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25286 nested = 1;
25287
25288 scope_die = scope_die_for (type, context_die);
25289
25290   /* Generate child DIEs for template parameters. */
25291 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25292 schedule_generic_params_dies_gen (type);
25293
25294 if (! type_die || (nested && is_cu_die (scope_die)))
25295 /* First occurrence of type or toplevel definition of nested class. */
25296 {
25297 dw_die_ref old_die = type_die;
25298
25299 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25300 ? record_type_tag (type) : DW_TAG_union_type,
25301 scope_die, type);
25302 equate_type_number_to_die (type, type_die);
25303 if (old_die)
25304 add_AT_specification (type_die, old_die);
25305 else
25306 add_name_attribute (type_die, type_tag (type));
25307 }
25308 else
25309 remove_AT (type_die, DW_AT_declaration);
25310
25311 /* If this type has been completed, then give it a byte_size attribute and
25312 then give a list of members. */
25313 if (complete && !ns_decl)
25314 {
25315 /* Prevent infinite recursion in cases where the type of some member of
25316 this type is expressed in terms of this type itself. */
25317 TREE_ASM_WRITTEN (type) = 1;
25318 add_byte_size_attribute (type_die, type);
25319 add_alignment_attribute (type_die, type);
25320 if (TYPE_STUB_DECL (type) != NULL_TREE)
25321 {
25322 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25323 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25324 }
25325
25326 /* If the first reference to this type was as the return type of an
25327 inline function, then it may not have a parent. Fix this now. */
25328 if (type_die->die_parent == NULL)
25329 add_child_die (scope_die, type_die);
25330
25331 gen_member_die (type, type_die);
25332
25333 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25334 if (TYPE_ARTIFICIAL (type))
25335 add_AT_flag (type_die, DW_AT_artificial, 1);
25336
25337 /* GNU extension: Record what type our vtable lives in. */
25338 if (TYPE_VFIELD (type))
25339 {
25340 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25341
25342 gen_type_die (vtype, context_die);
25343 add_AT_die_ref (type_die, DW_AT_containing_type,
25344 lookup_type_die (vtype));
25345 }
25346 }
25347 else
25348 {
25349 add_AT_flag (type_die, DW_AT_declaration, 1);
25350
25351 /* We don't need to do this for function-local types. */
25352 if (TYPE_STUB_DECL (type)
25353 && ! decl_function_context (TYPE_STUB_DECL (type)))
25354 vec_safe_push (incomplete_types, type);
25355 }
25356
25357 if (get_AT (type_die, DW_AT_name))
25358 add_pubtype (type, type_die);
25359 }
25360
25361 /* Generate a DIE for a subroutine _type_. */
25362
25363 static void
25364 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25365 {
25366 tree return_type = TREE_TYPE (type);
25367 dw_die_ref subr_die
25368 = new_die (DW_TAG_subroutine_type,
25369 scope_die_for (type, context_die), type);
25370
25371 equate_type_number_to_die (type, subr_die);
25372 add_prototyped_attribute (subr_die, type);
25373 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25374 context_die);
25375 add_alignment_attribute (subr_die, type);
25376 gen_formal_types_die (type, subr_die);
25377
25378 if (get_AT (subr_die, DW_AT_name))
25379 add_pubtype (type, subr_die);
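  /* DW_AT_reference and DW_AT_rvalue_reference are DWARF 5 attributes
     describing ref-qualified function/method types; before DWARF 5 they
     are only emitted when not in strict mode.  */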
25380 if ((dwarf_version >= 5 || !dwarf_strict)
25381 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25382 add_AT_flag (subr_die, DW_AT_reference, 1);
25383 if ((dwarf_version >= 5 || !dwarf_strict)
25384 && lang_hooks.types.type_dwarf_attribute (type,
25385 DW_AT_rvalue_reference) != -1)
25386 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25387 }
25388
25389 /* Generate a DIE for a type definition. */
25390
25391 static void
25392 gen_typedef_die (tree decl, dw_die_ref context_die)
25393 {
25394 dw_die_ref type_die;
25395 tree type;
25396
25397 if (TREE_ASM_WRITTEN (decl))
25398 {
25399 if (DECL_ORIGINAL_TYPE (decl))
25400 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25401 return;
25402 }
25403
25404 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25405 checks in process_scope_var and modified_type_die), this should be called
25406 only for original types. */
25407 gcc_assert (decl_ultimate_origin (decl) == NULL
25408 || decl_ultimate_origin (decl) == decl);
25409
25410 TREE_ASM_WRITTEN (decl) = 1;
25411 type_die = new_die (DW_TAG_typedef, context_die, decl);
25412
25413 add_name_and_src_coords_attributes (type_die, decl);
25414 if (DECL_ORIGINAL_TYPE (decl))
25415 {
25416 type = DECL_ORIGINAL_TYPE (decl);
25417 if (type == error_mark_node)
25418 return;
25419
25420 gcc_assert (type != TREE_TYPE (decl));
25421 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25422 }
25423 else
25424 {
25425 type = TREE_TYPE (decl);
25426 if (type == error_mark_node)
25427 return;
25428
25429 if (is_naming_typedef_decl (TYPE_NAME (type)))
25430 {
25431 /* Here, we are in the case of decl being a typedef naming
25432 	     an anonymous type, e.g.:
25433 typedef struct {...} foo;
25434 In that case TREE_TYPE (decl) is not a typedef variant
25435 type and TYPE_NAME of the anonymous type is set to the
25436 TYPE_DECL of the typedef. This construct is emitted by
25437 the C++ FE.
25438
25439 TYPE is the anonymous struct named by the typedef
25440 DECL. As we need the DW_AT_type attribute of the
25441 DW_TAG_typedef to point to the DIE of TYPE, let's
25442 generate that DIE right away. add_type_attribute
25443 called below will then pick (via lookup_type_die) that
25444 anonymous struct DIE. */
25445 if (!TREE_ASM_WRITTEN (type))
25446 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25447
25448 /* This is a GNU Extension. We are adding a
25449 DW_AT_linkage_name attribute to the DIE of the
25450 anonymous struct TYPE. The value of that attribute
25451 is the name of the typedef decl naming the anonymous
25452 struct. This greatly eases the work of consumers of
25453 this debug info. */
25454 add_linkage_name_raw (lookup_type_die (type), decl);
25455 }
25456 }
25457
25458 add_type_attribute (type_die, type, decl_quals (decl), false,
25459 context_die);
25460
25461 if (is_naming_typedef_decl (decl))
25462 /* We want that all subsequent calls to lookup_type_die with
25463 TYPE in argument yield the DW_TAG_typedef we have just
25464 created. */
25465 equate_type_number_to_die (type, type_die);
25466
25467 add_alignment_attribute (type_die, TREE_TYPE (decl));
25468
25469 add_accessibility_attribute (type_die, decl);
25470
25471 if (DECL_ABSTRACT_P (decl))
25472 equate_decl_number_to_die (decl, type_die);
25473
25474 if (get_AT (type_die, DW_AT_name))
25475 add_pubtype (decl, type_die);
25476 }
25477
25478 /* Generate a DIE for a struct, class, enum or union type. */
25479
25480 static void
25481 gen_tagged_type_die (tree type,
25482 dw_die_ref context_die,
25483 enum debug_info_usage usage)
25484 {
25485 if (type == NULL_TREE
25486 || !is_tagged_type (type))
25487 return;
25488
25489 if (TREE_ASM_WRITTEN (type))
25490 ;
25491 /* If this is a nested type whose containing class hasn't been written
25492 out yet, writing it out will cover this one, too. This does not apply
25493 to instantiations of member class templates; they need to be added to
25494 the containing class as they are generated. FIXME: This hurts the
25495 idea of combining type decls from multiple TUs, since we can't predict
25496 what set of template instantiations we'll get. */
25497 else if (TYPE_CONTEXT (type)
25498 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25499 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25500 {
25501 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25502
25503 if (TREE_ASM_WRITTEN (type))
25504 return;
25505
25506 /* If that failed, attach ourselves to the stub. */
25507 context_die = lookup_type_die (TYPE_CONTEXT (type));
25508 }
25509 else if (TYPE_CONTEXT (type) != NULL_TREE
25510 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25511 {
25512 /* If this type is local to a function that hasn't been written
25513 out yet, use a NULL context for now; it will be fixed up in
25514 decls_for_scope. */
25515 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25516 /* A declaration DIE doesn't count; nested types need to go in the
25517 specification. */
25518 if (context_die && is_declaration_die (context_die))
25519 context_die = NULL;
25520 }
25521 else
25522 context_die = declare_in_namespace (type, context_die);
25523
25524 if (TREE_CODE (type) == ENUMERAL_TYPE)
25525 {
25526 /* This might have been written out by the call to
25527 declare_in_namespace. */
25528 if (!TREE_ASM_WRITTEN (type))
25529 gen_enumeration_type_die (type, context_die);
25530 }
25531 else
25532 gen_struct_or_union_type_die (type, context_die, usage);
25533
25534 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25535 it up if it is ever completed. gen_*_type_die will set it for us
25536 when appropriate. */
25537 }
25538
25539 /* Generate a type description DIE. */
25540
25541 static void
25542 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25543 enum debug_info_usage usage)
25544 {
25545 struct array_descr_info info;
25546
25547 if (type == NULL_TREE || type == error_mark_node)
25548 return;
25549
25550 if (flag_checking && type)
25551 verify_type (type);
25552
25553 if (TYPE_NAME (type) != NULL_TREE
25554 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25555 && is_redundant_typedef (TYPE_NAME (type))
25556 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25557 /* The DECL of this type is a typedef we don't want to emit debug
25558 info for but we want debug info for its underlying typedef.
25559 	   This can happen, e.g., for the injected-class-name of a C++
25560 type. */
25561 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25562
25563 /* If TYPE is a typedef type variant, let's generate debug info
25564 for the parent typedef which TYPE is a type of. */
25565 if (typedef_variant_p (type))
25566 {
25567 if (TREE_ASM_WRITTEN (type))
25568 return;
25569
25570 tree name = TYPE_NAME (type);
25571 tree origin = decl_ultimate_origin (name);
25572 if (origin != NULL && origin != name)
25573 {
25574 gen_decl_die (origin, NULL, NULL, context_die);
25575 return;
25576 }
25577
25578 /* Prevent broken recursion; we can't hand off to the same type. */
25579 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25580
25581 /* Give typedefs the right scope. */
25582 context_die = scope_die_for (type, context_die);
25583
25584 TREE_ASM_WRITTEN (type) = 1;
25585
25586 gen_decl_die (name, NULL, NULL, context_die);
25587 return;
25588 }
25589
25590 /* If type is an anonymous tagged type named by a typedef, let's
25591 generate debug info for the typedef. */
25592 if (is_naming_typedef_decl (TYPE_NAME (type)))
25593 {
25594 /* Give typedefs the right scope. */
25595 context_die = scope_die_for (type, context_die);
25596
25597 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25598 return;
25599 }
25600
25601 if (lang_hooks.types.get_debug_type)
25602 {
25603 tree debug_type = lang_hooks.types.get_debug_type (type);
25604
25605 if (debug_type != NULL_TREE && debug_type != type)
25606 {
25607 gen_type_die_with_usage (debug_type, context_die, usage);
25608 return;
25609 }
25610 }
25611
25612 /* We are going to output a DIE to represent the unqualified version
25613 of this type (i.e. without any const or volatile qualifiers) so
25614 get the main variant (i.e. the unqualified version) of this type
25615 now. (Vectors and arrays are special because the debugging info is in the
25616 cloned type itself. Similarly function/method types can contain extra
25617 ref-qualification). */
25618 if (TREE_CODE (type) == FUNCTION_TYPE
25619 || TREE_CODE (type) == METHOD_TYPE)
25620 {
25621 /* For function/method types, can't use type_main_variant here,
25622 because that can have different ref-qualifiers for C++,
25623 but try to canonicalize. */
25624 tree main = TYPE_MAIN_VARIANT (type);
25625 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25626 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25627 && check_base_type (t, main)
25628 && check_lang_type (t, type))
25629 {
25630 type = t;
25631 break;
25632 }
25633 }
25634 else if (TREE_CODE (type) != VECTOR_TYPE
25635 && TREE_CODE (type) != ARRAY_TYPE)
25636 type = type_main_variant (type);
25637
25638 /* If this is an array type with hidden descriptor, handle it first. */
25639 if (!TREE_ASM_WRITTEN (type)
25640 && lang_hooks.types.get_array_descr_info)
25641 {
25642 memset (&info, 0, sizeof (info));
25643 if (lang_hooks.types.get_array_descr_info (type, &info))
25644 {
25645 /* Fortran sometimes emits array types with no dimension. */
25646 gcc_assert (info.ndimensions >= 0
25647 && (info.ndimensions
25648 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25649 gen_descr_array_type_die (type, &info, context_die);
25650 TREE_ASM_WRITTEN (type) = 1;
25651 return;
25652 }
25653 }
25654
25655 if (TREE_ASM_WRITTEN (type))
25656 {
25657 /* Variable-length types may be incomplete even if
25658 TREE_ASM_WRITTEN. For such types, fall through to
25659 gen_array_type_die() and possibly fill in
25660 DW_AT_{upper,lower}_bound attributes. */
25661 if ((TREE_CODE (type) != ARRAY_TYPE
25662 && TREE_CODE (type) != RECORD_TYPE
25663 && TREE_CODE (type) != UNION_TYPE
25664 && TREE_CODE (type) != QUAL_UNION_TYPE)
25665 || !variably_modified_type_p (type, NULL))
25666 return;
25667 }
25668
25669 switch (TREE_CODE (type))
25670 {
25671 case ERROR_MARK:
25672 break;
25673
25674 case POINTER_TYPE:
25675 case REFERENCE_TYPE:
25676 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25677 ensures that the gen_type_die recursion will terminate even if the
25678 type is recursive. Recursive types are possible in Ada. */
25679 /* ??? We could perhaps do this for all types before the switch
25680 statement. */
25681 TREE_ASM_WRITTEN (type) = 1;
25682
25683 /* For these types, all that is required is that we output a DIE (or a
25684 set of DIEs) to represent the "basis" type. */
25685 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25686 DINFO_USAGE_IND_USE);
25687 break;
25688
25689 case OFFSET_TYPE:
25690 /* This code is used for C++ pointer-to-data-member types.
25691 Output a description of the relevant class type. */
25692 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25693 DINFO_USAGE_IND_USE);
25694
25695 /* Output a description of the type of the object pointed to. */
25696 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25697 DINFO_USAGE_IND_USE);
25698
25699 /* Now output a DIE to represent this pointer-to-data-member type
25700 itself. */
25701 gen_ptr_to_mbr_type_die (type, context_die);
25702 break;
25703
25704 case FUNCTION_TYPE:
25705 /* Force out return type (in case it wasn't forced out already). */
25706 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25707 DINFO_USAGE_DIR_USE);
25708 gen_subroutine_type_die (type, context_die);
25709 break;
25710
25711 case METHOD_TYPE:
25712 /* Force out return type (in case it wasn't forced out already). */
25713 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25714 DINFO_USAGE_DIR_USE);
25715 gen_subroutine_type_die (type, context_die);
25716 break;
25717
25718 case ARRAY_TYPE:
25719 case VECTOR_TYPE:
25720 gen_array_type_die (type, context_die);
25721 break;
25722
25723 case ENUMERAL_TYPE:
25724 case RECORD_TYPE:
25725 case UNION_TYPE:
25726 case QUAL_UNION_TYPE:
25727 gen_tagged_type_die (type, context_die, usage);
25728 return;
25729
25730 case VOID_TYPE:
25731 case OPAQUE_TYPE:
25732 case INTEGER_TYPE:
25733 case REAL_TYPE:
25734 case FIXED_POINT_TYPE:
25735 case COMPLEX_TYPE:
25736 case BOOLEAN_TYPE:
25737 /* No DIEs needed for fundamental types. */
25738 break;
25739
25740 case NULLPTR_TYPE:
25741 case LANG_TYPE:
25742 /* Just use DW_TAG_unspecified_type. */
25743 {
25744 dw_die_ref type_die = lookup_type_die (type);
25745 if (type_die == NULL)
25746 {
25747 tree name = TYPE_IDENTIFIER (type);
25748 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25749 type);
25750 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25751 equate_type_number_to_die (type, type_die);
25752 }
25753 }
25754 break;
25755
25756 default:
25757 if (is_cxx_auto (type))
25758 {
25759 tree name = TYPE_IDENTIFIER (type);
25760 dw_die_ref *die = (name == get_identifier ("auto")
25761 ? &auto_die : &decltype_auto_die);
25762 if (!*die)
25763 {
25764 *die = new_die (DW_TAG_unspecified_type,
25765 comp_unit_die (), NULL_TREE);
25766 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25767 }
25768 equate_type_number_to_die (type, *die);
25769 break;
25770 }
25771 gcc_unreachable ();
25772 }
25773
25774 TREE_ASM_WRITTEN (type) = 1;
25775 }
25776
25777 static void
25778 gen_type_die (tree type, dw_die_ref context_die)
25779 {
25780 if (type != error_mark_node)
25781 {
25782 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25783 if (flag_checking)
25784 {
25785 dw_die_ref die = lookup_type_die (type);
25786 if (die)
25787 check_die (die);
25788 }
25789 }
25790 }
25791
25792 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25793 things which are local to the given block. */
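/* Illustrative sketch (editorial addition): for a function body such as

     extern void use (int *);
     void f (void)
     {
       {                       /* the inner braces form a BLOCK */
         int i = 0;
         use (&i);
       }
     }

   the inner BLOCK gets a DW_TAG_lexical_block DIE only if it holds
   "significant" locals or is the outer scope of an inlined call;
   otherwise we merely recurse into its sub-blocks via decls_for_scope.  */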
25794
25795 static void
25796 gen_block_die (tree stmt, dw_die_ref context_die)
25797 {
25798 int must_output_die = 0;
25799 bool inlined_func;
25800
25801 /* Ignore blocks that are NULL. */
25802 if (stmt == NULL_TREE)
25803 return;
25804
25805 inlined_func = inlined_function_outer_scope_p (stmt);
25806
25807 /* If the block is one fragment of a non-contiguous block, do not
25808 process the variables, since they will have been done by the
25809 origin block. Do process subblocks. */
25810 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25811 {
25812 tree sub;
25813
25814 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25815 gen_block_die (sub, context_die);
25816
25817 return;
25818 }
25819
25820 /* Determine if we need to output any Dwarf DIEs at all to represent this
25821 block. */
25822 if (inlined_func)
25823 /* The outer scopes for inlinings *must* always be represented. We
25824 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25825 must_output_die = 1;
25826 else if (lookup_block_die (stmt))
25827 /* If we already have a DIE then it was filled early. Meanwhile
25828 we might have pruned all BLOCK_VARS as optimized out, but we
25829 still want to generate high/low PC attributes, so output it. */
25830 must_output_die = 1;
25831 else if (TREE_USED (stmt)
25832 || TREE_ASM_WRITTEN (stmt))
25833 {
25834 /* Determine if this block directly contains any "significant"
25835 local declarations which we will need to output DIEs for. */
25836 if (debug_info_level > DINFO_LEVEL_TERSE)
25837 {
25838 /* We are not in terse mode so any local declaration that
25839 is not ignored for debug purposes counts as being a
25840 "significant" one. */
25841 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25842 must_output_die = 1;
25843 else
25844 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25845 if (!DECL_IGNORED_P (var))
25846 {
25847 must_output_die = 1;
25848 break;
25849 }
25850 }
25851 else if (!dwarf2out_ignore_block (stmt))
25852 must_output_die = 1;
25853 }
25854
25855 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25856 DIE for any block which contains no significant local declarations at
25857 all. Rather, in such cases we just call `decls_for_scope' so that any
25858 needed Dwarf info for any sub-blocks will get properly generated. Note
25859 that in terse mode, our definition of what constitutes a "significant"
25860 local declaration gets restricted to include only inlined function
25861 instances and local (nested) function definitions. */
25862 if (must_output_die)
25863 {
25864 if (inlined_func)
25865 gen_inlined_subroutine_die (stmt, context_die);
25866 else
25867 gen_lexical_block_die (stmt, context_die);
25868 }
25869 else
25870 decls_for_scope (stmt, context_die);
25871 }
25872
25873 /* Process variable DECL (or variable with origin ORIGIN) within
25874 block STMT and add it to CONTEXT_DIE. */
25875 static void
25876 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25877 {
25878 dw_die_ref die;
25879 tree decl_or_origin = decl ? decl : origin;
25880
25881 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25882 die = lookup_decl_die (decl_or_origin);
25883 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25884 {
25885 if (TYPE_DECL_IS_STUB (decl_or_origin))
25886 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25887 else
25888 die = lookup_decl_die (decl_or_origin);
25889 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25890 if (! die && ! early_dwarf)
25891 return;
25892 }
25893 else
25894 die = NULL;
25895
25896 /* Avoid creating DIEs for local typedefs and concrete static variables that
25897 will only be pruned later. */
25898 if ((origin || decl_ultimate_origin (decl))
25899 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25900 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25901 {
25902 origin = decl_ultimate_origin (decl_or_origin);
25903 if (decl && VAR_P (decl) && die != NULL)
25904 {
25905 die = lookup_decl_die (origin);
25906 if (die != NULL)
25907 equate_decl_number_to_die (decl, die);
25908 }
25909 return;
25910 }
25911
25912 if (die != NULL && die->die_parent == NULL)
25913 add_child_die (context_die, die);
25914 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25915 {
25916 if (early_dwarf)
25917 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25918 stmt, context_die);
25919 }
25920 else
25921 {
25922 if (decl && DECL_P (decl))
25923 {
25924 die = lookup_decl_die (decl);
25925
25926 /* Early created DIEs do not have a parent as the decls refer
25927 to the function as DECL_CONTEXT rather than the BLOCK. */
25928 if (die && die->die_parent == NULL)
25929 {
25930 gcc_assert (in_lto_p);
25931 add_child_die (context_die, die);
25932 }
25933 }
25934
25935 gen_decl_die (decl, origin, NULL, context_die);
25936 }
25937 }
25938
25939 /* Generate all of the decls declared within a given scope and (recursively)
25940 all of its sub-blocks. */
25941
25942 static void
25943 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25944 {
25945 tree decl;
25946 unsigned int i;
25947 tree subblocks;
25948
25949 /* Ignore NULL blocks. */
25950 if (stmt == NULL_TREE)
25951 return;
25952
25953 /* Output the DIEs to represent all of the data objects and typedefs
25954 declared directly within this block but not within any nested
25955 sub-blocks. Also, nested function and tag DIEs have been
25956 generated with a parent of NULL; fix that up now. We don't
25957 have to do this if we're at -g1. */
25958 if (debug_info_level > DINFO_LEVEL_TERSE)
25959 {
25960 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25961 process_scope_var (stmt, decl, NULL_TREE, context_die);
25962 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25963 origin - avoid doing this twice as we have no good way to see
25964 if we've done it once already. */
25965 if (! early_dwarf)
25966 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25967 {
25968 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25969 if (decl == current_function_decl)
25970 /* Ignore declarations of the current function: while they
25971 are declarations, gen_subprogram_die would treat them
25972 as definitions again (because they are equal to
25973 current_function_decl) and endlessly recurse. */;
25974 else if (TREE_CODE (decl) == FUNCTION_DECL)
25975 process_scope_var (stmt, decl, NULL_TREE, context_die);
25976 else
25977 process_scope_var (stmt, NULL_TREE, decl, context_die);
25978 }
25979 }
25980
25981 /* Even if we're at -g1, we need to process the subblocks in order to get
25982 inlined call information. */
25983
25984 /* Output the DIEs to represent all sub-blocks (and the items declared
25985 therein) of this block. */
25986 if (recurse)
25987 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25988 subblocks != NULL;
25989 subblocks = BLOCK_CHAIN (subblocks))
25990 gen_block_die (subblocks, context_die);
25991 }
25992
25993 /* Is this a typedef we can avoid emitting? */
25994
25995 static bool
25996 is_redundant_typedef (const_tree decl)
25997 {
25998 if (TYPE_DECL_IS_STUB (decl))
25999 return true;
26000
26001 if (DECL_ARTIFICIAL (decl)
26002 && DECL_CONTEXT (decl)
26003 && is_tagged_type (DECL_CONTEXT (decl))
26004 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
26005 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
26006 /* Also ignore the artificial member typedef for the class name. */
26007 return true;
26008
26009 return false;
26010 }
26011
26012 /* Return TRUE if TYPE is a typedef that names a type for linkage
26013 purposes. Such typedefs are produced by the C++ FE for
26014 constructs like:
26015
26016 typedef struct {...} foo;
26017
26018 In that case, there is no typedef variant type produced for foo.
26019 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
26020 struct type. */
26021
26022 static bool
26023 is_naming_typedef_decl (const_tree decl)
26024 {
26025 if (decl == NULL_TREE
26026 || TREE_CODE (decl) != TYPE_DECL
26027 || DECL_NAMELESS (decl)
26028 || !is_tagged_type (TREE_TYPE (decl))
26029 || DECL_IS_UNDECLARED_BUILTIN (decl)
26030 || is_redundant_typedef (decl)
26031 /* It looks like Ada produces TYPE_DECLs that are very similar
26032 to C++ naming typedefs but that have different
26033 semantics. Let's be specific to C++ for now. */
26034 || !is_cxx (decl))
26035 return FALSE;
26036
26037 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
26038 && TYPE_NAME (TREE_TYPE (decl)) == decl
26039 && (TYPE_STUB_DECL (TREE_TYPE (decl))
26040 != TYPE_NAME (TREE_TYPE (decl))));
26041 }
26042
26043 /* Looks up the DIE for a context. */
26044
26045 static inline dw_die_ref
26046 lookup_context_die (tree context)
26047 {
26048 if (context)
26049 {
26050 /* Find die that represents this context. */
26051 if (TYPE_P (context))
26052 {
26053 context = TYPE_MAIN_VARIANT (context);
26054 dw_die_ref ctx = lookup_type_die (context);
26055 if (!ctx)
26056 return NULL;
26057 return strip_naming_typedef (context, ctx);
26058 }
26059 else
26060 return lookup_decl_die (context);
26061 }
26062 return comp_unit_die ();
26063 }
26064
26065 /* Returns the DIE for a context. */
26066
26067 static inline dw_die_ref
26068 get_context_die (tree context)
26069 {
26070 if (context)
26071 {
26072 /* Find die that represents this context. */
26073 if (TYPE_P (context))
26074 {
26075 context = TYPE_MAIN_VARIANT (context);
26076 return strip_naming_typedef (context, force_type_die (context));
26077 }
26078 else
26079 return force_decl_die (context);
26080 }
26081 return comp_unit_die ();
26082 }
26083
26084 /* Returns the DIE for decl. A DIE will always be returned. */
26085
26086 static dw_die_ref
26087 force_decl_die (tree decl)
26088 {
26089 dw_die_ref decl_die;
26090 unsigned saved_external_flag;
26091 tree save_fn = NULL_TREE;
26092 decl_die = lookup_decl_die (decl);
26093 if (!decl_die)
26094 {
26095 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26096
26097 decl_die = lookup_decl_die (decl);
26098 if (decl_die)
26099 return decl_die;
26100
26101 switch (TREE_CODE (decl))
26102 {
26103 case FUNCTION_DECL:
26104 /* Clear current_function_decl, so that gen_subprogram_die thinks
26105 that this is a declaration. At this point, we just want to force
26106 a declaration DIE. */
26107 save_fn = current_function_decl;
26108 current_function_decl = NULL_TREE;
26109 gen_subprogram_die (decl, context_die);
26110 current_function_decl = save_fn;
26111 break;
26112
26113 case VAR_DECL:
26114 /* Set external flag to force a declaration DIE. Restore it after
26115 gen_decl_die() call. */
26116 saved_external_flag = DECL_EXTERNAL (decl);
26117 DECL_EXTERNAL (decl) = 1;
26118 gen_decl_die (decl, NULL, NULL, context_die);
26119 DECL_EXTERNAL (decl) = saved_external_flag;
26120 break;
26121
26122 case NAMESPACE_DECL:
26123 if (dwarf_version >= 3 || !dwarf_strict)
26124 dwarf2out_decl (decl);
26125 else
26126 /* DWARF2 has neither DW_TAG_module nor DW_TAG_namespace. */
26127 decl_die = comp_unit_die ();
26128 break;
26129
26130 case CONST_DECL:
26131 /* Enumerators shouldn't need force_decl_die. */
26132 gcc_assert (DECL_CONTEXT (decl) == NULL_TREE
26133 || TREE_CODE (DECL_CONTEXT (decl)) != ENUMERAL_TYPE);
26134 gen_decl_die (decl, NULL, NULL, context_die);
26135 break;
26136
26137 case TRANSLATION_UNIT_DECL:
26138 decl_die = comp_unit_die ();
26139 break;
26140
26141 default:
26142 gcc_unreachable ();
26143 }
26144
26145 /* We should be able to find the DIE now. */
26146 if (!decl_die)
26147 decl_die = lookup_decl_die (decl);
26148 gcc_assert (decl_die);
26149 }
26150
26151 return decl_die;
26152 }
26153
26154 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26155 always returned. */
26156
26157 static dw_die_ref
26158 force_type_die (tree type)
26159 {
26160 dw_die_ref type_die;
26161
26162 type_die = lookup_type_die (type);
26163 if (!type_die)
26164 {
26165 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26166
26167 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26168 false, context_die);
26169 gcc_assert (type_die);
26170 }
26171 return type_die;
26172 }
26173
26174 /* Force out any required namespaces to be able to output DECL,
26175 and return the new context_die for it, if it's changed. */
26176
26177 static dw_die_ref
26178 setup_namespace_context (tree thing, dw_die_ref context_die)
26179 {
26180 tree context = (DECL_P (thing)
26181 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26182 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26183 /* Force out the namespace. */
26184 context_die = force_decl_die (context);
26185
26186 return context_die;
26187 }
26188
26189 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26190 type) within its namespace, if appropriate.
26191
26192 For compatibility with older debuggers, namespace DIEs only contain
26193 declarations; all definitions are emitted at CU scope, with
26194 DW_AT_specification pointing to the declaration (like with class
26195 members). */
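/* Illustrative sketch (editorial addition): for

     namespace N { int i; }

   the DW_TAG_namespace DIE for N is expected to contain only a
   declaration DIE for i, while the defining DIE for i is emitted at CU
   scope and refers back to that declaration via DW_AT_specification.  */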
26196
26197 static dw_die_ref
26198 declare_in_namespace (tree thing, dw_die_ref context_die)
26199 {
26200 dw_die_ref ns_context;
26201
26202 if (debug_info_level <= DINFO_LEVEL_TERSE)
26203 return context_die;
26204
26205 /* External declarations in the local scope only need to be emitted
26206 once, not once in the namespace and once in the scope.
26207
26208 This avoids declaring the `extern' below in the
26209 namespace DIE as well as in the innermost scope:
26210
26211 namespace S
26212 {
26213 int i=5;
26214 int foo()
26215 {
26216 int i=8;
26217 extern int i;
26218 return i;
26219 }
26220 }
26221 */
26222 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26223 return context_die;
26224
26225 /* If this decl is from an inlined function, then don't try to emit it in its
26226 namespace, as we will get confused. It would have already been emitted
26227 when the abstract instance of the inline function was emitted anyway. */
26228 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26229 return context_die;
26230
26231 ns_context = setup_namespace_context (thing, context_die);
26232
26233 if (ns_context != context_die)
26234 {
26235 if (is_fortran () || is_dlang ())
26236 return ns_context;
26237 if (DECL_P (thing))
26238 gen_decl_die (thing, NULL, NULL, ns_context);
26239 else
26240 gen_type_die (thing, ns_context);
26241 }
26242 return context_die;
26243 }
26244
26245 /* Generate a DIE for a namespace or namespace alias. */
26246
26247 static void
26248 gen_namespace_die (tree decl, dw_die_ref context_die)
26249 {
26250 dw_die_ref namespace_die;
26251
26252 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26253 they are an alias of. */
26254 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26255 {
26256 /* Output a real namespace or module. */
26257 context_die = setup_namespace_context (decl, comp_unit_die ());
26258 namespace_die = new_die (is_fortran () || is_dlang ()
26259 ? DW_TAG_module : DW_TAG_namespace,
26260 context_die, decl);
26261 /* For Fortran modules defined in a different CU, don't add src coords. */
26262 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26263 {
26264 const char *name = dwarf2_name (decl, 0);
26265 if (name)
26266 add_name_attribute (namespace_die, name);
26267 }
26268 else
26269 add_name_and_src_coords_attributes (namespace_die, decl);
26270 if (DECL_EXTERNAL (decl))
26271 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26272 equate_decl_number_to_die (decl, namespace_die);
26273 }
26274 else
26275 {
26276 /* Output a namespace alias. */
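/* Illustrative sketch (editorial addition): for C++ source like

     namespace B { int x; }
     namespace A = B;          // alias; DECL_ABSTRACT_ORIGIN of A is B

   we emit a DW_TAG_imported_declaration named "A" whose DW_AT_import
   references the DIE of namespace B.  */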
26277
26278 /* Force out the namespace we are an alias of, if necessary. */
26279 dw_die_ref origin_die
26280 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26281
26282 if (DECL_FILE_SCOPE_P (decl)
26283 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26284 context_die = setup_namespace_context (decl, comp_unit_die ());
26285 /* Now create the namespace alias DIE. */
26286 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26287 add_name_and_src_coords_attributes (namespace_die, decl);
26288 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26289 equate_decl_number_to_die (decl, namespace_die);
26290 }
26291 if ((dwarf_version >= 5 || !dwarf_strict)
26292 && lang_hooks.decls.decl_dwarf_attribute (decl,
26293 DW_AT_export_symbols) == 1)
26294 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26295
26296 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26297 if (want_pubnames ())
26298 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26299 }
26300
26301 /* Generate Dwarf debug information for a decl described by DECL.
26302 The return value is currently only meaningful for PARM_DECLs,
26303 for all other decls it returns NULL.
26304
26305 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26306 It can be NULL otherwise. */
26307
26308 static dw_die_ref
26309 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26310 dw_die_ref context_die)
26311 {
26312 tree decl_or_origin = decl ? decl : origin;
26313 tree class_origin = NULL, ultimate_origin;
26314
26315 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26316 return NULL;
26317
26318 switch (TREE_CODE (decl_or_origin))
26319 {
26320 case ERROR_MARK:
26321 break;
26322
26323 case CONST_DECL:
26324 if (!is_fortran () && !is_ada () && !is_dlang ())
26325 {
26326 /* The individual enumerators of an enum type get output when we output
26327 the Dwarf representation of the relevant enum type itself. */
26328 break;
26329 }
26330
26331 /* Emit its type. */
26332 gen_type_die (TREE_TYPE (decl), context_die);
26333
26334 /* And its containing namespace. */
26335 context_die = declare_in_namespace (decl, context_die);
26336
26337 gen_const_die (decl, context_die);
26338 break;
26339
26340 case FUNCTION_DECL:
26341 #if 0
26342 /* FIXME */
26343 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26344 on local redeclarations of global functions. That seems broken. */
26345 if (current_function_decl != decl)
26346 /* This is only a declaration. */;
26347 #endif
26348
26349 /* We should have abstract copies already and should not generate
26350 stray type DIEs in late LTO dumping. */
26351 if (! early_dwarf)
26352 ;
26353
26354 /* If we're emitting a clone, emit info for the abstract instance. */
26355 else if (origin || DECL_ORIGIN (decl) != decl)
26356 dwarf2out_abstract_function (origin
26357 ? DECL_ORIGIN (origin)
26358 : DECL_ABSTRACT_ORIGIN (decl));
26359
26360 /* If we're emitting a possibly inlined function emit it as
26361 abstract instance. */
26362 else if (cgraph_function_possibly_inlined_p (decl)
26363 && ! DECL_ABSTRACT_P (decl)
26364 && ! class_or_namespace_scope_p (context_die)
26365 /* dwarf2out_abstract_function won't emit a die if this is just
26366 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26367 that case, because that works only if we have a die. */
26368 && DECL_INITIAL (decl) != NULL_TREE)
26369 dwarf2out_abstract_function (decl);
26370
26371 /* Otherwise we're emitting the primary DIE for this decl. */
26372 else if (debug_info_level > DINFO_LEVEL_TERSE)
26373 {
26374 /* Before we describe the FUNCTION_DECL itself, make sure that we
26375 have its containing type. */
26376 if (!origin)
26377 origin = decl_class_context (decl);
26378 if (origin != NULL_TREE)
26379 gen_type_die (origin, context_die);
26380
26381 /* And its return type. */
26382 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26383
26384 /* And its virtual context. */
26385 if (DECL_VINDEX (decl) != NULL_TREE)
26386 gen_type_die (DECL_CONTEXT (decl), context_die);
26387
26388 /* Make sure we have a member DIE for decl. */
26389 if (origin != NULL_TREE)
26390 gen_type_die_for_member (origin, decl, context_die);
26391
26392 /* And its containing namespace. */
26393 context_die = declare_in_namespace (decl, context_die);
26394 }
26395
26396 /* Now output a DIE to represent the function itself. */
26397 if (decl)
26398 gen_subprogram_die (decl, context_die);
26399 break;
26400
26401 case TYPE_DECL:
26402 /* If we are in terse mode, don't generate any DIEs to represent any
26403 actual typedefs. */
26404 if (debug_info_level <= DINFO_LEVEL_TERSE)
26405 break;
26406
26407 /* In the special case of a TYPE_DECL node representing the declaration
26408 of some type tag, if the given TYPE_DECL is marked as having been
26409 instantiated from some other (original) TYPE_DECL node (e.g. one which
26410 was generated within the original definition of an inline function) we
26411 used to generate a special (abbreviated) DW_TAG_structure_type,
26412 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26413 should actually be referencing those DIEs, as variable DIEs with that
26414 type would already be emitted in the abstract origin, so it was always
26415 removed during unused type pruning. Don't add anything in this
26416 case. */
26417 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26418 break;
26419
26420 if (is_redundant_typedef (decl))
26421 gen_type_die (TREE_TYPE (decl), context_die);
26422 else
26423 /* Output a DIE to represent the typedef itself. */
26424 gen_typedef_die (decl, context_die);
26425 break;
26426
26427 case LABEL_DECL:
26428 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26429 gen_label_die (decl, context_die);
26430 break;
26431
26432 case VAR_DECL:
26433 case RESULT_DECL:
26434 /* If we are in terse mode, don't generate any DIEs to represent any
26435 variable declarations or definitions unless they are external. */
26436 if (debug_info_level < DINFO_LEVEL_TERSE
26437 || (debug_info_level == DINFO_LEVEL_TERSE
26438 && !TREE_PUBLIC (decl_or_origin)))
26439 break;
26440
26441 if (debug_info_level > DINFO_LEVEL_TERSE)
26442 {
26443 /* Avoid generating stray type DIEs during late dwarf dumping.
26444 All types have been dumped early. */
26445 if (early_dwarf
26446 /* ??? But in LTRANS we cannot annotate early created variably
26447 modified type DIEs without copying them and adjusting all
26448 references to them. Dump them again as happens for inlining
26449 which copies both the decl and the types. */
26450 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26451 in VLA bound information for example. */
26452 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26453 current_function_decl)))
26454 {
26455 /* Output any DIEs that are needed to specify the type of this data
26456 object. */
26457 if (decl_by_reference_p (decl_or_origin))
26458 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26459 else
26460 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26461 }
26462
26463 if (early_dwarf)
26464 {
26465 /* And its containing type. */
26466 class_origin = decl_class_context (decl_or_origin);
26467 if (class_origin != NULL_TREE)
26468 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26469
26470 /* And its containing namespace. */
26471 context_die = declare_in_namespace (decl_or_origin, context_die);
26472 }
26473 }
26474
26475 /* Now output the DIE to represent the data object itself. This gets
26476 complicated because of the possibility that the VAR_DECL really
26477 represents an inlined instance of a formal parameter for an inline
26478 function. */
26479 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26480 if (ultimate_origin != NULL_TREE
26481 && TREE_CODE (ultimate_origin) == PARM_DECL)
26482 gen_formal_parameter_die (decl, origin,
26483 true /* Emit name attribute. */,
26484 context_die);
26485 else
26486 gen_variable_die (decl, origin, context_die);
26487 break;
26488
26489 case FIELD_DECL:
26490 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26491 /* Ignore the nameless fields that are used to skip bits but handle C++
26492 anonymous unions and structs. */
26493 if (DECL_NAME (decl) != NULL_TREE
26494 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26495 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26496 {
26497 gen_type_die (member_declared_type (decl), context_die);
26498 gen_field_die (decl, ctx, context_die);
26499 }
26500 break;
26501
26502 case PARM_DECL:
26503 /* Avoid generating stray type DIEs during late dwarf dumping.
26504 All types have been dumped early. */
26505 if (early_dwarf
26506 /* ??? But in LTRANS we cannot annotate early created variably
26507 modified type DIEs without copying them and adjusting all
26508 references to them. Dump them again as happens for inlining
26509 which copies both the decl and the types. */
26510 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26511 in VLA bound information for example. */
26512 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26513 current_function_decl)))
26514 {
26515 if (DECL_BY_REFERENCE (decl_or_origin))
26516 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26517 else
26518 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26519 }
26520 return gen_formal_parameter_die (decl, origin,
26521 true /* Emit name attribute. */,
26522 context_die);
26523
26524 case NAMESPACE_DECL:
26525 if (dwarf_version >= 3 || !dwarf_strict)
26526 gen_namespace_die (decl, context_die);
26527 break;
26528
26529 case IMPORTED_DECL:
26530 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26531 DECL_CONTEXT (decl), context_die);
26532 break;
26533
26534 case NAMELIST_DECL:
26535 gen_namelist_decl (DECL_NAME (decl), context_die,
26536 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26537 break;
26538
26539 default:
26540 /* Probably some frontend-internal decl. Assume we don't care. */
26541 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26542 break;
26543 }
26544
26545 return NULL;
26546 }
26547 \f
26548 /* Output initial debug information for global DECL. Called at the
26549 end of the parsing process.
26550
26551 This is the initial debug generation process. As such, the DIEs
26552 generated may be incomplete. A later debug generation pass
26553 (dwarf2out_late_global_decl) will augment the information generated
26554 in this pass (e.g., with complete location info). */
26555
26556 static void
26557 dwarf2out_early_global_decl (tree decl)
26558 {
26559 set_early_dwarf s;
26560
26561 /* gen_decl_die() will set DECL_ABSTRACT because
26562 cgraph_function_possibly_inlined_p() returns true. This in
26563 turn will cause DW_AT_inline attributes to be set.
26564
26565 This happens because at early dwarf generation, there is no
26566 cgraph information, causing cgraph_function_possibly_inlined_p()
26567 to return true. Trick cgraph_function_possibly_inlined_p()
26568 while we generate dwarf early. */
26569 bool save = symtab->global_info_ready;
26570 symtab->global_info_ready = true;
26571
26572 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26573 other DECLs and they can point to template types or other things
26574 that dwarf2out can't handle when done via dwarf2out_decl. */
26575 if (TREE_CODE (decl) != TYPE_DECL
26576 && TREE_CODE (decl) != PARM_DECL)
26577 {
26578 if (TREE_CODE (decl) == FUNCTION_DECL)
26579 {
26580 tree save_fndecl = current_function_decl;
26581
26582 /* For nested functions, make sure we have DIEs for the parents first
26583 so that all nested DIEs are generated at the proper scope in the
26584 first shot. */
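/* Illustrative sketch (editorial addition): for GNU C nested
   functions such as

     void outer (void)
     {
       void inner (void) { }   /* nested function */
       inner ();
     }

   we may be handed "inner" before "outer" has a DIE; recursing on
   the context first parents inner's DIE under outer's subprogram DIE.  */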
26585 tree context = decl_function_context (decl);
26586 if (context != NULL)
26587 {
26588 dw_die_ref context_die = lookup_decl_die (context);
26589 current_function_decl = context;
26590
26591 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26592 enough so that it lands in its own context. This avoids type
26593 pruning issues later on. */
26594 if (context_die == NULL || is_declaration_die (context_die))
26595 dwarf2out_early_global_decl (context);
26596 }
26597
26598 /* Emit an abstract origin of a function first. This happens
26599 with C++ constructor clones for example and makes
26600 dwarf2out_abstract_function happy which requires the early
26601 DIE of the abstract instance to be present. */
26602 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26603 dw_die_ref origin_die;
26604 if (origin != NULL
26605 /* Do not emit the DIE multiple times but make sure to
26606 process it fully here in case we just saw a declaration. */
26607 && ((origin_die = lookup_decl_die (origin)) == NULL
26608 || is_declaration_die (origin_die)))
26609 {
26610 current_function_decl = origin;
26611 dwarf2out_decl (origin);
26612 }
26613
26614 /* Emit the DIE for decl but avoid doing that multiple times. */
26615 dw_die_ref old_die;
26616 if ((old_die = lookup_decl_die (decl)) == NULL
26617 || is_declaration_die (old_die))
26618 {
26619 current_function_decl = decl;
26620 dwarf2out_decl (decl);
26621 }
26622
26623 current_function_decl = save_fndecl;
26624 }
26625 else
26626 dwarf2out_decl (decl);
26627 }
26628 symtab->global_info_ready = save;
26629 }
26630
26631 /* Return whether EXPR is an expression with the following pattern:
26632 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
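/* Illustrative sketch (editorial addition): a DECL_VALUE_EXPR of the form

     *(int *) 0x1234

   i.e. an INDIRECT_REF of a NOP_EXPR wrapping an INTEGER_CST, matches this
   pattern.  Such an absolute address needs no relocation, so emitting a
   location for it is safe even for otherwise optimized-out symbols in LTO
   objects (see dwarf2out_late_global_decl below).  */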
26633
26634 static bool
26635 is_trivial_indirect_ref (tree expr)
26636 {
26637 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26638 return false;
26639
26640 tree nop = TREE_OPERAND (expr, 0);
26641 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26642 return false;
26643
26644 tree int_cst = TREE_OPERAND (nop, 0);
26645 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26646 }
26647
26648 /* Output debug information for global decl DECL. Called from
26649 toplev.c after compilation proper has finished. */
26650
26651 static void
26652 dwarf2out_late_global_decl (tree decl)
26653 {
26654 /* Fill-in any location information we were unable to determine
26655 on the first pass. */
26656 if (VAR_P (decl))
26657 {
26658 dw_die_ref die = lookup_decl_die (decl);
26659
26660 /* We may have to generate full debug late for LTO in case debug
26661 was not enabled at compile-time or the target doesn't support
26662 the LTO early debug scheme. */
26663 if (! die && in_lto_p)
26664 dwarf2out_decl (decl);
26665 else if (die)
26666 {
26667 /* We get called via the symtab code invoking late_global_decl
26668 for symbols that are optimized out.
26669
26670 Do not add locations for those, except if they have a
26671 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26672 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26673 INDIRECT_REF expression, as this could generate relocations to
26674 text symbols in LTO object files, which is invalid. */
26675 varpool_node *node = varpool_node::get (decl);
26676 if ((! node || ! node->definition)
26677 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26678 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26679 tree_add_const_value_attribute_for_decl (die, decl);
26680 else
26681 add_location_or_const_value_attribute (die, decl, false);
26682 }
26683 }
26684 }
26685
26686 /* Output debug information for type decl DECL. Called from toplev.c
26687 and from language front ends (to record built-in types). */
26688 static void
26689 dwarf2out_type_decl (tree decl, int local)
26690 {
26691 if (!local)
26692 {
26693 set_early_dwarf s;
26694 dwarf2out_decl (decl);
26695 }
26696 }
26697
26698 /* Output debug information for imported module or decl DECL.
26699 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26700 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26701 that DECL belongs to.
26702 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26703 static void
26704 dwarf2out_imported_module_or_decl_1 (tree decl,
26705 tree name,
26706 tree lexical_block,
26707 dw_die_ref lexical_block_die)
26708 {
26709 expanded_location xloc;
26710 dw_die_ref imported_die = NULL;
26711 dw_die_ref at_import_die;
26712
26713 if (TREE_CODE (decl) == IMPORTED_DECL)
26714 {
26715 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26716 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26717 gcc_assert (decl);
26718 }
26719 else
26720 xloc = expand_location (input_location);
26721
26722 if (TREE_CODE (decl) == TYPE_DECL)
26723 {
26724 at_import_die = force_type_die (TREE_TYPE (decl));
26725 /* For namespace N { typedef void T; } using N::T; base_type_die
26726 returns NULL, but DW_TAG_imported_declaration requires
26727 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26728 if (!at_import_die)
26729 {
26730 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26731 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26732 at_import_die = lookup_type_die (TREE_TYPE (decl));
26733 gcc_assert (at_import_die);
26734 }
26735 }
26736 else
26737 {
26738 at_import_die = lookup_decl_die (decl);
26739 if (!at_import_die)
26740 {
26741 /* If we're trying to avoid duplicate debug info, we may not have
26742 emitted the member decl for this field. Emit it now. */
26743 if (TREE_CODE (decl) == FIELD_DECL)
26744 {
26745 tree type = DECL_CONTEXT (decl);
26746
26747 if (TYPE_CONTEXT (type)
26748 && TYPE_P (TYPE_CONTEXT (type))
26749 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26750 DINFO_USAGE_DIR_USE))
26751 return;
26752 gen_type_die_for_member (type, decl,
26753 get_context_die (TYPE_CONTEXT (type)));
26754 }
26755 if (TREE_CODE (decl) == CONST_DECL)
26756 {
26757 /* Individual enumerators of an enum type do not get output here
26758 (see gen_decl_die), so we cannot call force_decl_die. */
26759 if (!is_fortran () && !is_ada () && !is_dlang ())
26760 return;
26761 }
26762 if (TREE_CODE (decl) == NAMELIST_DECL)
26763 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26764 get_context_die (DECL_CONTEXT (decl)),
26765 NULL_TREE);
26766 else
26767 at_import_die = force_decl_die (decl);
26768 }
26769 }
26770
26771 if (TREE_CODE (decl) == NAMESPACE_DECL)
26772 {
26773 if (dwarf_version >= 3 || !dwarf_strict)
26774 imported_die = new_die (DW_TAG_imported_module,
26775 lexical_block_die,
26776 lexical_block);
26777 else
26778 return;
26779 }
26780 else
26781 imported_die = new_die (DW_TAG_imported_declaration,
26782 lexical_block_die,
26783 lexical_block);
26784
26785 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26786 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26787 if (debug_column_info && xloc.column)
26788 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26789 if (name)
26790 add_AT_string (imported_die, DW_AT_name,
26791 IDENTIFIER_POINTER (name));
26792 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26793 }
26794
26795 /* Output debug information for imported module or decl DECL.
26796 NAME is the non-NULL name in the context if the decl has been renamed.
26797 CHILD is true if decl is one of the renamed decls as part of
26798 importing whole module.
26799 IMPLICIT is set if this hook is called for an implicit import
26800 such as inline namespace. */
26801
26802 static void
26803 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26804 bool child, bool implicit)
26805 {
26806 /* dw_die_ref at_import_die; */
26807 dw_die_ref scope_die;
26808
26809 if (debug_info_level <= DINFO_LEVEL_TERSE)
26810 return;
26811
26812 gcc_assert (decl);
26813
26814 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26815 should be enough; for DWARF4 and older, even if we emit
26816 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26817 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
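/* Illustrative sketch (editorial addition): for C++ source like

     inline namespace v1 { int x; }

   the front end calls this hook with IMPLICIT set; with -gdwarf-5 the
   DW_AT_export_symbols flag on the namespace DIE already conveys the
   implicit using-directive, so the early return below skips the extra
   DW_TAG_imported_module.  */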
26818 if (implicit
26819 && dwarf_version >= 5
26820 && lang_hooks.decls.decl_dwarf_attribute (decl,
26821 DW_AT_export_symbols) == 1)
26822 return;
26823
26824 set_early_dwarf s;
26825
26826 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26827 two DIEs: the DIE of the imported decl, referenced via DW_AT_import,
26828 and the scope DIE the import appears in. */
26829
26830 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26831 module or decl. If no DIE is found for a non-global, force a new DIE. */
26832 if (context
26833 && TYPE_P (context)
26834 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26835 return;
26836
26837 scope_die = get_context_die (context);
26838
26839 if (child)
26840 {
26841 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26842 there is nothing we can do here. */
26843 if (dwarf_version < 3 && dwarf_strict)
26844 return;
26845
26846 gcc_assert (scope_die->die_child);
26847 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26848 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26849 scope_die = scope_die->die_child;
26850 }
26851
26852 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26853 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26854 }
26855
26856 /* Output debug information for namelists. */
26857
26858 static dw_die_ref
26859 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26860 {
26861 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26862 tree value;
26863 unsigned i;
26864
26865 if (debug_info_level <= DINFO_LEVEL_TERSE)
26866 return NULL;
26867
26868 gcc_assert (scope_die != NULL);
26869 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26870 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26871
26872 /* If there are no item_decls, we have a nondefining namelist, e.g.
26873 with USE association; hence, set DW_AT_declaration. */
26874 if (item_decls == NULL_TREE)
26875 {
26876 add_AT_flag (nml_die, DW_AT_declaration, 1);
26877 return nml_die;
26878 }
26879
26880 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26881 {
26882 nml_item_ref_die = lookup_decl_die (value);
26883 if (!nml_item_ref_die)
26884 nml_item_ref_die = force_decl_die (value);
26885
26886 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26887 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26888 }
26889 return nml_die;
26890 }
26891
26892
26893 /* Write the debugging output for DECL under an appropriate context DIE. */
26894
26895 static void
26896 dwarf2out_decl (tree decl)
26897 {
26898 dw_die_ref context_die = comp_unit_die ();
26899
26900 switch (TREE_CODE (decl))
26901 {
26902 case ERROR_MARK:
26903 return;
26904
26905 case FUNCTION_DECL:
26906 /* If we're a nested function, initially use a parent of NULL; if we're
26907 a plain function, this will be fixed up in decls_for_scope. If
26908 we're a method, it will be ignored, since we already have a DIE.
26909 Avoid doing this late though since clones of class methods may
26910 otherwise end up in limbo and create type DIEs late. */
26911 if (early_dwarf
26912 && decl_function_context (decl)
26913 /* But if we're in terse mode, we don't care about scope. */
26914 && debug_info_level > DINFO_LEVEL_TERSE)
26915 context_die = NULL;
26916 break;
26917
26918 case VAR_DECL:
26919 /* For local statics, look up the proper context DIE. */
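/* Illustrative sketch (editorial addition): for

     void f (void) { static int s; }

   DECL_CONTEXT (s) is f, so the DIE for s is parented under f's
   subprogram DIE instead of under the compile unit.  */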
26920 if (local_function_static (decl))
26921 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26922
26923 /* If we are in terse mode, don't generate any DIEs to represent any
26924 variable declarations or definitions unless they are external. */
26925 if (debug_info_level < DINFO_LEVEL_TERSE
26926 || (debug_info_level == DINFO_LEVEL_TERSE
26927 && !TREE_PUBLIC (decl)))
26928 return;
26929 break;
26930
26931 case CONST_DECL:
26932 if (debug_info_level <= DINFO_LEVEL_TERSE)
26933 return;
26934 if (!is_fortran () && !is_ada () && !is_dlang ())
26935 return;
26936 if (TREE_STATIC (decl) && decl_function_context (decl))
26937 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26938 break;
26939
26940 case NAMESPACE_DECL:
26941 case IMPORTED_DECL:
26942 if (debug_info_level <= DINFO_LEVEL_TERSE)
26943 return;
26944 if (lookup_decl_die (decl) != NULL)
26945 return;
26946 break;
26947
26948 case TYPE_DECL:
26949 /* Don't emit stubs for types unless they are needed by other DIEs. */
26950 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26951 return;
26952
26953 /* Don't bother trying to generate any DIEs to represent any of the
26954 normal built-in types for the language we are compiling. */
26955 if (DECL_IS_UNDECLARED_BUILTIN (decl))
26956 return;
26957
26958 /* If we are in terse mode, don't generate any DIEs for types. */
26959 if (debug_info_level <= DINFO_LEVEL_TERSE)
26960 return;
26961
26962 /* If we're a function-scope tag, initially use a parent of NULL;
26963 this will be fixed up in decls_for_scope. */
26964 if (decl_function_context (decl))
26965 context_die = NULL;
26966
26967 break;
26968
26969 case NAMELIST_DECL:
26970 break;
26971
26972 default:
26973 return;
26974 }
26975
26976 gen_decl_die (decl, NULL, NULL, context_die);
26977
26978 if (flag_checking)
26979 {
26980 dw_die_ref die = lookup_decl_die (decl);
26981 if (die)
26982 check_die (die);
26983 }
26984 }
26985
26986 /* Write the debugging output for DECL. */
26987
26988 static void
26989 dwarf2out_function_decl (tree decl)
26990 {
26991 dwarf2out_decl (decl);
26992 call_arg_locations = NULL;
26993 call_arg_loc_last = NULL;
26994 call_site_count = -1;
26995 tail_call_site_count = -1;
26996 decl_loc_table->empty ();
26997 cached_dw_loc_list_table->empty ();
26998 }
26999
27000 /* Output a marker (i.e. a label) for the beginning of the generated code for
27001 a lexical block. */
27002
27003 static void
27004 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
27005 unsigned int blocknum)
27006 {
27007 switch_to_section (current_function_section ());
27008 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
27009 }
27010
27011 /* Output a marker (i.e. a label) for the end of the generated code for a
27012 lexical block. */
27013
27014 static void
27015 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
27016 {
27017 switch_to_section (current_function_section ());
27018 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
27019 }
27020
27021 /* Returns nonzero if it is appropriate not to emit any debugging
27022 information for BLOCK, because it doesn't contain any instructions.
27023
27024 Don't allow this for blocks with nested functions or local classes
27025 as we would end up with orphans, and in the presence of scheduling
27026 we may end up calling them anyway. */
27027
27028 static bool
27029 dwarf2out_ignore_block (const_tree block)
27030 {
27031 tree decl;
27032 unsigned int i;
27033
27034 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
27035 if (TREE_CODE (decl) == FUNCTION_DECL
27036 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27037 return 0;
27038 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
27039 {
27040 decl = BLOCK_NONLOCALIZED_VAR (block, i);
27041 if (TREE_CODE (decl) == FUNCTION_DECL
27042 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27043 return 0;
27044 }
27045
27046 return 1;
27047 }
27048
27049 /* Hash table routines for file_hash. */
27050
27051 bool
27052 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
27053 {
27054 return filename_cmp (p1->filename, p2) == 0;
27055 }
27056
27057 hashval_t
27058 dwarf_file_hasher::hash (dwarf_file_data *p)
27059 {
27060 return htab_hash_string (p->filename);
27061 }
27062
27063 /* Lookup FILE_NAME (in the list of filenames that we know about here in
27064 dwarf2out.c) and return its "index". The index of each (known) filename is
27065 just a unique number which is associated with only that one filename. We
27066 need such numbers for the sake of generating labels (in the .debug_sfnames
27067 section) and references to those file numbers (in the .debug_srcinfo
27068 and .debug_macinfo sections). If the filename given as an argument is not
27069 found in our current list, add it to the list and assign it the next
27070 available unique index number. */
27071
27072 static struct dwarf_file_data *
27073 lookup_filename (const char *file_name)
27074 {
27075 struct dwarf_file_data * created;
27076
27077 if (!file_name)
27078 return NULL;
27079
27080 if (!file_name[0])
27081 file_name = "<stdin>";
27082
27083 dwarf_file_data **slot
27084 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27085 INSERT);
27086 if (*slot)
27087 return *slot;
27088
27089 created = ggc_alloc<dwarf_file_data> ();
27090 created->filename = file_name;
27091 created->emitted_number = 0;
27092 *slot = created;
27093 return created;
27094 }
27095
27096 /* If the assembler will construct the file table, then translate the compiler
27097 internal file table number into the assembler file table number, and emit
27098 a .file directive if we haven't already emitted one. The file table
27099 numbers are different because we prune debug info for unused variables and
27100 types, which may include filenames. */
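/* Illustrative sketch (editorial addition): with assembler-generated line
   tables, the first call for a new file emits a directive along the lines of

     .file 1 "foo.c"

   (the exact number depends on how many files were emitted before), and
   later calls simply return the number that was assigned then.  */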
27101
27102 static int
27103 maybe_emit_file (struct dwarf_file_data * fd)
27104 {
27105 if (! fd->emitted_number)
27106 {
27107 if (last_emitted_file)
27108 fd->emitted_number = last_emitted_file->emitted_number + 1;
27109 else
27110 fd->emitted_number = 1;
27111 last_emitted_file = fd;
27112
27113 if (output_asm_line_debug_info ())
27114 {
27115 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27116 output_quoted_string (asm_out_file,
27117 remap_debug_filename (fd->filename));
27118 fputc ('\n', asm_out_file);
27119 }
27120 }
27121
27122 return fd->emitted_number;
27123 }
27124
27125 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27126 That generation should happen after function debug info has been
27127 generated. The value of the attribute is the constant value of ARG. */
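/* Illustrative sketch (editorial addition): for a C++ instantiation like

     template <int N> struct A { };
     A<5> a;

   the DIE scheduled here would typically be the DW_TAG_template_value_parameter
   for N, with ARG the INTEGER_CST 5 that later becomes its DW_AT_const_value.  */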
27128
27129 static void
27130 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27131 {
27132 die_arg_entry entry;
27133
27134 if (!die || !arg)
27135 return;
27136
27137 gcc_assert (early_dwarf);
27138
27139 if (!tmpl_value_parm_die_table)
27140 vec_alloc (tmpl_value_parm_die_table, 32);
27141
27142 entry.die = die;
27143 entry.arg = arg;
27144 vec_safe_push (tmpl_value_parm_die_table, entry);
27145 }
27146
27147 /* Return TRUE if T is an instance of generic type, FALSE
27148 otherwise. */
27149
27150 static bool
27151 generic_type_p (tree t)
27152 {
27153 if (t == NULL_TREE || !TYPE_P (t))
27154 return false;
27155 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27156 }
27157
27158 /* Schedule the generation of the generic parameter dies for the
27159 instance of generic type T. The proper generation itself is later
27160 done by gen_scheduled_generic_parms_dies. */
27161
27162 static void
27163 schedule_generic_params_dies_gen (tree t)
27164 {
27165 if (!generic_type_p (t))
27166 return;
27167
27168 gcc_assert (early_dwarf);
27169
27170 if (!generic_type_instances)
27171 vec_alloc (generic_type_instances, 256);
27172
27173 vec_safe_push (generic_type_instances, t);
27174 }
27175
27176 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27177 by append_entry_to_tmpl_value_parm_die_table. This function must
27178 be called after function DIEs have been generated. */
27179
27180 static void
27181 gen_remaining_tmpl_value_param_die_attribute (void)
27182 {
27183 if (tmpl_value_parm_die_table)
27184 {
27185 unsigned i, j;
27186 die_arg_entry *e;
27187
27188 /* We do this in two phases - first get the cases we can
27189 handle during early-finish, preserving those we cannot
27190 (containing symbolic constants where we don't yet know
27191 whether we are going to output the referenced symbols).
27192 For those we try again at late-finish. */
27193 j = 0;
27194 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27195 {
27196 if (!e->die->removed
27197 && !tree_add_const_value_attribute (e->die, e->arg))
27198 {
27199 dw_loc_descr_ref loc = NULL;
27200 if (! early_dwarf
27201 && (dwarf_version >= 5 || !dwarf_strict))
27202 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27203 if (loc)
27204 add_AT_loc (e->die, DW_AT_location, loc);
27205 else
27206 (*tmpl_value_parm_die_table)[j++] = *e;
27207 }
27208 }
27209 tmpl_value_parm_die_table->truncate (j);
27210 }
27211 }
27212
27213 /* Generate generic parameters DIEs for instances of generic types
27214 that have been previously scheduled by
27215 schedule_generic_params_dies_gen. This function must be called
27216 after all the types of the CU have been laid out. */
27217
27218 static void
27219 gen_scheduled_generic_parms_dies (void)
27220 {
27221 unsigned i;
27222 tree t;
27223
27224 if (!generic_type_instances)
27225 return;
27226
27227 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27228 if (COMPLETE_TYPE_P (t))
27229 gen_generic_params_dies (t);
27230
27231 generic_type_instances = NULL;
27232 }
27233
27234
27235 /* Replace DW_AT_name for the decl with name. */
27236
27237 static void
27238 dwarf2out_set_name (tree decl, tree name)
27239 {
27240 dw_die_ref die;
27241 dw_attr_node *attr;
27242 const char *dname;
27243
27244 die = TYPE_SYMTAB_DIE (decl);
27245 if (!die)
27246 return;
27247
27248 dname = dwarf2_name (name, 0);
27249 if (!dname)
27250 return;
27251
27252 attr = get_AT (die, DW_AT_name);
27253 if (attr)
27254 {
27255 struct indirect_string_node *node;
27256
27257 node = find_AT_string (dname);
27258 /* Replace the string. */
27259 attr->dw_attr_val.v.val_str = node;
27260 }
27261
27262 else
27263 add_name_attribute (die, dname);
27264 }
27265
27266 /* True if before or during processing of the first function being emitted. */
27267 static bool in_first_function_p = true;
27268 /* True if loc_note during dwarf2out_var_location call might still be
27269 before first real instruction at address equal to .Ltext0. */
27270 static bool maybe_at_text_label_p = true;
27271 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27272 static unsigned int first_loclabel_num_not_at_text_label;
27273
27274 /* Look ahead for a real insn. */
27275
27276 static rtx_insn *
27277 dwarf2out_next_real_insn (rtx_insn *loc_note)
27278 {
27279 rtx_insn *next_real = NEXT_INSN (loc_note);
27280
27281 while (next_real)
27282 if (INSN_P (next_real))
27283 break;
27284 else
27285 next_real = NEXT_INSN (next_real);
27286
27287 return next_real;
27288 }
27289
27290 /* Called by the final INSN scan whenever we see a var location. We
27291 use it to drop labels in the right places, and record the location in
27292 our lookup table. */
27293
27294 static void
27295 dwarf2out_var_location (rtx_insn *loc_note)
27296 {
27297 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27298 struct var_loc_node *newloc;
27299 rtx_insn *next_real;
27300 rtx_insn *call_insn = NULL;
27301 static const char *last_label;
27302 static const char *last_postcall_label;
27303 static bool last_in_cold_section_p;
27304 static rtx_insn *expected_next_loc_note;
27305 tree decl;
27306 bool var_loc_p;
27307 var_loc_view view = 0;
27308
27309 if (!NOTE_P (loc_note))
27310 {
27311 if (CALL_P (loc_note))
27312 {
27313 maybe_reset_location_view (loc_note, cur_line_info_table);
27314 call_site_count++;
27315 if (SIBLING_CALL_P (loc_note))
27316 tail_call_site_count++;
27317 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27318 {
27319 call_insn = loc_note;
27320 loc_note = NULL;
27321 var_loc_p = false;
27322
27323 next_real = dwarf2out_next_real_insn (call_insn);
27324 cached_next_real_insn = NULL;
27325 goto create_label;
27326 }
27327 if (optimize == 0 && !flag_var_tracking)
27328 {
27329 /* When the var-tracking pass is not running, there is no note
27330 for indirect calls whose target is compile-time known. In this
27331 case, process such calls specifically so that we generate call
27332 sites for them anyway. */
27333 rtx x = PATTERN (loc_note);
27334 if (GET_CODE (x) == PARALLEL)
27335 x = XVECEXP (x, 0, 0);
27336 if (GET_CODE (x) == SET)
27337 x = SET_SRC (x);
27338 if (GET_CODE (x) == CALL)
27339 x = XEXP (x, 0);
27340 if (!MEM_P (x)
27341 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27342 || !SYMBOL_REF_DECL (XEXP (x, 0))
27343 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27344 != FUNCTION_DECL))
27345 {
27346 call_insn = loc_note;
27347 loc_note = NULL;
27348 var_loc_p = false;
27349
27350 next_real = dwarf2out_next_real_insn (call_insn);
27351 cached_next_real_insn = NULL;
27352 goto create_label;
27353 }
27354 }
27355 }
27356 else if (!debug_variable_location_views)
27357 gcc_unreachable ();
27358 else
27359 maybe_reset_location_view (loc_note, cur_line_info_table);
27360
27361 return;
27362 }
27363
27364 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27365 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27366 return;
27367
27368 /* Optimize processing a large consecutive sequence of location
27369 notes so we don't spend too much time in next_real_insn. If the
27370 next insn is another location note, remember the next_real_insn
27371 calculation for next time. */
27372 next_real = cached_next_real_insn;
27373 if (next_real)
27374 {
27375 if (expected_next_loc_note != loc_note)
27376 next_real = NULL;
27377 }
27378
27379 if (! next_real)
27380 next_real = dwarf2out_next_real_insn (loc_note);
27381
27382 if (next_real)
27383 {
27384 rtx_insn *next_note = NEXT_INSN (loc_note);
27385 while (next_note != next_real)
27386 {
27387 if (! next_note->deleted ()
27388 && NOTE_P (next_note)
27389 && NOTE_KIND (next_note) == NOTE_INSN_VAR_LOCATION)
27390 break;
27391 next_note = NEXT_INSN (next_note);
27392 }
27393
27394 if (next_note == next_real)
27395 cached_next_real_insn = NULL;
27396 else
27397 {
27398 expected_next_loc_note = next_note;
27399 cached_next_real_insn = next_real;
27400 }
27401 }
27402 else
27403 cached_next_real_insn = NULL;
27404
27405 /* If there are no instructions which would be affected by this note,
27406 don't do anything. */
27407 if (var_loc_p
27408 && next_real == NULL_RTX
27409 && !NOTE_DURING_CALL_P (loc_note))
27410 return;
27411
27412 create_label:
27413
27414 if (next_real == NULL_RTX)
27415 next_real = get_last_insn ();
27416
27417 /* If there were any real insns between the note we processed last time
27418 and this note (or if this is the first note), clear
27419 last_{,postcall_}label so that they are not reused this time. */
27420 if (last_var_location_insn == NULL_RTX
27421 || last_var_location_insn != next_real
27422 || last_in_cold_section_p != in_cold_section_p)
27423 {
27424 last_label = NULL;
27425 last_postcall_label = NULL;
27426 }
27427
27428 if (var_loc_p)
27429 {
27430 const char *label
27431 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27432 view = cur_line_info_table->view;
27433 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27434 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27435 if (newloc == NULL)
27436 return;
27437 }
27438 else
27439 {
27440 decl = NULL_TREE;
27441 newloc = NULL;
27442 }
27443
27444 /* If there were no real insns between the note we processed last time
27445 and this note, use the label we emitted last time. Otherwise
27446 create a new label and emit it. */
27447 if (last_label == NULL)
27448 {
27449 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27450 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27451 loclabel_num++;
27452 last_label = ggc_strdup (loclabel);
27453 /* See if loclabel might be equal to .Ltext0. If yes,
27454 bump first_loclabel_num_not_at_text_label. */
27455 if (!have_multiple_function_sections
27456 && in_first_function_p
27457 && maybe_at_text_label_p)
27458 {
27459 static rtx_insn *last_start;
27460 rtx_insn *insn;
27461 for (insn = loc_note; insn; insn = previous_insn (insn))
27462 if (insn == last_start)
27463 break;
27464 else if (!NONDEBUG_INSN_P (insn))
27465 continue;
27466 else
27467 {
27468 rtx body = PATTERN (insn);
27469 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27470 continue;
27471 /* Inline asm could occupy zero bytes. */
27472 else if (GET_CODE (body) == ASM_INPUT
27473 || asm_noperands (body) >= 0)
27474 continue;
27475 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27476 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27477 continue;
27478 #endif
27479 else
27480 {
27481 /* Assume insn has non-zero length. */
27482 maybe_at_text_label_p = false;
27483 break;
27484 }
27485 }
27486 if (maybe_at_text_label_p)
27487 {
27488 last_start = loc_note;
27489 first_loclabel_num_not_at_text_label = loclabel_num;
27490 }
27491 }
27492 }
27493
27494 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27495 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27496
27497 if (!var_loc_p)
27498 {
27499 struct call_arg_loc_node *ca_loc
27500 = ggc_cleared_alloc<call_arg_loc_node> ();
27501 rtx_insn *prev = call_insn;
27502
27503 ca_loc->call_arg_loc_note
27504 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27505 ca_loc->next = NULL;
27506 ca_loc->label = last_label;
27507 gcc_assert (prev
27508 && (CALL_P (prev)
27509 || (NONJUMP_INSN_P (prev)
27510 && GET_CODE (PATTERN (prev)) == SEQUENCE
27511 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27512 if (!CALL_P (prev))
27513 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27514 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27515
27516 /* Look for a SYMBOL_REF in the "prev" instruction. */
27517 rtx x = get_call_rtx_from (prev);
27518 if (x)
27519 {
27520 /* Try to get the call symbol, if any. */
27521 if (MEM_P (XEXP (x, 0)))
27522 x = XEXP (x, 0);
27523 /* First, look for a memory access to a symbol_ref. */
27524 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27525 && SYMBOL_REF_DECL (XEXP (x, 0))
27526 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27527 ca_loc->symbol_ref = XEXP (x, 0);
27528 /* Otherwise, look at a compile-time known user-level function
27529 declaration. */
27530 else if (MEM_P (x)
27531 && MEM_EXPR (x)
27532 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27533 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27534 }
27535
27536 ca_loc->block = insn_scope (prev);
27537 if (call_arg_locations)
27538 call_arg_loc_last->next = ca_loc;
27539 else
27540 call_arg_locations = ca_loc;
27541 call_arg_loc_last = ca_loc;
27542 }
27543 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27544 {
27545 newloc->label = last_label;
27546 newloc->view = view;
27547 }
27548 else
27549 {
27550 if (!last_postcall_label)
27551 {
27552 sprintf (loclabel, "%s-1", last_label);
27553 last_postcall_label = ggc_strdup (loclabel);
27554 }
27555 newloc->label = last_postcall_label;
27556 /* ??? This view is at last_label, not last_label-1, but we
27557 could only assume view at last_label-1 is zero if we could
27558 assume calls always have length greater than one. This is
27559 probably true in general, though there might be a rare
27560 exception to this rule, e.g. if a call insn is optimized out
27561 by target magic. Then, even the -1 in the label will be
27562 wrong, which might invalidate the range. Anyway, using view,
27563 though technically possibly incorrect, will work as far as
27564 ranges go: since L-1 is in the middle of the call insn,
27565 (L-1).0 and (L-1).V shouldn't make any difference, and having
27566 the loclist entry refer to the .loc entry might be useful, so
27567 leave it like this. */
27568 newloc->view = view;
27569 }
27570
27571 if (var_loc_p && flag_debug_asm)
27572 {
27573 const char *name, *sep, *patstr;
27574 if (decl && DECL_NAME (decl))
27575 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27576 else
27577 name = "";
27578 if (NOTE_VAR_LOCATION_LOC (loc_note))
27579 {
27580 sep = " => ";
27581 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27582 }
27583 else
27584 {
27585 sep = " ";
27586 patstr = "RESET";
27587 }
27588 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27589 name, sep, patstr);
27590 }
27591
27592 last_var_location_insn = next_real;
27593 last_in_cold_section_p = in_cold_section_p;
27594 }
27595
27596 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27597 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27598 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27599 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27600 BLOCK_FRAGMENT_ORIGIN links. */
27601 static bool
27602 block_within_block_p (tree block, tree outer, bool bothways)
27603 {
27604 if (block == outer)
27605 return true;
27606
27607 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27608 for (tree context = BLOCK_SUPERCONTEXT (block);
27609 context != outer;
27610 context = BLOCK_SUPERCONTEXT (context))
27611 if (!context || TREE_CODE (context) != BLOCK)
27612 return false;
27613
27614 if (!bothways)
27615 return true;
27616
27617 /* Now check that each block is actually referenced by its
27618 parent. */
27619 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27620 context = BLOCK_SUPERCONTEXT (context))
27621 {
27622 if (BLOCK_FRAGMENT_ORIGIN (context))
27623 {
27624 gcc_assert (!BLOCK_SUBBLOCKS (context));
27625 context = BLOCK_FRAGMENT_ORIGIN (context);
27626 }
27627 for (tree sub = BLOCK_SUBBLOCKS (context);
27628 sub != block;
27629 sub = BLOCK_CHAIN (sub))
27630 if (!sub)
27631 return false;
27632 if (context == outer)
27633 return true;
27634 else
27635 block = context;
27636 }
27637 }
27638
27639 /* Called during final while assembling the marker of the entry point
27640 for an inlined function. */
27641
27642 static void
27643 dwarf2out_inline_entry (tree block)
27644 {
27645 gcc_assert (debug_inline_points);
27646
27647 /* If we can't represent it, don't bother. */
27648 if (!(dwarf_version >= 3 || !dwarf_strict))
27649 return;
27650
27651 gcc_assert (DECL_P (block_ultimate_origin (block)));
27652
27653 /* Sanity check the block tree. This would catch a case in which
27654 BLOCK got removed from the tree reachable from the outermost
27655 lexical block, but got retained in markers. It would still link
27656 back to its parents, but some ancestor would be missing a link
27657 down the path to the sub BLOCK. If the block got removed, its
27658 BLOCK_NUMBER will not be a usable value. */
27659 if (flag_checking)
27660 gcc_assert (block_within_block_p (block,
27661 DECL_INITIAL (current_function_decl),
27662 true));
27663
27664 gcc_assert (inlined_function_outer_scope_p (block));
27665 gcc_assert (!lookup_block_die (block));
27666
27667 if (BLOCK_FRAGMENT_ORIGIN (block))
27668 block = BLOCK_FRAGMENT_ORIGIN (block);
27669 /* Can the entry point ever not be at the beginning of an
27670 unfragmented lexical block? */
27671 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27672 || (cur_line_info_table
27673 && !ZERO_VIEW_P (cur_line_info_table->view))))
27674 return;
27675
27676 if (!inline_entry_data_table)
27677 inline_entry_data_table
27678 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27679
27680
27681 inline_entry_data **iedp
27682 = inline_entry_data_table->find_slot_with_hash (block,
27683 htab_hash_pointer (block),
27684 INSERT);
27685 if (*iedp)
27686 /* ??? Ideally, we'd record all entry points for the same inlined
27687 function (some may have been duplicated by e.g. unrolling), but
27688 we have no way to represent that ATM. */
27689 return;
27690
27691 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27692 ied->block = block;
27693 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27694 ied->label_num = BLOCK_NUMBER (block);
27695 if (cur_line_info_table)
27696 ied->view = cur_line_info_table->view;
27697
27698 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
27699 BLOCK_NUMBER (block));
27700 }
27701
27702 /* Called from finalize_size_functions for size functions so that their body
27703 can be encoded in the debug info to describe the layout of variable-length
27704 structures. */
27705
27706 static void
27707 dwarf2out_size_function (tree decl)
27708 {
27709 set_early_dwarf s;
27710 function_to_dwarf_procedure (decl);
27711 }
27712
27713 /* Note in one location list that the text section has changed. */
27714
27715 int
27716 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27717 {
27718 var_loc_list *list = *slot;
27719 if (list->first)
27720 list->last_before_switch
27721 = list->last->next ? list->last->next : list->last;
27722 return 1;
27723 }
27724
27725 /* Note in all location lists that the text section has changed. */
27726
27727 static void
27728 var_location_switch_text_section (void)
27729 {
27730 if (decl_loc_table == NULL)
27731 return;
27732
27733 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27734 }
27735
27736 /* Create a new line number table. */
27737
27738 static dw_line_info_table *
27739 new_line_info_table (void)
27740 {
27741 dw_line_info_table *table;
27742
27743 table = ggc_cleared_alloc<dw_line_info_table> ();
27744 table->file_num = 1;
27745 table->line_num = 1;
27746 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27747 FORCE_RESET_NEXT_VIEW (table->view);
27748 table->symviews_since_reset = 0;
27749
27750 return table;
27751 }
27752
27753 /* Look up (or create) the line info table for section SEC and make it
27754 the "current" table, so that we don't have to do it for every source line. */
27755
27756 static void
27757 set_cur_line_info_table (section *sec)
27758 {
27759 dw_line_info_table *table;
27760
27761 if (sec == text_section)
27762 table = text_section_line_info;
27763 else if (sec == cold_text_section)
27764 {
27765 table = cold_text_section_line_info;
27766 if (!table)
27767 {
27768 cold_text_section_line_info = table = new_line_info_table ();
27769 table->end_label = cold_end_label;
27770 }
27771 }
27772 else
27773 {
27774 const char *end_label;
27775
27776 if (crtl->has_bb_partition)
27777 {
27778 if (in_cold_section_p)
27779 end_label = crtl->subsections.cold_section_end_label;
27780 else
27781 end_label = crtl->subsections.hot_section_end_label;
27782 }
27783 else
27784 {
27785 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27786 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27787 current_function_funcdef_no);
27788 end_label = ggc_strdup (label);
27789 }
27790
27791 table = new_line_info_table ();
27792 table->end_label = end_label;
27793
27794 vec_safe_push (separate_line_info, table);
27795 }
27796
27797 if (output_asm_line_debug_info ())
27798 table->is_stmt = (cur_line_info_table
27799 ? cur_line_info_table->is_stmt
27800 : DWARF_LINE_DEFAULT_IS_STMT_START);
27801 cur_line_info_table = table;
27802 }
27803
27804
27805 /* We need to reset the locations at the beginning of each
27806 function. We can't do this in the end_function hook, because the
27807 declarations that use the locations won't have been output when
27808 that hook is called. Also compute have_multiple_function_sections here. */
27809
27810 static void
27811 dwarf2out_begin_function (tree fun)
27812 {
27813 section *sec = function_section (fun);
27814
27815 if (sec != text_section)
27816 have_multiple_function_sections = true;
27817
27818 if (crtl->has_bb_partition && !cold_text_section)
27819 {
27820 gcc_assert (current_function_decl == fun);
27821 cold_text_section = unlikely_text_section ();
27822 switch_to_section (cold_text_section);
27823 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27824 switch_to_section (sec);
27825 }
27826
27827 dwarf2out_note_section_used ();
27828 call_site_count = 0;
27829 tail_call_site_count = 0;
27830
27831 set_cur_line_info_table (sec);
27832 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27833 }
27834
27835 /* Helper function of dwarf2out_end_function, called only after emitting
27836 the very first function into assembly. Check if some .debug_loc range
27837 might end with a .LVL* label that could be equal to .Ltext0.
27838 In that case we must force using absolute addresses in .debug_loc ranges,
27839 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27840 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27841 list terminator.
27842 Set have_multiple_function_sections to true in that case and
27843 terminate htab traversal. */
27844
27845 int
27846 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27847 {
27848 var_loc_list *entry = *slot;
27849 struct var_loc_node *node;
27850
27851 node = entry->first;
27852 if (node && node->next && node->next->label)
27853 {
27854 unsigned int i;
27855 const char *label = node->next->label;
27856 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27857
27858 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27859 {
27860 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27861 if (strcmp (label, loclabel) == 0)
27862 {
27863 have_multiple_function_sections = true;
27864 return 0;
27865 }
27866 }
27867 }
27868 return 1;
27869 }
27870
27871 /* Hook called after emitting a function into assembly.
27872 This does something only for the very first function emitted. */
27873
27874 static void
27875 dwarf2out_end_function (unsigned int)
27876 {
27877 if (in_first_function_p
27878 && !have_multiple_function_sections
27879 && first_loclabel_num_not_at_text_label
27880 && decl_loc_table)
27881 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27882 in_first_function_p = false;
27883 maybe_at_text_label_p = false;
27884 }
27885
27886 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27887 front-ends register a translation unit even before dwarf2out_init is
27888 called. */
27889 static tree main_translation_unit = NULL_TREE;
27890
27891 /* Hook called by front ends after they have built their main translation
27892 unit. Associate comp_unit_die () with UNIT. */
27893
27894 static void
27895 dwarf2out_register_main_translation_unit (tree unit)
27896 {
27897 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27898 && main_translation_unit == NULL_TREE);
27899 main_translation_unit = unit;
27900 /* If dwarf2out_init has not been called yet, it will perform the association
27901 itself by looking at main_translation_unit. */
27902 if (decl_die_table != NULL)
27903 equate_decl_number_to_die (unit, comp_unit_die ());
27904 }
27905
27906 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27907
27908 static void
27909 push_dw_line_info_entry (dw_line_info_table *table,
27910 enum dw_line_info_opcode opcode, unsigned int val)
27911 {
27912 dw_line_info_entry e;
27913 e.opcode = opcode;
27914 e.val = val;
27915 vec_safe_push (table->entries, e);
27916 }
27917
27918 /* Output a label to mark the beginning of a source code line entry
27919 and record information relating to this source line, in
27920 'line_info_table' for later output of the .debug_line section. */
27921 /* ??? The discriminator parameter ought to be unsigned. */
27922
27923 static void
27924 dwarf2out_source_line (unsigned int line, unsigned int column,
27925 const char *filename,
27926 int discriminator, bool is_stmt)
27927 {
27928 unsigned int file_num;
27929 dw_line_info_table *table;
27930 static var_loc_view lvugid;
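/* Counter used to hand out globally unique view ids when the assembler
   computes the actual view numbers for us (see the "++lvugid" uses below).  */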
27931
27932 if (debug_info_level < DINFO_LEVEL_TERSE)
27933 return;
27934
27935 table = cur_line_info_table;
27936
27937 if (line == 0)
27938 {
27939 if (debug_variable_location_views
27940 && output_asm_line_debug_info ()
27941 && table && !RESETTING_VIEW_P (table->view))
27942 {
27943 /* If we're using the assembler to compute view numbers, we
27944 can't issue a .loc directive for line zero, so we can't
27945 get a view number at this point. We might attempt to
27946 compute it from the previous view, or equate it to a
27947 subsequent view (though it might not be there!), but
27948 since we're omitting the line number entry, we might as
27949 well omit the view number as well. That means pretending
27950 it's a view number zero, which might very well turn out
27951 to be correct. ??? Extend the assembler so that the
27952 compiler could emit e.g. ".locview .LVU#", to output a
27953 view without changing line number information. We'd then
27954 have to count it in symviews_since_reset; when it's omitted,
27955 it doesn't count. */
27956 if (!zero_view_p)
27957 zero_view_p = BITMAP_GGC_ALLOC ();
27958 bitmap_set_bit (zero_view_p, table->view);
27959 if (flag_debug_asm)
27960 {
27961 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27962 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27963 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27964 ASM_COMMENT_START);
27965 assemble_name (asm_out_file, label);
27966 putc ('\n', asm_out_file);
27967 }
27968 table->view = ++lvugid;
27969 }
27970 return;
27971 }
27972
27973 /* The discriminator column was added in DWARF 4. Simplify the logic
27974 below by clearing it when we're not supposed to output it. */
27975 if (dwarf_version < 4 && dwarf_strict)
27976 discriminator = 0;
27977
27978 if (!debug_column_info)
27979 column = 0;
27980
27981 file_num = maybe_emit_file (lookup_filename (filename));
27982
27983 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27984 the debugger has used the second (possibly duplicate) line number
27985 at the beginning of the function to mark the end of the prologue.
27986 We could eliminate any other duplicates within the function. For
27987 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27988 that second line number entry. */
27989 /* Recall that this end-of-prologue indication is *not* the same thing
27990 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27991 to which the hook corresponds, follows the last insn that was
27992 emitted by gen_prologue. What we need is to precede the first insn
27993 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27994 insn that corresponds to something the user wrote. These may be
27995 very different locations once scheduling is enabled. */
27996
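/* Note that the early-return check below is intentionally disabled (the
   "0 &&"), presumably pending the duplicate-elision TODO above.  */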
27997 if (0 && file_num == table->file_num
27998 && line == table->line_num
27999 && column == table->column_num
28000 && discriminator == table->discrim_num
28001 && is_stmt == table->is_stmt)
28002 return;
28003
28004 switch_to_section (current_function_section ());
28005
28006 /* If requested, emit something human-readable. */
28007 if (flag_debug_asm)
28008 {
28009 if (debug_column_info)
28010 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
28011 filename, line, column);
28012 else
28013 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
28014 filename, line);
28015 }
28016
28017 if (output_asm_line_debug_info ())
28018 {
28019 /* Emit the .loc directive understood by GNU as. */
28020 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
28021 file_num, line, is_stmt, discriminator */
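/* For example (illustrative only), with column info and view tracking
   enabled the directive assembled below might read:
     .loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5  */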
28022 fputs ("\t.loc ", asm_out_file);
28023 fprint_ul (asm_out_file, file_num);
28024 putc (' ', asm_out_file);
28025 fprint_ul (asm_out_file, line);
28026 putc (' ', asm_out_file);
28027 fprint_ul (asm_out_file, column);
28028
28029 if (is_stmt != table->is_stmt)
28030 {
28031 #if HAVE_GAS_LOC_STMT
28032 fputs (" is_stmt ", asm_out_file);
28033 putc (is_stmt ? '1' : '0', asm_out_file);
28034 #endif
28035 }
28036 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
28037 {
28038 gcc_assert (discriminator > 0);
28039 fputs (" discriminator ", asm_out_file);
28040 fprint_ul (asm_out_file, (unsigned long) discriminator);
28041 }
28042 if (debug_variable_location_views)
28043 {
28044 if (!RESETTING_VIEW_P (table->view))
28045 {
28046 table->symviews_since_reset++;
28047 if (table->symviews_since_reset > symview_upper_bound)
28048 symview_upper_bound = table->symviews_since_reset;
28049 /* When we're using the assembler to compute view
28050 numbers, we output symbolic labels after "view" in
28051 .loc directives, and the assembler will set them for
28052 us, so that we can refer to the view numbers in
28053 location lists. The only exceptions are when we know
28054 a view will be zero: "-0" is a forced reset, used
28055 e.g. in the beginning of functions, whereas "0" tells
28056 the assembler to check that there was a PC change
28057 since the previous view, in a way that implicitly
28058 resets the next view. */
28059 fputs (" view ", asm_out_file);
28060 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28061 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
28062 assemble_name (asm_out_file, label);
28063 table->view = ++lvugid;
28064 }
28065 else
28066 {
28067 table->symviews_since_reset = 0;
28068 if (FORCE_RESETTING_VIEW_P (table->view))
28069 fputs (" view -0", asm_out_file);
28070 else
28071 fputs (" view 0", asm_out_file);
28072 /* Mark the present view as a zero view. Earlier debug
28073 binds may have already added its id to loclists to be
28074 emitted later, so we can't reuse the id for something
28075 else. However, it's good to know whether a view is
28076 known to be zero, because then we may be able to
28077 optimize out locviews that are all zeros, so take
28078 note of it in zero_view_p. */
28079 if (!zero_view_p)
28080 zero_view_p = BITMAP_GGC_ALLOC ();
28081 bitmap_set_bit (zero_view_p, lvugid);
28082 table->view = ++lvugid;
28083 }
28084 }
28085 putc ('\n', asm_out_file);
28086 }
28087 else
28088 {
28089 unsigned int label_num = ++line_info_label_num;
28090
28091 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28092
28093 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28094 push_dw_line_info_entry (table, LI_adv_address, label_num);
28095 else
28096 push_dw_line_info_entry (table, LI_set_address, label_num);
28097 if (debug_variable_location_views)
28098 {
28099 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28100 if (resetting)
28101 table->view = 0;
28102
28103 if (flag_debug_asm)
28104 fprintf (asm_out_file, "\t%s view %s%d\n",
28105 ASM_COMMENT_START,
28106 resetting ? "-" : "",
28107 table->view);
28108
28109 table->view++;
28110 }
28111 if (file_num != table->file_num)
28112 push_dw_line_info_entry (table, LI_set_file, file_num);
28113 if (discriminator != table->discrim_num)
28114 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28115 if (is_stmt != table->is_stmt)
28116 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28117 push_dw_line_info_entry (table, LI_set_line, line);
28118 if (debug_column_info)
28119 push_dw_line_info_entry (table, LI_set_column, column);
28120 }
28121
28122 table->file_num = file_num;
28123 table->line_num = line;
28124 table->column_num = column;
28125 table->discrim_num = discriminator;
28126 table->is_stmt = is_stmt;
28127 table->in_use = true;
28128 }
28129
28130 /* Record the beginning of a new source file. */
28131
28132 static void
28133 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28134 {
28135 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28136 {
28137 macinfo_entry e;
28138 e.code = DW_MACINFO_start_file;
28139 e.lineno = lineno;
28140 e.info = ggc_strdup (filename);
28141 vec_safe_push (macinfo_table, e);
28142 }
28143 }
28144
28145 /* Record the end of a source file. */
28146
28147 static void
28148 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28149 {
28150 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28151 {
28152 macinfo_entry e;
28153 e.code = DW_MACINFO_end_file;
28154 e.lineno = lineno;
28155 e.info = NULL;
28156 vec_safe_push (macinfo_table, e);
28157 }
28158 }
28159
28160 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28161 the tail of the directive line, i.e. the part past the initial
28162 whitespace, '#', whitespace, directive name, and whitespace. */
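/* For instance (illustrative), for "#define PI 3.14159" the buffer would
   contain "PI 3.14159".  */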
28163
28164 static void
28165 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28166 const char *buffer ATTRIBUTE_UNUSED)
28167 {
28168 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28169 {
28170 macinfo_entry e;
28171 /* Insert a dummy first entry to be able to optimize the whole
28172 predefined macro block using DW_MACRO_import. */
28173 if (macinfo_table->is_empty () && lineno <= 1)
28174 {
28175 e.code = 0;
28176 e.lineno = 0;
28177 e.info = NULL;
28178 vec_safe_push (macinfo_table, e);
28179 }
28180 e.code = DW_MACINFO_define;
28181 e.lineno = lineno;
28182 e.info = ggc_strdup (buffer);
28183 vec_safe_push (macinfo_table, e);
28184 }
28185 }
28186
28187 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28188 the tail of the directive line, i.e. the part past the initial
28189 whitespace, '#', whitespace, directive name, and whitespace. */
28190
28191 static void
28192 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28193 const char *buffer ATTRIBUTE_UNUSED)
28194 {
28195 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28196 {
28197 macinfo_entry e;
28198 /* Insert a dummy first entry to be able to optimize the whole
28199 predefined macro block using DW_MACRO_import. */
28200 if (macinfo_table->is_empty () && lineno <= 1)
28201 {
28202 e.code = 0;
28203 e.lineno = 0;
28204 e.info = NULL;
28205 vec_safe_push (macinfo_table, e);
28206 }
28207 e.code = DW_MACINFO_undef;
28208 e.lineno = lineno;
28209 e.info = ggc_strdup (buffer);
28210 vec_safe_push (macinfo_table, e);
28211 }
28212 }
28213
28214 /* Helpers to manipulate the hash table of macinfo entries. */
28215
28216 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28217 {
28218 static inline hashval_t hash (const macinfo_entry *);
28219 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28220 };
28221
28222 inline hashval_t
28223 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28224 {
28225 return htab_hash_string (entry->info);
28226 }
28227
28228 inline bool
28229 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28230 const macinfo_entry *entry2)
28231 {
28232 return !strcmp (entry1->info, entry2->info);
28233 }
28234
28235 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28236
28237 /* Output a single .debug_macinfo entry. */
28238
28239 static void
28240 output_macinfo_op (macinfo_entry *ref)
28241 {
28242 int file_num;
28243 size_t len;
28244 struct indirect_string_node *node;
28245 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28246 struct dwarf_file_data *fd;
28247
28248 switch (ref->code)
28249 {
28250 case DW_MACINFO_start_file:
28251 fd = lookup_filename (ref->info);
28252 file_num = maybe_emit_file (fd);
28253 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28254 dw2_asm_output_data_uleb128 (ref->lineno,
28255 "Included from line number %lu",
28256 (unsigned long) ref->lineno);
28257 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28258 break;
28259 case DW_MACINFO_end_file:
28260 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28261 break;
28262 case DW_MACINFO_define:
28263 case DW_MACINFO_undef:
28264 len = strlen (ref->info) + 1;
28265 if (!dwarf_strict
28266 && len > (size_t) dwarf_offset_size
28267 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28268 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28269 {
28270 ref->code = ref->code == DW_MACINFO_define
28271 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28272 output_macinfo_op (ref);
28273 return;
28274 }
28275 dw2_asm_output_data (1, ref->code,
28276 ref->code == DW_MACINFO_define
28277 ? "Define macro" : "Undefine macro");
28278 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28279 (unsigned long) ref->lineno);
28280 dw2_asm_output_nstring (ref->info, -1, "The macro");
28281 break;
28282 case DW_MACRO_define_strp:
28283 case DW_MACRO_undef_strp:
28284 /* NB: dwarf2out_finish performs:
28285 1. save_macinfo_strings
28286 2. hash table traverse of index_string
28287 3. output_macinfo -> output_macinfo_op
28288 4. output_indirect_strings
28289 -> hash table traverse of output_index_string
28290
28291 When output_macinfo_op is called, all index strings have been
28292 added to hash table by save_macinfo_strings and we can't pass
28293 INSERT to find_slot_with_hash which may expand hash table, even
28294 if no insertion is needed, and change hash table traverse order
28295 between index_string and output_index_string. */
28296 node = find_AT_string (ref->info, NO_INSERT);
28297 gcc_assert (node
28298 && (node->form == DW_FORM_strp
28299 || node->form == dwarf_FORM (DW_FORM_strx)));
28300 dw2_asm_output_data (1, ref->code,
28301 ref->code == DW_MACRO_define_strp
28302 ? "Define macro strp"
28303 : "Undefine macro strp");
28304 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28305 (unsigned long) ref->lineno);
28306 if (node->form == DW_FORM_strp)
28307 dw2_asm_output_offset (dwarf_offset_size, node->label,
28308 debug_str_section, "The macro: \"%s\"",
28309 ref->info);
28310 else
28311 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28312 ref->info);
28313 break;
28314 case DW_MACRO_import:
28315 dw2_asm_output_data (1, ref->code, "Import");
28316 ASM_GENERATE_INTERNAL_LABEL (label,
28317 DEBUG_MACRO_SECTION_LABEL,
28318 ref->lineno + macinfo_label_base);
28319 dw2_asm_output_offset (dwarf_offset_size, label, NULL, NULL);
28320 break;
28321 default:
28322 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28323 ASM_COMMENT_START, (unsigned long) ref->code);
28324 break;
28325 }
28326 }
28327
28328 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28329 other compilation unit .debug_macinfo sections. IDX is the first
28330 index of a define/undef; return the number of ops that should be
28331 emitted in a comdat .debug_macinfo section, and emit
28332 a DW_MACRO_import entry referencing it.
28333 If the define/undef entry should be emitted normally, return 0. */
28334
28335 static unsigned
28336 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28337 macinfo_hash_type **macinfo_htab)
28338 {
28339 macinfo_entry *first, *second, *cur, *inc;
28340 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28341 unsigned char checksum[16];
28342 struct md5_ctx ctx;
28343 char *grp_name, *tail;
28344 const char *base;
28345 unsigned int i, count, encoded_filename_len, linebuf_len;
28346 macinfo_entry **slot;
28347
28348 first = &(*macinfo_table)[idx];
28349 second = &(*macinfo_table)[idx + 1];
28350
28351 /* Optimize only if there are at least two consecutive define/undef ops,
28352 and either all of them are before first DW_MACINFO_start_file
28353 with lineno {0,1} (i.e. predefined macro block), or all of them are
28354 in some included header file. */
28355 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28356 return 0;
28357 if (vec_safe_is_empty (files))
28358 {
28359 if (first->lineno > 1 || second->lineno > 1)
28360 return 0;
28361 }
28362 else if (first->lineno == 0)
28363 return 0;
28364
28365 /* Find the last define/undef entry that can be grouped together
28366 with first and at the same time compute md5 checksum of their
28367 codes, linenumbers and strings. */
28368 md5_init_ctx (&ctx);
28369 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28370 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28371 break;
28372 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28373 break;
28374 else
28375 {
28376 unsigned char code = cur->code;
28377 md5_process_bytes (&code, 1, &ctx);
28378 checksum_uleb128 (cur->lineno, &ctx);
28379 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28380 }
28381 md5_finish_ctx (&ctx, checksum);
28382 count = i - idx;
28383
28384 /* From the containing include filename (if any) pick up just
28385 usable characters from its basename. */
28386 if (vec_safe_is_empty (files))
28387 base = "";
28388 else
28389 base = lbasename (files->last ().info);
28390 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28391 if (ISIDNUM (base[i]) || base[i] == '.')
28392 encoded_filename_len++;
28393 /* Account for the '.' separator appended at the end. */
28394 if (encoded_filename_len)
28395 encoded_filename_len++;
28396
28397 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28398 linebuf_len = strlen (linebuf);
28399
28400 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
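/* A hypothetical example: "wm4.stdio.h.1.0123456789abcdef0123456789abcdef".  */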
28401 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28402 + 16 * 2 + 1);
28403 memcpy (grp_name, dwarf_offset_size == 4 ? "wm4." : "wm8.", 4);
28404 tail = grp_name + 4;
28405 if (encoded_filename_len)
28406 {
28407 for (i = 0; base[i]; i++)
28408 if (ISIDNUM (base[i]) || base[i] == '.')
28409 *tail++ = base[i];
28410 *tail++ = '.';
28411 }
28412 memcpy (tail, linebuf, linebuf_len);
28413 tail += linebuf_len;
28414 *tail++ = '.';
28415 for (i = 0; i < 16; i++)
28416 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28417
28418 /* Construct a macinfo_entry for DW_MACRO_import
28419 in the empty vector entry before the first define/undef. */
28420 inc = &(*macinfo_table)[idx - 1];
28421 inc->code = DW_MACRO_import;
28422 inc->lineno = 0;
28423 inc->info = ggc_strdup (grp_name);
28424 if (!*macinfo_htab)
28425 *macinfo_htab = new macinfo_hash_type (10);
28426 /* Avoid emitting duplicates. */
28427 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28428 if (*slot != NULL)
28429 {
28430 inc->code = 0;
28431 inc->info = NULL;
28432 /* If such an entry has been used before, just emit
28433 a DW_MACRO_import op. */
28434 inc = *slot;
28435 output_macinfo_op (inc);
28436 /* And clear all macinfo_entries in the range to avoid emitting them
28437 in the second pass. */
28438 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28439 {
28440 cur->code = 0;
28441 cur->info = NULL;
28442 }
28443 }
28444 else
28445 {
28446 *slot = inc;
28447 inc->lineno = (*macinfo_htab)->elements ();
28448 output_macinfo_op (inc);
28449 }
28450 return count;
28451 }
28452
28453 /* Save any strings needed by the macinfo table in the debug str
28454 table. All strings must be collected into the table by the time
28455 index_string is called. */
28456
28457 static void
28458 save_macinfo_strings (void)
28459 {
28460 unsigned len;
28461 unsigned i;
28462 macinfo_entry *ref;
28463
28464 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28465 {
28466 switch (ref->code)
28467 {
28468 /* Match the logic in output_macinfo_op to decide on
28469 indirect strings. */
28470 case DW_MACINFO_define:
28471 case DW_MACINFO_undef:
28472 len = strlen (ref->info) + 1;
28473 if (!dwarf_strict
28474 && len > (unsigned) dwarf_offset_size
28475 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28476 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28477 set_indirect_string (find_AT_string (ref->info));
28478 break;
28479 case DW_MACINFO_start_file:
28480 /* -gsplit-dwarf -g3 will also output the filename as an indirect
28481 string. */
28482 if (!dwarf_split_debug_info)
28483 break;
28484 /* Fall through. */
28485 case DW_MACRO_define_strp:
28486 case DW_MACRO_undef_strp:
28487 set_indirect_string (find_AT_string (ref->info));
28488 break;
28489 default:
28490 break;
28491 }
28492 }
28493 }
28494
28495 /* Output macinfo section(s). */
28496
28497 static void
28498 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28499 {
28500 unsigned i;
28501 unsigned long length = vec_safe_length (macinfo_table);
28502 macinfo_entry *ref;
28503 vec<macinfo_entry, va_gc> *files = NULL;
28504 macinfo_hash_type *macinfo_htab = NULL;
28505 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28506
28507 if (! length)
28508 return;
28509
28510 /* output_macinfo* uses these interchangeably. */
28511 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28512 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28513 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28514 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28515
28516 /* AIX Assembler inserts the length, so adjust the reference to match the
28517 offset expected by debuggers. */
28518 strcpy (dl_section_ref, debug_line_label);
28519 if (XCOFF_DEBUGGING_INFO)
28520 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28521
28522 /* For .debug_macro emit the section header. */
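/* Per DWARF 5, the header is a 2-byte version, a 1-byte flags field
   (bit 0: offset_size_flag, bit 1: debug_line_offset_flag) and, when bit 1
   is set, an offset into .debug_line; hence the flag values 2 (32-bit) and
   3 (64-bit) emitted below.  */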
28523 if (!dwarf_strict || dwarf_version >= 5)
28524 {
28525 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28526 "DWARF macro version number");
28527 if (dwarf_offset_size == 8)
28528 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28529 else
28530 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28531 dw2_asm_output_offset (dwarf_offset_size, debug_line_label,
28532 debug_line_section, NULL);
28533 }
28534
28535 /* The first loop emits the primary .debug_macinfo section and clears
28536 each macinfo_entry after it has been output.
28537 If a longer range of define/undef ops can be optimized using
28538 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in the
28539 vector entry before the first define/undef in the range; the define/undef
28540 ops themselves are not emitted here but are kept for the second loop. */
28541 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28542 {
28543 switch (ref->code)
28544 {
28545 case DW_MACINFO_start_file:
28546 vec_safe_push (files, *ref);
28547 break;
28548 case DW_MACINFO_end_file:
28549 if (!vec_safe_is_empty (files))
28550 files->pop ();
28551 break;
28552 case DW_MACINFO_define:
28553 case DW_MACINFO_undef:
28554 if ((!dwarf_strict || dwarf_version >= 5)
28555 && HAVE_COMDAT_GROUP
28556 && vec_safe_length (files) != 1
28557 && i > 0
28558 && i + 1 < length
28559 && (*macinfo_table)[i - 1].code == 0)
28560 {
28561 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28562 if (count)
28563 {
28564 i += count - 1;
28565 continue;
28566 }
28567 }
28568 break;
28569 case 0:
28570 /* A dummy entry may be inserted at the beginning to be able
28571 to optimize the whole block of predefined macros. */
28572 if (i == 0)
28573 continue;
28574 default:
28575 break;
28576 }
28577 output_macinfo_op (ref);
28578 ref->info = NULL;
28579 ref->code = 0;
28580 }
28581
28582 if (!macinfo_htab)
28583 return;
28584
28585 /* Save the number of transparent includes so we can adjust the
28586 label number for the fat LTO object DWARF. */
28587 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28588
28589 delete macinfo_htab;
28590 macinfo_htab = NULL;
28591
28592 /* If any DW_MACRO_import ops were used, do a second pass: at each such
28593 entry terminate the current chain, switch to a new comdat .debug_macinfo
28594 section and emit the define/undef entries within it. */
28595 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28596 switch (ref->code)
28597 {
28598 case 0:
28599 continue;
28600 case DW_MACRO_import:
28601 {
28602 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28603 tree comdat_key = get_identifier (ref->info);
28604 /* Terminate the previous .debug_macinfo section. */
28605 dw2_asm_output_data (1, 0, "End compilation unit");
28606 targetm.asm_out.named_section (debug_macinfo_section_name,
28607 SECTION_DEBUG
28608 | SECTION_LINKONCE
28609 | (early_lto_debug
28610 ? SECTION_EXCLUDE : 0),
28611 comdat_key);
28612 ASM_GENERATE_INTERNAL_LABEL (label,
28613 DEBUG_MACRO_SECTION_LABEL,
28614 ref->lineno + macinfo_label_base);
28615 ASM_OUTPUT_LABEL (asm_out_file, label);
28616 ref->code = 0;
28617 ref->info = NULL;
28618 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28619 "DWARF macro version number");
28620 if (dwarf_offset_size == 8)
28621 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28622 else
28623 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28624 }
28625 break;
28626 case DW_MACINFO_define:
28627 case DW_MACINFO_undef:
28628 output_macinfo_op (ref);
28629 ref->code = 0;
28630 ref->info = NULL;
28631 break;
28632 default:
28633 gcc_unreachable ();
28634 }
28635
28636 macinfo_label_base += macinfo_label_base_adj;
28637 }
28638
28639 /* As init_sections_and_labels may get called multiple times, have a
28640 generation count for labels. */
28641 static unsigned init_sections_and_labels_generation;
28642
28643 /* Initialize the various sections and labels for dwarf output; when
28644 EARLY_LTO_DEBUG, use the early LTO debug variants. Returns the
28645 generation (zero-based count of times the function has been called). */
28646
28647 static unsigned
28648 init_sections_and_labels (bool early_lto_debug)
28649 {
28650 if (early_lto_debug)
28651 {
28652 if (!dwarf_split_debug_info)
28653 {
28654 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28655 SECTION_DEBUG | SECTION_EXCLUDE,
28656 NULL);
28657 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28658 SECTION_DEBUG | SECTION_EXCLUDE,
28659 NULL);
28660 debug_macinfo_section_name
28661 = ((dwarf_strict && dwarf_version < 5)
28662 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28663 debug_macinfo_section = get_section (debug_macinfo_section_name,
28664 SECTION_DEBUG
28665 | SECTION_EXCLUDE, NULL);
28666 }
28667 else
28668 {
28669 /* ??? Which of the following do we need early? */
28670 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28671 SECTION_DEBUG | SECTION_EXCLUDE,
28672 NULL);
28673 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28674 SECTION_DEBUG | SECTION_EXCLUDE,
28675 NULL);
28676 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28677 SECTION_DEBUG
28678 | SECTION_EXCLUDE, NULL);
28679 debug_skeleton_abbrev_section
28680 = get_section (DEBUG_LTO_ABBREV_SECTION,
28681 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28682 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28683 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28684 init_sections_and_labels_generation);
28685
28686 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28687 stay in the main .o, but the skeleton_line goes into the split
28688 off dwo. */
28689 debug_skeleton_line_section
28690 = get_section (DEBUG_LTO_LINE_SECTION,
28691 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28692 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28693 DEBUG_SKELETON_LINE_SECTION_LABEL,
28694 init_sections_and_labels_generation);
28695 debug_str_offsets_section
28696 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28697 SECTION_DEBUG | SECTION_EXCLUDE,
28698 NULL);
28699 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28700 DEBUG_SKELETON_INFO_SECTION_LABEL,
28701 init_sections_and_labels_generation);
28702 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28703 DEBUG_STR_DWO_SECTION_FLAGS,
28704 NULL);
28705 debug_macinfo_section_name
28706 = ((dwarf_strict && dwarf_version < 5)
28707 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28708 debug_macinfo_section = get_section (debug_macinfo_section_name,
28709 SECTION_DEBUG | SECTION_EXCLUDE,
28710 NULL);
28711 }
28712 /* For macro info and the file table we have to refer to a
28713 debug_line section. */
28714 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28715 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28716 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28717 DEBUG_LINE_SECTION_LABEL,
28718 init_sections_and_labels_generation);
28719
28720 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28721 DEBUG_STR_SECTION_FLAGS
28722 | SECTION_EXCLUDE, NULL);
28723 if (!dwarf_split_debug_info)
28724 debug_line_str_section
28725 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28726 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28727 }
28728 else
28729 {
28730 if (!dwarf_split_debug_info)
28731 {
28732 debug_info_section = get_section (DEBUG_INFO_SECTION,
28733 SECTION_DEBUG, NULL);
28734 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28735 SECTION_DEBUG, NULL);
28736 debug_loc_section = get_section (dwarf_version >= 5
28737 ? DEBUG_LOCLISTS_SECTION
28738 : DEBUG_LOC_SECTION,
28739 SECTION_DEBUG, NULL);
28740 debug_macinfo_section_name
28741 = ((dwarf_strict && dwarf_version < 5)
28742 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28743 debug_macinfo_section = get_section (debug_macinfo_section_name,
28744 SECTION_DEBUG, NULL);
28745 }
28746 else
28747 {
28748 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28749 SECTION_DEBUG | SECTION_EXCLUDE,
28750 NULL);
28751 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28752 SECTION_DEBUG | SECTION_EXCLUDE,
28753 NULL);
28754 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28755 SECTION_DEBUG, NULL);
28756 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28757 SECTION_DEBUG, NULL);
28758 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28759 SECTION_DEBUG, NULL);
28760 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28761 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28762 init_sections_and_labels_generation);
28763
28764 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28765 stay in the main .o, but the skeleton_line goes into the
28766 split off dwo. */
28767 debug_skeleton_line_section
28768 = get_section (DEBUG_DWO_LINE_SECTION,
28769 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28770 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28771 DEBUG_SKELETON_LINE_SECTION_LABEL,
28772 init_sections_and_labels_generation);
28773 debug_str_offsets_section
28774 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28775 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28776 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28777 DEBUG_SKELETON_INFO_SECTION_LABEL,
28778 init_sections_and_labels_generation);
28779 debug_loc_section = get_section (dwarf_version >= 5
28780 ? DEBUG_DWO_LOCLISTS_SECTION
28781 : DEBUG_DWO_LOC_SECTION,
28782 SECTION_DEBUG | SECTION_EXCLUDE,
28783 NULL);
28784 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28785 DEBUG_STR_DWO_SECTION_FLAGS,
28786 NULL);
28787 debug_macinfo_section_name
28788 = ((dwarf_strict && dwarf_version < 5)
28789 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28790 debug_macinfo_section = get_section (debug_macinfo_section_name,
28791 SECTION_DEBUG | SECTION_EXCLUDE,
28792 NULL);
28793 }
28794 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28795 SECTION_DEBUG, NULL);
28796 debug_line_section = get_section (DEBUG_LINE_SECTION,
28797 SECTION_DEBUG, NULL);
28798 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28799 SECTION_DEBUG, NULL);
28800 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28801 SECTION_DEBUG, NULL);
28802 debug_str_section = get_section (DEBUG_STR_SECTION,
28803 DEBUG_STR_SECTION_FLAGS, NULL);
28804 if ((!dwarf_split_debug_info && !output_asm_line_debug_info ())
28805 || asm_outputs_debug_line_str ())
28806 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28807 DEBUG_STR_SECTION_FLAGS, NULL);
28808
28809 debug_ranges_section = get_section (dwarf_version >= 5
28810 ? DEBUG_RNGLISTS_SECTION
28811 : DEBUG_RANGES_SECTION,
28812 SECTION_DEBUG, NULL);
28813 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28814 SECTION_DEBUG, NULL);
28815 }
28816
28817 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28818 DEBUG_ABBREV_SECTION_LABEL,
28819 init_sections_and_labels_generation);
28820 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28821 DEBUG_INFO_SECTION_LABEL,
28822 init_sections_and_labels_generation);
28823 info_section_emitted = false;
28824 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28825 DEBUG_LINE_SECTION_LABEL,
28826 init_sections_and_labels_generation);
28827 /* There are up to 4 unique ranges labels per generation.
28828 See also output_rnglists. */
28829 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28830 DEBUG_RANGES_SECTION_LABEL,
28831 init_sections_and_labels_generation * 4);
28832 if (dwarf_version >= 5 && dwarf_split_debug_info)
28833 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28834 DEBUG_RANGES_SECTION_LABEL,
28835 1 + init_sections_and_labels_generation * 4);
28836 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28837 DEBUG_ADDR_SECTION_LABEL,
28838 init_sections_and_labels_generation);
28839 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28840 (dwarf_strict && dwarf_version < 5)
28841 ? DEBUG_MACINFO_SECTION_LABEL
28842 : DEBUG_MACRO_SECTION_LABEL,
28843 init_sections_and_labels_generation);
28844 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28845 init_sections_and_labels_generation);
28846
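/* Return the generation used for the labels just created and bump the
   counter for the next call.  */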
28847 ++init_sections_and_labels_generation;
28848 return init_sections_and_labels_generation - 1;
28849 }
28850
28851 /* Set up for Dwarf output at the start of compilation. */
28852
28853 static void
28854 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28855 {
28856 /* Allocate the file_table. */
28857 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28858
28859 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28860 /* Allocate the decl_die_table. */
28861 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28862
28863 /* Allocate the decl_loc_table. */
28864 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28865
28866 /* Allocate the cached_dw_loc_list_table. */
28867 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28868
28869 /* Allocate the initial hunk of the abbrev_die_table. */
28870 vec_alloc (abbrev_die_table, 256);
28871 /* Zero-th entry is allocated, but unused. */
28872 abbrev_die_table->quick_push (NULL);
28873
28874 /* Allocate the dwarf_proc_stack_usage_map. */
28875 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28876
28877 /* Allocate the pubtypes and pubnames vectors. */
28878 vec_alloc (pubname_table, 32);
28879 vec_alloc (pubtype_table, 32);
28880
28881 vec_alloc (incomplete_types, 64);
28882
28883 vec_alloc (used_rtx_array, 32);
28884
28885 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28886 vec_alloc (macinfo_table, 64);
28887 #endif
28888
28889 /* If front-ends already registered a main translation unit but we were not
28890 ready to perform the association, do this now. */
28891 if (main_translation_unit != NULL_TREE)
28892 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28893 }
28894
28895 /* Called before compile () starts outputting functions, variables
28896 and toplevel asms into assembly. */
28897
28898 static void
28899 dwarf2out_assembly_start (void)
28900 {
28901 if (text_section_line_info)
28902 return;
28903
28904 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28905 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28906 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28907 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28908 COLD_TEXT_SECTION_LABEL, 0);
28909 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28910
28911 switch_to_section (text_section);
28912 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28913 #endif
28914
28915 /* Make sure the line number table for .text always exists. */
28916 text_section_line_info = new_line_info_table ();
28917 text_section_line_info->end_label = text_end_label;
28918
28919 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28920 cur_line_info_table = text_section_line_info;
28921 #endif
28922
28923 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28924 && dwarf2out_do_cfi_asm ()
28925 && !dwarf2out_do_eh_frame ())
28926 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28927 }
28928
28929 /* A helper function for dwarf2out_finish called through
28930 htab_traverse. Assign a string its index. All strings must be
28931 collected into the table by the time index_string is called,
28932 because the indexing code relies on htab_traverse to traverse nodes
28933 in the same order for each run. */
28934
28935 int
28936 index_string (indirect_string_node **h, unsigned int *index)
28937 {
28938 indirect_string_node *node = *h;
28939
28940 find_string_form (node);
28941 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28942 {
28943 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28944 node->index = *index;
28945 *index += 1;
28946 }
28947 return 1;
28948 }
28949
28950 /* A helper function for output_indirect_strings called through
28951 htab_traverse. Output the offset to a string and update the
28952 current offset. */
28953
28954 int
28955 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28956 {
28957 indirect_string_node *node = *h;
28958
28959 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28960 {
28961 /* Assert that this node has been assigned an index. */
28962 gcc_assert (node->index != NO_INDEX_ASSIGNED
28963 && node->index != NOT_INDEXED);
28964 dw2_asm_output_data (dwarf_offset_size, *offset,
28965 "indexed string 0x%x: %s", node->index, node->str);
28966 *offset += strlen (node->str) + 1;
28967 }
28968 return 1;
28969 }
28970
28971 /* A helper function for dwarf2out_finish called through
28972 htab_traverse. Output the indexed string. */
28973
28974 int
28975 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28976 {
28977 struct indirect_string_node *node = *h;
28978
28979 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28980 {
28981 /* Assert that the strings are output in the same order as their
28982 indexes were assigned. */
28983 gcc_assert (*cur_idx == node->index);
28984 assemble_string (node->str, strlen (node->str) + 1);
28985 *cur_idx += 1;
28986 }
28987 return 1;
28988 }
28989
28990 /* A helper function for output_indirect_strings. Counts the number
28991 of indexed string offsets. Must match the logic of the functions
28992 output_index_string[_offset] above. */
28993 int
28994 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28995 {
28996 struct indirect_string_node *node = *h;
28997
28998 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28999 *last_idx += 1;
29000 return 1;
29001 }
29002
29003 /* A helper function for dwarf2out_finish called through
29004 htab_traverse. Emit one queued .debug_str string. */
29005
29006 int
29007 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
29008 {
29009 struct indirect_string_node *node = *h;
29010
29011 node->form = find_string_form (node);
29012 if (node->form == form && node->refcount > 0)
29013 {
29014 ASM_OUTPUT_LABEL (asm_out_file, node->label);
29015 assemble_string (node->str, strlen (node->str) + 1);
29016 }
29017
29018 return 1;
29019 }
29020
29021 /* Output the indexed string table. */
29022
29023 static void
29024 output_indirect_strings (void)
29025 {
29026 switch_to_section (debug_str_section);
29027 if (!dwarf_split_debug_info)
29028 debug_str_hash->traverse<enum dwarf_form,
29029 output_indirect_string> (DW_FORM_strp);
29030 else
29031 {
29032 unsigned int offset = 0;
29033 unsigned int cur_idx = 0;
29034
29035 if (skeleton_debug_str_hash)
29036 skeleton_debug_str_hash->traverse<enum dwarf_form,
29037 output_indirect_string> (DW_FORM_strp);
29038
29039 switch_to_section (debug_str_offsets_section);
29040 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
29041 header. Note that we don't need to generate a label to the
29042 actual index table following the header here, because this is
29043 for the split dwarf case only. In a .dwo file there is only
29044 one string offsets table (and one debug info section). But
29045 if we were to start using string offset tables for the main (or
29046 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
29047 pointing to the actual index after the header. Split dwarf
29048 units will never have a string offsets base attribute. When
29049 a split unit is moved into a .dwp file the string offsets can
29050 be found through the .debug_cu_index section table. */
29051 if (dwarf_version >= 5)
29052 {
29053 unsigned int last_idx = 0;
29054 unsigned long str_offsets_length;
29055
29056 debug_str_hash->traverse_noresize
29057 <unsigned int *, count_index_strings> (&last_idx);
29058 str_offsets_length = last_idx * dwarf_offset_size + 4;
29059 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
29060 dw2_asm_output_data (4, 0xffffffff,
29061 "Escape value for 64-bit DWARF extension");
29062 dw2_asm_output_data (dwarf_offset_size, str_offsets_length,
29063 "Length of string offsets unit");
29064 dw2_asm_output_data (2, 5, "DWARF string offsets version");
29065 dw2_asm_output_data (2, 0, "Header zero padding");
29066 }
29067 debug_str_hash->traverse_noresize
29068 <unsigned int *, output_index_string_offset> (&offset);
29069 switch_to_section (debug_str_dwo_section);
29070 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
29071 (&cur_idx);
29072 }
29073 }
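/* Sketch of the DWARF 5 .debug_str_offsets.dwo output produced above for
   three indexed strings with 32-bit DWARF; the directive spellings are
   illustrative and depend on the target assembler:

	.long	0x10		# Length of string offsets unit: 3 * 4 + 4
	.value	0x5		# DWARF string offsets version
	.value	0x0		# Header zero padding
	.long	0x0		# indexed string 0x0: <first string>
	.long	<offset 1>	# indexed string 0x1: <second string>
	.long	<offset 2>	# indexed string 0x2: <third string>

   The strings themselves then follow in .debug_str.dwo in the same order,
   each terminated by a NUL byte.  */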
29074
29075 /* Callback for htab_traverse to assign an index to an entry in the
29076 table, and to write that entry to the .debug_addr section. */
29077
29078 int
29079 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
29080 {
29081 addr_table_entry *entry = *slot;
29082
29083 if (entry->refcount == 0)
29084 {
29085 gcc_assert (entry->index == NO_INDEX_ASSIGNED
29086 || entry->index == NOT_INDEXED);
29087 return 1;
29088 }
29089
29090 gcc_assert (entry->index == *cur_index);
29091 (*cur_index)++;
29092
29093 switch (entry->kind)
29094 {
29095 case ate_kind_rtx:
29096 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
29097 "0x%x", entry->index);
29098 break;
29099 case ate_kind_rtx_dtprel:
29100 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
29101 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
29102 DWARF2_ADDR_SIZE,
29103 entry->addr.rtl);
29104 fputc ('\n', asm_out_file);
29105 break;
29106 case ate_kind_label:
29107 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29108 "0x%x", entry->index);
29109 break;
29110 default:
29111 gcc_unreachable ();
29112 }
29113 return 1;
29114 }
29115
29116 /* A helper function for dwarf2out_finish. Counts the number
29117 of indexed addresses. Must match the logic of the function
29118 output_addr_table_entry above. */
29119 int
29120 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29121 {
29122 addr_table_entry *entry = *slot;
29123
29124 if (entry->refcount > 0)
29125 *last_idx += 1;
29126 return 1;
29127 }
29128
29129 /* Produce the .debug_addr section. */
29130
29131 static void
29132 output_addr_table (void)
29133 {
29134 unsigned int index = 0;
29135 if (addr_index_table == NULL || addr_index_table->size () == 0)
29136 return;
29137
29138 switch_to_section (debug_addr_section);
29139 /* GNU DebugFission https://gcc.gnu.org/wiki/DebugFission
29140 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
29141 before DWARF5, didn't have a header for .debug_addr units.
29142 DWARF5 specifies a small header when address tables are used. */
29143 if (dwarf_version >= 5)
29144 {
29145 unsigned int last_idx = 0;
29146 unsigned long addrs_length;
29147
29148 addr_index_table->traverse_noresize
29149 <unsigned int *, count_index_addrs> (&last_idx);
29150 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
29151
29152 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
29153 dw2_asm_output_data (4, 0xffffffff,
29154 "Escape value for 64-bit DWARF extension");
29155 dw2_asm_output_data (dwarf_offset_size, addrs_length,
29156 "Length of Address Unit");
29157 dw2_asm_output_data (2, 5, "DWARF addr version");
29158 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
29159 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
29160 }
29161 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
29162
29163 addr_index_table
29164 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29165 }
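/* Sketch of the DWARF 5 .debug_addr unit emitted above for two pooled
   addresses on a target with 8-byte pointers and 32-bit DWARF; directive
   spellings and the label name are illustrative:

	.long	0x14			# Length of Address Unit: 2 * 8 + 4
	.value	0x5			# DWARF addr version
	.byte	0x8			# Size of Address
	.byte	0			# Size of Segment Descriptor
   .Ldebug_addr0:
	.quad	<first address>		# 0x0
	.quad	<second address>	# 0x1

   DW_AT_addr_base in the skeleton unit points at the label after the
   header, so DW_OP_addrx / DW_FORM_addrx indexes are relative to the
   first entry rather than to the start of the section.  */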
29166
29167 #if ENABLE_ASSERT_CHECKING
29168 /* Verify that all marks are clear. */
29169
29170 static void
29171 verify_marks_clear (dw_die_ref die)
29172 {
29173 dw_die_ref c;
29174
29175 gcc_assert (! die->die_mark);
29176 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29177 }
29178 #endif /* ENABLE_ASSERT_CHECKING */
29179
29180 /* Clear the marks for a die and its children.
29181 Do not complain if the mark isn't set. */
29182
29183 static void
29184 prune_unmark_dies (dw_die_ref die)
29185 {
29186 dw_die_ref c;
29187
29188 if (die->die_mark)
29189 die->die_mark = 0;
29190 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29191 }
29192
29193 /* Given LOC that is referenced by a DIE we're marking as used, find all
29194 DWARF procedures it references and mark them as used. */
29195
29196 static void
29197 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29198 {
29199 for (; loc != NULL; loc = loc->dw_loc_next)
29200 switch (loc->dw_loc_opc)
29201 {
29202 case DW_OP_implicit_pointer:
29203 case DW_OP_convert:
29204 case DW_OP_reinterpret:
29205 case DW_OP_GNU_implicit_pointer:
29206 case DW_OP_GNU_convert:
29207 case DW_OP_GNU_reinterpret:
29208 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29209 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29210 break;
29211 case DW_OP_GNU_variable_value:
29212 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29213 {
29214 dw_die_ref ref
29215 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29216 if (ref == NULL)
29217 break;
29218 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29219 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29220 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29221 }
29222 /* FALLTHRU */
29223 case DW_OP_call2:
29224 case DW_OP_call4:
29225 case DW_OP_call_ref:
29226 case DW_OP_const_type:
29227 case DW_OP_GNU_const_type:
29228 case DW_OP_GNU_parameter_ref:
29229 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29230 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29231 break;
29232 case DW_OP_regval_type:
29233 case DW_OP_deref_type:
29234 case DW_OP_GNU_regval_type:
29235 case DW_OP_GNU_deref_type:
29236 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29237 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29238 break;
29239 case DW_OP_entry_value:
29240 case DW_OP_GNU_entry_value:
29241 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29242 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29243 break;
29244 default:
29245 break;
29246 }
29247 }
29248
29249 /* Given DIE that we're marking as used, find any other dies
29250 it references as attributes and mark them as used. */
29251
29252 static void
29253 prune_unused_types_walk_attribs (dw_die_ref die)
29254 {
29255 dw_attr_node *a;
29256 unsigned ix;
29257
29258 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29259 {
29260 switch (AT_class (a))
29261 {
29262 /* Make sure DWARF procedures referenced by location descriptions will
29263 get emitted. */
29264 case dw_val_class_loc:
29265 prune_unused_types_walk_loc_descr (AT_loc (a));
29266 break;
29267 case dw_val_class_loc_list:
29268 for (dw_loc_list_ref list = AT_loc_list (a);
29269 list != NULL;
29270 list = list->dw_loc_next)
29271 prune_unused_types_walk_loc_descr (list->expr);
29272 break;
29273
29274 case dw_val_class_view_list:
29275 /* This points to a loc_list in another attribute, so it's
29276 already covered. */
29277 break;
29278
29279 case dw_val_class_die_ref:
29280 /* A reference to another DIE.
29281 Make sure that it will get emitted.
29282 If it was broken out into a comdat group, don't follow it. */
29283 if (! AT_ref (a)->comdat_type_p
29284 || a->dw_attr == DW_AT_specification)
29285 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29286 break;
29287
29288 case dw_val_class_str:
29289 /* Set the string's refcount to 0 so that prune_unused_types_mark
29290 accounts properly for it. */
29291 a->dw_attr_val.v.val_str->refcount = 0;
29292 break;
29293
29294 default:
29295 break;
29296 }
29297 }
29298 }
29299
29300 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29301
29302 static void
29303 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29304 {
29305 dw_die_ref c;
29306
29307 if (die == NULL || die->die_child == NULL)
29308 return;
29309 c = die->die_child;
29310 do
29311 {
29312 if (is_template_parameter (c))
29313 prune_unused_types_mark (c, 1);
29314 c = c->die_sib;
29315 } while (c && c != die->die_child);
29316 }
29317
29318 /* Mark DIE as being used. If DOKIDS is true, then walk down
29319 to DIE's children. */
29320
29321 static void
29322 prune_unused_types_mark (dw_die_ref die, int dokids)
29323 {
29324 dw_die_ref c;
29325
29326 if (die->die_mark == 0)
29327 {
29328 /* We haven't done this node yet. Mark it as used. */
29329 die->die_mark = 1;
29330 /* If this is the DIE of a generic type instantiation,
29331 mark the children DIEs that describe its generic parms and
29332 args. */
29333 prune_unused_types_mark_generic_parms_dies (die);
29334
29335 /* We also have to mark its parents as used.
29336 (But we don't want to mark our parent's kids due to this,
29337 unless it is a class.) */
29338 if (die->die_parent)
29339 prune_unused_types_mark (die->die_parent,
29340 class_scope_p (die->die_parent));
29341
29342 /* Mark any referenced nodes. */
29343 prune_unused_types_walk_attribs (die);
29344
29345 /* If this node is a specification,
29346 also mark the definition, if it exists. */
29347 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29348 prune_unused_types_mark (die->die_definition, 1);
29349 }
29350
29351 if (dokids && die->die_mark != 2)
29352 {
29353 /* We need to walk the children, but haven't done so yet.
29354 Remember that we've walked the kids. */
29355 die->die_mark = 2;
29356
29357 /* If this is an array type, we need to make sure our
29358 kids get marked, even if they're types. If we're
29359 breaking out types into comdat sections, do this
29360 for all type definitions. */
29361 if (die->die_tag == DW_TAG_array_type
29362 || (use_debug_types
29363 && is_type_die (die) && ! is_declaration_die (die)))
29364 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29365 else
29366 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29367 }
29368 }
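/* Note on the marking scheme used throughout this prune pass: die_mark is
   0 while a DIE is still considered unused, 1 once the DIE itself has been
   marked as used, and 2 once its relevant children have been walked as
   well.  prune_unused_types_prune later keeps exactly the DIEs with a
   non-zero mark, and prune_unmark_dies resets all marks back to 0.  */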
29369
29370 /* For local classes, look if any static member functions were emitted
29371 and if so, mark them. */
29372
29373 static void
29374 prune_unused_types_walk_local_classes (dw_die_ref die)
29375 {
29376 dw_die_ref c;
29377
29378 if (die->die_mark == 2)
29379 return;
29380
29381 switch (die->die_tag)
29382 {
29383 case DW_TAG_structure_type:
29384 case DW_TAG_union_type:
29385 case DW_TAG_class_type:
29386 case DW_TAG_interface_type:
29387 break;
29388
29389 case DW_TAG_subprogram:
29390 if (!get_AT_flag (die, DW_AT_declaration)
29391 || die->die_definition != NULL)
29392 prune_unused_types_mark (die, 1);
29393 return;
29394
29395 default:
29396 return;
29397 }
29398
29399 /* Mark children. */
29400 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29401 }
29402
29403 /* Walk the tree DIE and mark types that we actually use. */
29404
29405 static void
29406 prune_unused_types_walk (dw_die_ref die)
29407 {
29408 dw_die_ref c;
29409
29410 /* Don't do anything if this node is already marked and
29411 children have been marked as well. */
29412 if (die->die_mark == 2)
29413 return;
29414
29415 switch (die->die_tag)
29416 {
29417 case DW_TAG_structure_type:
29418 case DW_TAG_union_type:
29419 case DW_TAG_class_type:
29420 case DW_TAG_interface_type:
29421 if (die->die_perennial_p)
29422 break;
29423
29424 for (c = die->die_parent; c; c = c->die_parent)
29425 if (c->die_tag == DW_TAG_subprogram)
29426 break;
29427
29428 /* Finding used static member functions inside of classes
29429 is needed just for local classes, because for other classes
29430 static member function DIEs with DW_AT_specification
29431 are emitted outside of the DW_TAG_*_type. If we ever change
29432 it, we'd need to call this even for non-local classes. */
29433 if (c)
29434 prune_unused_types_walk_local_classes (die);
29435
29436 /* It's a type node --- don't mark it. */
29437 return;
29438
29439 case DW_TAG_const_type:
29440 case DW_TAG_packed_type:
29441 case DW_TAG_pointer_type:
29442 case DW_TAG_reference_type:
29443 case DW_TAG_rvalue_reference_type:
29444 case DW_TAG_volatile_type:
29445 case DW_TAG_typedef:
29446 case DW_TAG_array_type:
29447 case DW_TAG_friend:
29448 case DW_TAG_enumeration_type:
29449 case DW_TAG_subroutine_type:
29450 case DW_TAG_string_type:
29451 case DW_TAG_set_type:
29452 case DW_TAG_subrange_type:
29453 case DW_TAG_ptr_to_member_type:
29454 case DW_TAG_file_type:
29455 /* Type nodes are useful only when other DIEs reference them --- don't
29456 mark them. */
29457 /* FALLTHROUGH */
29458
29459 case DW_TAG_dwarf_procedure:
29460 /* Likewise for DWARF procedures. */
29461
29462 if (die->die_perennial_p)
29463 break;
29464
29465 return;
29466
29467 case DW_TAG_variable:
29468 if (flag_debug_only_used_symbols)
29469 {
29470 if (die->die_perennial_p)
29471 break;
29472
29473 /* premark_used_variables marks external variables --- don't mark
29474 them here. But function-local externals are always considered
29475 used. */
29476 if (get_AT (die, DW_AT_external))
29477 {
29478 for (c = die->die_parent; c; c = c->die_parent)
29479 if (c->die_tag == DW_TAG_subprogram)
29480 break;
29481 if (!c)
29482 return;
29483 }
29484 }
29485 /* FALLTHROUGH */
29486
29487 default:
29488 /* Mark everything else. */
29489 break;
29490 }
29491
29492 if (die->die_mark == 0)
29493 {
29494 die->die_mark = 1;
29495
29496 /* Now, mark any dies referenced from here. */
29497 prune_unused_types_walk_attribs (die);
29498 }
29499
29500 die->die_mark = 2;
29501
29502 /* Mark children. */
29503 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29504 }
29505
29506 /* Increment the string counts on strings referred to from DIE's
29507 attributes. */
29508
29509 static void
29510 prune_unused_types_update_strings (dw_die_ref die)
29511 {
29512 dw_attr_node *a;
29513 unsigned ix;
29514
29515 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29516 if (AT_class (a) == dw_val_class_str)
29517 {
29518 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29519 s->refcount++;
29520 /* Avoid unnecessarily putting strings that are used less than
29521 twice in the hash table. */
29522 if (s->form != DW_FORM_line_strp
29523 && (s->refcount
29524 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2)))
29525 {
29526 indirect_string_node **slot
29527 = debug_str_hash->find_slot_with_hash (s->str,
29528 htab_hash_string (s->str),
29529 INSERT);
29530 gcc_assert (*slot == NULL);
29531 *slot = s;
29532 }
29533 }
29534 }
29535
29536 /* Mark DIE and its children as removed. */
29537
29538 static void
29539 mark_removed (dw_die_ref die)
29540 {
29541 dw_die_ref c;
29542 die->removed = true;
29543 FOR_EACH_CHILD (die, c, mark_removed (c));
29544 }
29545
29546 /* Remove from the tree DIE any dies that aren't marked. */
29547
29548 static void
29549 prune_unused_types_prune (dw_die_ref die)
29550 {
29551 dw_die_ref c;
29552
29553 gcc_assert (die->die_mark);
29554 prune_unused_types_update_strings (die);
29555
29556 if (! die->die_child)
29557 return;
29558
29559 c = die->die_child;
29560 do {
29561 dw_die_ref prev = c, next;
29562 for (c = c->die_sib; ! c->die_mark; c = next)
29563 if (c == die->die_child)
29564 {
29565 /* No marked children between 'prev' and the end of the list. */
29566 if (prev == c)
29567 /* No marked children at all. */
29568 die->die_child = NULL;
29569 else
29570 {
29571 prev->die_sib = c->die_sib;
29572 die->die_child = prev;
29573 }
29574 c->die_sib = NULL;
29575 mark_removed (c);
29576 return;
29577 }
29578 else
29579 {
29580 next = c->die_sib;
29581 c->die_sib = NULL;
29582 mark_removed (c);
29583 }
29584
29585 if (c != prev->die_sib)
29586 prev->die_sib = c;
29587 prune_unused_types_prune (c);
29588 } while (c != die->die_child);
29589 }
29590
29591 /* Remove dies representing declarations that we never use. */
29592
29593 static void
29594 prune_unused_types (void)
29595 {
29596 unsigned int i;
29597 limbo_die_node *node;
29598 comdat_type_node *ctnode;
29599 pubname_entry *pub;
29600 dw_die_ref base_type;
29601
29602 #if ENABLE_ASSERT_CHECKING
29603 /* All the marks should already be clear. */
29604 verify_marks_clear (comp_unit_die ());
29605 for (node = limbo_die_list; node; node = node->next)
29606 verify_marks_clear (node->die);
29607 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29608 verify_marks_clear (ctnode->root_die);
29609 #endif /* ENABLE_ASSERT_CHECKING */
29610
29611 /* Mark types that are used in global variables. */
29612 premark_types_used_by_global_vars ();
29613
29614 /* Mark variables used in the symtab. */
29615 if (flag_debug_only_used_symbols)
29616 premark_used_variables ();
29617
29618 /* Set the mark on nodes that are actually used. */
29619 prune_unused_types_walk (comp_unit_die ());
29620 for (node = limbo_die_list; node; node = node->next)
29621 prune_unused_types_walk (node->die);
29622 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29623 {
29624 prune_unused_types_walk (ctnode->root_die);
29625 prune_unused_types_mark (ctnode->type_die, 1);
29626 }
29627
29628 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29629 are unusual in that they are pubnames that are the children of pubtypes.
29630 They should only be marked via their parent DW_TAG_enumeration_type die,
29631 not as roots in themselves. */
29632 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29633 if (pub->die->die_tag != DW_TAG_enumerator)
29634 prune_unused_types_mark (pub->die, 1);
29635 for (i = 0; base_types.iterate (i, &base_type); i++)
29636 prune_unused_types_mark (base_type, 1);
29637
29638 /* Also set the mark on nodes that could be referenced by
29639 DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
29640 by DW_TAG_inlined_subroutine origins. */
29641 cgraph_node *cnode;
29642 FOR_EACH_FUNCTION (cnode)
29643 if (cnode->referred_to_p (false))
29644 {
29645 dw_die_ref die = lookup_decl_die (cnode->decl);
29646 if (die == NULL || die->die_mark)
29647 continue;
29648 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29649 if (e->caller != cnode)
29650 {
29651 prune_unused_types_mark (die, 1);
29652 break;
29653 }
29654 }
29655
29656 if (debug_str_hash)
29657 debug_str_hash->empty ();
29658 if (skeleton_debug_str_hash)
29659 skeleton_debug_str_hash->empty ();
29660 prune_unused_types_prune (comp_unit_die ());
29661 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29662 {
29663 node = *pnode;
29664 if (!node->die->die_mark)
29665 *pnode = node->next;
29666 else
29667 {
29668 prune_unused_types_prune (node->die);
29669 pnode = &node->next;
29670 }
29671 }
29672 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29673 prune_unused_types_prune (ctnode->root_die);
29674
29675 /* Leave the marks clear. */
29676 prune_unmark_dies (comp_unit_die ());
29677 for (node = limbo_die_list; node; node = node->next)
29678 prune_unmark_dies (node->die);
29679 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29680 prune_unmark_dies (ctnode->root_die);
29681 }
29682
29683 /* Helpers to manipulate hash table of comdat type units. */
29684
29685 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29686 {
29687 static inline hashval_t hash (const comdat_type_node *);
29688 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29689 };
29690
29691 inline hashval_t
29692 comdat_type_hasher::hash (const comdat_type_node *type_node)
29693 {
29694 hashval_t h;
29695 memcpy (&h, type_node->signature, sizeof (h));
29696 return h;
29697 }
29698
29699 inline bool
29700 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29701 const comdat_type_node *type_node_2)
29702 {
29703 return (! memcmp (type_node_1->signature, type_node_2->signature,
29704 DWARF_TYPE_SIGNATURE_SIZE));
29705 }
29706
29707 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29708 to the location where it would have been added had we known its
29709 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29710 probably improve compactness of debug info, removing equivalent
29711 abbrevs, and hide any differences caused by deferring the
29712 computation of the assembler name, triggered by e.g. PCH. */
29713
29714 static inline void
29715 move_linkage_attr (dw_die_ref die)
29716 {
29717 unsigned ix = vec_safe_length (die->die_attr);
29718 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29719
29720 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29721 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29722
29723 while (--ix > 0)
29724 {
29725 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29726
29727 if (prev->dw_attr == DW_AT_decl_line
29728 || prev->dw_attr == DW_AT_decl_column
29729 || prev->dw_attr == DW_AT_name)
29730 break;
29731 }
29732
29733 if (ix != vec_safe_length (die->die_attr) - 1)
29734 {
29735 die->die_attr->pop ();
29736 die->die_attr->quick_insert (ix, linkage);
29737 }
29738 }
29739
29740 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29741 referenced from typed stack ops and count how often they are used. */
29742
29743 static void
29744 mark_base_types (dw_loc_descr_ref loc)
29745 {
29746 dw_die_ref base_type = NULL;
29747
29748 for (; loc; loc = loc->dw_loc_next)
29749 {
29750 switch (loc->dw_loc_opc)
29751 {
29752 case DW_OP_regval_type:
29753 case DW_OP_deref_type:
29754 case DW_OP_GNU_regval_type:
29755 case DW_OP_GNU_deref_type:
29756 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29757 break;
29758 case DW_OP_convert:
29759 case DW_OP_reinterpret:
29760 case DW_OP_GNU_convert:
29761 case DW_OP_GNU_reinterpret:
29762 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29763 continue;
29764 /* FALLTHRU */
29765 case DW_OP_const_type:
29766 case DW_OP_GNU_const_type:
29767 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29768 break;
29769 case DW_OP_entry_value:
29770 case DW_OP_GNU_entry_value:
29771 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29772 continue;
29773 default:
29774 continue;
29775 }
29776 gcc_assert (base_type->die_parent == comp_unit_die ());
29777 if (base_type->die_mark)
29778 base_type->die_mark++;
29779 else
29780 {
29781 base_types.safe_push (base_type);
29782 base_type->die_mark = 1;
29783 }
29784 }
29785 }
29786
29787 /* Comparison function for sorting marked base types. */
29788
29789 static int
29790 base_type_cmp (const void *x, const void *y)
29791 {
29792 dw_die_ref dx = *(const dw_die_ref *) x;
29793 dw_die_ref dy = *(const dw_die_ref *) y;
29794 unsigned int byte_size1, byte_size2;
29795 unsigned int encoding1, encoding2;
29796 unsigned int align1, align2;
29797 if (dx->die_mark > dy->die_mark)
29798 return -1;
29799 if (dx->die_mark < dy->die_mark)
29800 return 1;
29801 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29802 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29803 if (byte_size1 < byte_size2)
29804 return 1;
29805 if (byte_size1 > byte_size2)
29806 return -1;
29807 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29808 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29809 if (encoding1 < encoding2)
29810 return 1;
29811 if (encoding1 > encoding2)
29812 return -1;
29813 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29814 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29815 if (align1 < align2)
29816 return 1;
29817 if (align1 > align2)
29818 return -1;
29819 return 0;
29820 }
29821
29822 /* Move base types marked by mark_base_types as early as possible
29823 in the CU, sorted by decreasing usage count both to make the
29824 uleb128 references as small as possible and to make sure they
29825 will have die_offset already computed by calc_die_sizes when
29826 sizes of typed stack loc ops are computed. */
29827
29828 static void
29829 move_marked_base_types (void)
29830 {
29831 unsigned int i;
29832 dw_die_ref base_type, die, c;
29833
29834 if (base_types.is_empty ())
29835 return;
29836
29837 /* Sort by decreasing usage count, they will be added again in that
29838 order later on. */
29839 base_types.qsort (base_type_cmp);
29840 die = comp_unit_die ();
29841 c = die->die_child;
29842 do
29843 {
29844 dw_die_ref prev = c;
29845 c = c->die_sib;
29846 while (c->die_mark)
29847 {
29848 remove_child_with_prev (c, prev);
29849 /* As base types got marked, there must be at least
29850 one node other than DW_TAG_base_type. */
29851 gcc_assert (die->die_child != NULL);
29852 c = prev->die_sib;
29853 }
29854 }
29855 while (c != die->die_child);
29856 gcc_assert (die->die_child);
29857 c = die->die_child;
29858 for (i = 0; base_types.iterate (i, &base_type); i++)
29859 {
29860 base_type->die_mark = 0;
29861 base_type->die_sib = c->die_sib;
29862 c->die_sib = base_type;
29863 c = base_type;
29864 }
29865 }
29866
29867 /* Helper function for resolve_addr, attempt to resolve
29868 one CONST_STRING, return true if successful. Similarly verify that
29869 SYMBOL_REFs refer to variables emitted in the current CU. */
29870
29871 static bool
29872 resolve_one_addr (rtx *addr)
29873 {
29874 rtx rtl = *addr;
29875
29876 if (GET_CODE (rtl) == CONST_STRING)
29877 {
29878 size_t len = strlen (XSTR (rtl, 0)) + 1;
29879 tree t = build_string (len, XSTR (rtl, 0));
29880 tree tlen = size_int (len - 1);
29881 TREE_TYPE (t)
29882 = build_array_type (char_type_node, build_index_type (tlen));
29883 rtl = lookup_constant_def (t);
29884 if (!rtl || !MEM_P (rtl))
29885 return false;
29886 rtl = XEXP (rtl, 0);
29887 if (GET_CODE (rtl) == SYMBOL_REF
29888 && SYMBOL_REF_DECL (rtl)
29889 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29890 return false;
29891 vec_safe_push (used_rtx_array, rtl);
29892 *addr = rtl;
29893 return true;
29894 }
29895
29896 if (GET_CODE (rtl) == SYMBOL_REF
29897 && SYMBOL_REF_DECL (rtl))
29898 {
29899 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29900 {
29901 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29902 return false;
29903 }
29904 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29905 return false;
29906 }
29907
29908 if (GET_CODE (rtl) == CONST)
29909 {
29910 subrtx_ptr_iterator::array_type array;
29911 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29912 if (!resolve_one_addr (*iter))
29913 return false;
29914 }
29915
29916 return true;
29917 }
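/* Illustratively, a DW_OP_addr whose operand is the CONST_STRING "foo" is
   rewritten above to point at the constant pool symbol holding "foo"
   (an .LC-style label; the exact name is target dependent), provided that
   pool entry was actually emitted.  If it was not, the caller drops or
   rewrites the containing location expression instead.  */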
29918
29919 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29920 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29921 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29922
29923 static rtx
29924 string_cst_pool_decl (tree t)
29925 {
29926 rtx rtl = output_constant_def (t, 1);
29927 unsigned char *array;
29928 dw_loc_descr_ref l;
29929 tree decl;
29930 size_t len;
29931 dw_die_ref ref;
29932
29933 if (!rtl || !MEM_P (rtl))
29934 return NULL_RTX;
29935 rtl = XEXP (rtl, 0);
29936 if (GET_CODE (rtl) != SYMBOL_REF
29937 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29938 return NULL_RTX;
29939
29940 decl = SYMBOL_REF_DECL (rtl);
29941 if (!lookup_decl_die (decl))
29942 {
29943 len = TREE_STRING_LENGTH (t);
29944 vec_safe_push (used_rtx_array, rtl);
29945 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29946 array = ggc_vec_alloc<unsigned char> (len);
29947 memcpy (array, TREE_STRING_POINTER (t), len);
29948 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29949 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29950 l->dw_loc_oprnd2.v.val_vec.length = len;
29951 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29952 l->dw_loc_oprnd2.v.val_vec.array = array;
29953 add_AT_loc (ref, DW_AT_location, l);
29954 equate_decl_number_to_die (decl, ref);
29955 }
29956 return rtl;
29957 }
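/* For instance (purely illustrative), the first time the literal "ab" is
   handled here, the new DIE looks roughly like

     DW_TAG_dwarf_procedure
       DW_AT_location: DW_OP_implicit_value <3 byte block: 61 62 00>

   so that DW_OP_implicit_pointer has a DIE describing the string's
   contents to refer to.  */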
29958
29959 /* Helper function of resolve_addr_in_expr. LOC is
29960 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29961 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29962 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29963 with DW_OP_implicit_pointer if possible
29964 and return true; if unsuccessful, return false. */
29965
29966 static bool
29967 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29968 {
29969 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29970 HOST_WIDE_INT offset = 0;
29971 dw_die_ref ref = NULL;
29972 tree decl;
29973
29974 if (GET_CODE (rtl) == CONST
29975 && GET_CODE (XEXP (rtl, 0)) == PLUS
29976 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29977 {
29978 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29979 rtl = XEXP (XEXP (rtl, 0), 0);
29980 }
29981 if (GET_CODE (rtl) == CONST_STRING)
29982 {
29983 size_t len = strlen (XSTR (rtl, 0)) + 1;
29984 tree t = build_string (len, XSTR (rtl, 0));
29985 tree tlen = size_int (len - 1);
29986
29987 TREE_TYPE (t)
29988 = build_array_type (char_type_node, build_index_type (tlen));
29989 rtl = string_cst_pool_decl (t);
29990 if (!rtl)
29991 return false;
29992 }
29993 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29994 {
29995 decl = SYMBOL_REF_DECL (rtl);
29996 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29997 {
29998 ref = lookup_decl_die (decl);
29999 if (ref && (get_AT (ref, DW_AT_location)
30000 || get_AT (ref, DW_AT_const_value)))
30001 {
30002 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
30003 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30004 loc->dw_loc_oprnd1.val_entry = NULL;
30005 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30006 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30007 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30008 loc->dw_loc_oprnd2.v.val_int = offset;
30009 return true;
30010 }
30011 }
30012 }
30013 return false;
30014 }
30015
30016 /* Helper function for resolve_addr, handle one location
30017 expression, return false if at least one CONST_STRING or SYMBOL_REF in
30018 the location list couldn't be resolved. */
30019
30020 static bool
30021 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
30022 {
30023 dw_loc_descr_ref keep = NULL;
30024 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
30025 switch (loc->dw_loc_opc)
30026 {
30027 case DW_OP_addr:
30028 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30029 {
30030 if ((prev == NULL
30031 || prev->dw_loc_opc == DW_OP_piece
30032 || prev->dw_loc_opc == DW_OP_bit_piece)
30033 && loc->dw_loc_next
30034 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
30035 && (!dwarf_strict || dwarf_version >= 5)
30036 && optimize_one_addr_into_implicit_ptr (loc))
30037 break;
30038 return false;
30039 }
30040 break;
30041 case DW_OP_GNU_addr_index:
30042 case DW_OP_addrx:
30043 case DW_OP_GNU_const_index:
30044 case DW_OP_constx:
30045 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
30046 || loc->dw_loc_opc == DW_OP_addrx)
30047 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
30048 || loc->dw_loc_opc == DW_OP_constx)
30049 && loc->dtprel))
30050 {
30051 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
30052 if (!resolve_one_addr (&rtl))
30053 return false;
30054 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
30055 loc->dw_loc_oprnd1.val_entry
30056 = add_addr_table_entry (rtl, ate_kind_rtx);
30057 }
30058 break;
30059 case DW_OP_const4u:
30060 case DW_OP_const8u:
30061 if (loc->dtprel
30062 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30063 return false;
30064 break;
30065 case DW_OP_plus_uconst:
30066 if (size_of_loc_descr (loc)
30067 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
30068 + 1
30069 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
30070 {
30071 dw_loc_descr_ref repl
30072 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
30073 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
30074 add_loc_descr (&repl, loc->dw_loc_next);
30075 *loc = *repl;
30076 }
30077 break;
30078 case DW_OP_implicit_value:
30079 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
30080 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
30081 return false;
30082 break;
30083 case DW_OP_implicit_pointer:
30084 case DW_OP_GNU_implicit_pointer:
30085 case DW_OP_GNU_parameter_ref:
30086 case DW_OP_GNU_variable_value:
30087 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30088 {
30089 dw_die_ref ref
30090 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
30091 if (ref == NULL)
30092 return false;
30093 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30094 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30095 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30096 }
30097 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
30098 {
30099 if (prev == NULL
30100 && loc->dw_loc_next == NULL
30101 && AT_class (a) == dw_val_class_loc)
30102 switch (a->dw_attr)
30103 {
30104 /* The following attributes allow both exprloc and reference,
30105 so if the whole expression is DW_OP_GNU_variable_value
30106 alone we can transform it into a reference. */
30107 case DW_AT_byte_size:
30108 case DW_AT_bit_size:
30109 case DW_AT_lower_bound:
30110 case DW_AT_upper_bound:
30111 case DW_AT_bit_stride:
30112 case DW_AT_count:
30113 case DW_AT_allocated:
30114 case DW_AT_associated:
30115 case DW_AT_byte_stride:
30116 a->dw_attr_val.val_class = dw_val_class_die_ref;
30117 a->dw_attr_val.val_entry = NULL;
30118 a->dw_attr_val.v.val_die_ref.die
30119 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30120 a->dw_attr_val.v.val_die_ref.external = 0;
30121 return true;
30122 default:
30123 break;
30124 }
30125 if (dwarf_strict)
30126 return false;
30127 }
30128 break;
30129 case DW_OP_const_type:
30130 case DW_OP_regval_type:
30131 case DW_OP_deref_type:
30132 case DW_OP_convert:
30133 case DW_OP_reinterpret:
30134 case DW_OP_GNU_const_type:
30135 case DW_OP_GNU_regval_type:
30136 case DW_OP_GNU_deref_type:
30137 case DW_OP_GNU_convert:
30138 case DW_OP_GNU_reinterpret:
30139 while (loc->dw_loc_next
30140 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30141 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30142 {
30143 dw_die_ref base1, base2;
30144 unsigned enc1, enc2, size1, size2;
30145 if (loc->dw_loc_opc == DW_OP_regval_type
30146 || loc->dw_loc_opc == DW_OP_deref_type
30147 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30148 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30149 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30150 else if (loc->dw_loc_oprnd1.val_class
30151 == dw_val_class_unsigned_const)
30152 break;
30153 else
30154 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30155 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30156 == dw_val_class_unsigned_const)
30157 break;
30158 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30159 gcc_assert (base1->die_tag == DW_TAG_base_type
30160 && base2->die_tag == DW_TAG_base_type);
30161 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30162 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30163 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30164 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30165 if (size1 == size2
30166 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30167 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30168 && loc != keep)
30169 || enc1 == enc2))
30170 {
30171 /* Optimize away next DW_OP_convert after
30172 adjusting LOC's base type die reference. */
30173 if (loc->dw_loc_opc == DW_OP_regval_type
30174 || loc->dw_loc_opc == DW_OP_deref_type
30175 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30176 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30177 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30178 else
30179 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30180 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30181 continue;
30182 }
30183 /* Don't change integer DW_OP_convert after e.g. floating
30184 point typed stack entry. */
30185 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30186 keep = loc->dw_loc_next;
30187 break;
30188 }
30189 break;
30190 default:
30191 break;
30192 }
30193 return true;
30194 }
30195
30196 /* Helper function of resolve_addr. DIE had DW_AT_location of
30197 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
30198 and DW_OP_addr couldn't be resolved. resolve_addr has already
30199 removed the DW_AT_location attribute. This function attempts to
30200 add a new DW_AT_location attribute with DW_OP_implicit_pointer
30201 to it or DW_AT_const_value attribute, if possible. */
30202
30203 static void
30204 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30205 {
30206 if (!VAR_P (decl)
30207 || lookup_decl_die (decl) != die
30208 || DECL_EXTERNAL (decl)
30209 || !TREE_STATIC (decl)
30210 || DECL_INITIAL (decl) == NULL_TREE
30211 || DECL_P (DECL_INITIAL (decl))
30212 || get_AT (die, DW_AT_const_value))
30213 return;
30214
30215 tree init = DECL_INITIAL (decl);
30216 HOST_WIDE_INT offset = 0;
30217 /* For variables that have been optimized away and thus
30218 don't have a memory location, see if we can emit
30219 DW_AT_const_value instead. */
30220 if (tree_add_const_value_attribute (die, init))
30221 return;
30222 if (dwarf_strict && dwarf_version < 5)
30223 return;
30224 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30225 and ADDR_EXPR refers to a decl that has DW_AT_location or
30226 DW_AT_const_value (but isn't addressable, otherwise
30227 resolving the original DW_OP_addr wouldn't fail), see if
30228 we can add DW_OP_implicit_pointer. */
30229 STRIP_NOPS (init);
30230 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30231 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30232 {
30233 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30234 init = TREE_OPERAND (init, 0);
30235 STRIP_NOPS (init);
30236 }
30237 if (TREE_CODE (init) != ADDR_EXPR)
30238 return;
30239 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30240 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30241 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30242 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30243 && TREE_OPERAND (init, 0) != decl))
30244 {
30245 dw_die_ref ref;
30246 dw_loc_descr_ref l;
30247
30248 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30249 {
30250 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30251 if (!rtl)
30252 return;
30253 decl = SYMBOL_REF_DECL (rtl);
30254 }
30255 else
30256 decl = TREE_OPERAND (init, 0);
30257 ref = lookup_decl_die (decl);
30258 if (ref == NULL
30259 || (!get_AT (ref, DW_AT_location)
30260 && !get_AT (ref, DW_AT_const_value)))
30261 return;
30262 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30263 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30264 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30265 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30266 add_AT_loc (die, DW_AT_location, l);
30267 }
30268 }
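/* A rough example of the transformation above (illustrative source):

     static const char *p = "hello";

   If P ends up with no memory location, its removed DW_OP_addr-only
   DW_AT_location is replaced with a new one containing
   DW_OP_implicit_pointer <DIE for the "hello" pool entry> 0, so a
   debugger can still print *p.  */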
30269
30270 /* Return NULL if L is a valid DWARF expression, otherwise the first
30271 op that is not a valid DWARF expression. */
30272
30273 static dw_loc_descr_ref
30274 non_dwarf_expression (dw_loc_descr_ref l)
30275 {
30276 while (l)
30277 {
30278 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30279 return l;
30280 switch (l->dw_loc_opc)
30281 {
30282 case DW_OP_regx:
30283 case DW_OP_implicit_value:
30284 case DW_OP_stack_value:
30285 case DW_OP_implicit_pointer:
30286 case DW_OP_GNU_implicit_pointer:
30287 case DW_OP_GNU_parameter_ref:
30288 case DW_OP_piece:
30289 case DW_OP_bit_piece:
30290 return l;
30291 default:
30292 break;
30293 }
30294 l = l->dw_loc_next;
30295 }
30296 return NULL;
30297 }
30298
30299 /* Return adjusted copy of EXPR:
30300 If it is empty DWARF expression, return it.
30301 If it is valid non-empty DWARF expression,
30302 return copy of EXPR with DW_OP_deref appended to it.
30303 If it is DWARF expression followed by DW_OP_reg{N,x}, return
30304 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30305 If it is DWARF expression followed by DW_OP_stack_value, return
30306 copy of the DWARF expression without anything appended.
30307 Otherwise, return NULL. */
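/* A few illustrative cases of that adjustment:

     DW_OP_fbreg -16                    ->  DW_OP_fbreg -16, DW_OP_deref
     DW_OP_reg3                         ->  DW_OP_breg3 0
     DW_OP_fbreg -8, DW_OP_stack_value  ->  DW_OP_fbreg -8
     DW_OP_fbreg -8, DW_OP_piece 4      ->  NULL (composite locations are
					     not handled)  */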
30308
30309 static dw_loc_descr_ref
30310 copy_deref_exprloc (dw_loc_descr_ref expr)
30311 {
30312 dw_loc_descr_ref tail = NULL;
30313
30314 if (expr == NULL)
30315 return NULL;
30316
30317 dw_loc_descr_ref l = non_dwarf_expression (expr);
30318 if (l && l->dw_loc_next)
30319 return NULL;
30320
30321 if (l)
30322 {
30323 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30324 tail = new_loc_descr ((enum dwarf_location_atom)
30325 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30326 0, 0);
30327 else
30328 switch (l->dw_loc_opc)
30329 {
30330 case DW_OP_regx:
30331 tail = new_loc_descr (DW_OP_bregx,
30332 l->dw_loc_oprnd1.v.val_unsigned, 0);
30333 break;
30334 case DW_OP_stack_value:
30335 break;
30336 default:
30337 return NULL;
30338 }
30339 }
30340 else
30341 tail = new_loc_descr (DW_OP_deref, 0, 0);
30342
30343 dw_loc_descr_ref ret = NULL, *p = &ret;
30344 while (expr != l)
30345 {
30346 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30347 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30348 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30349 p = &(*p)->dw_loc_next;
30350 expr = expr->dw_loc_next;
30351 }
30352 *p = tail;
30353 return ret;
30354 }
30355
30356 /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
30357 reference to a variable or argument, adjust it if needed and return:
30358 -1 if the DW_AT_string_length attribute (and the
30359 DW_AT_{string_length_,}byte_size attribute, if present) should be removed,
30360 0 to keep the attribute, perhaps with minor modifications, no need to rescan,
30361 1 if the attribute has been successfully adjusted. */
30362
30363 static int
30364 optimize_string_length (dw_attr_node *a)
30365 {
30366 dw_loc_descr_ref l = AT_loc (a), lv;
30367 dw_die_ref die;
30368 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30369 {
30370 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30371 die = lookup_decl_die (decl);
30372 if (die)
30373 {
30374 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30375 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30376 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30377 }
30378 else
30379 return -1;
30380 }
30381 else
30382 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30383
30384 /* DWARF5 allows reference class, so we can then reference the DIE.
30385 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30386 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30387 {
30388 a->dw_attr_val.val_class = dw_val_class_die_ref;
30389 a->dw_attr_val.val_entry = NULL;
30390 a->dw_attr_val.v.val_die_ref.die = die;
30391 a->dw_attr_val.v.val_die_ref.external = 0;
30392 return 0;
30393 }
30394
30395 dw_attr_node *av = get_AT (die, DW_AT_location);
30396 dw_loc_list_ref d;
30397 bool non_dwarf_expr = false;
30398
30399 if (av == NULL)
30400 return dwarf_strict ? -1 : 0;
30401 switch (AT_class (av))
30402 {
30403 case dw_val_class_loc_list:
30404 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30405 if (d->expr && non_dwarf_expression (d->expr))
30406 non_dwarf_expr = true;
30407 break;
30408 case dw_val_class_view_list:
30409 gcc_unreachable ();
30410 case dw_val_class_loc:
30411 lv = AT_loc (av);
30412 if (lv == NULL)
30413 return dwarf_strict ? -1 : 0;
30414 if (non_dwarf_expression (lv))
30415 non_dwarf_expr = true;
30416 break;
30417 default:
30418 return dwarf_strict ? -1 : 0;
30419 }
30420
30421 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30422 into DW_OP_call4 or DW_OP_GNU_variable_value into
30423 DW_OP_call4 DW_OP_deref, do so. */
30424 if (!non_dwarf_expr
30425 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30426 {
30427 l->dw_loc_opc = DW_OP_call4;
30428 if (l->dw_loc_next)
30429 l->dw_loc_next = NULL;
30430 else
30431 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30432 return 0;
30433 }
30434
30435 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30436 copy over the DW_AT_location attribute from die to a. */
30437 if (l->dw_loc_next != NULL)
30438 {
30439 a->dw_attr_val = av->dw_attr_val;
30440 return 1;
30441 }
30442
30443 dw_loc_list_ref list, *p;
30444 switch (AT_class (av))
30445 {
30446 case dw_val_class_loc_list:
30447 p = &list;
30448 list = NULL;
30449 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30450 {
30451 lv = copy_deref_exprloc (d->expr);
30452 if (lv)
30453 {
30454 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30455 p = &(*p)->dw_loc_next;
30456 }
30457 else if (!dwarf_strict && d->expr)
30458 return 0;
30459 }
30460 if (list == NULL)
30461 return dwarf_strict ? -1 : 0;
30462 a->dw_attr_val.val_class = dw_val_class_loc_list;
30463 gen_llsym (list);
30464 *AT_loc_list_ptr (a) = list;
30465 return 1;
30466 case dw_val_class_loc:
30467 lv = copy_deref_exprloc (AT_loc (av));
30468 if (lv == NULL)
30469 return dwarf_strict ? -1 : 0;
30470 a->dw_attr_val.v.val_loc = lv;
30471 return 1;
30472 default:
30473 gcc_unreachable ();
30474 }
30475 }
30476
30477 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30478 an address in the .rodata section if the string literal is emitted there,
30479 or remove the containing location list or replace DW_AT_const_value
30480 with DW_AT_location and an empty location expression, if it isn't found
30481 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30482 to something that has been emitted in the current CU. */
30483
30484 static void
30485 resolve_addr (dw_die_ref die)
30486 {
30487 dw_die_ref c;
30488 dw_attr_node *a;
30489 dw_loc_list_ref *curr, *start, loc;
30490 unsigned ix;
30491 bool remove_AT_byte_size = false;
30492
30493 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30494 switch (AT_class (a))
30495 {
30496 case dw_val_class_loc_list:
30497 start = curr = AT_loc_list_ptr (a);
30498 loc = *curr;
30499 gcc_assert (loc);
30500 /* The same list can be referenced more than once. See if we have
30501 already recorded the result from a previous pass. */
30502 if (loc->replaced)
30503 *curr = loc->dw_loc_next;
30504 else if (!loc->resolved_addr)
30505 {
30506 /* As things stand, we do not expect or allow one die to
30507 reference a suffix of another die's location list chain.
30508 References must be identical or completely separate.
30509 There is therefore no need to cache the result of this
30510 pass on any list other than the first; doing so
30511 would lead to unnecessary writes. */
30512 while (*curr)
30513 {
30514 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30515 if (!resolve_addr_in_expr (a, (*curr)->expr))
30516 {
30517 dw_loc_list_ref next = (*curr)->dw_loc_next;
30518 dw_loc_descr_ref l = (*curr)->expr;
30519
30520 if (next && (*curr)->ll_symbol)
30521 {
30522 gcc_assert (!next->ll_symbol);
30523 next->ll_symbol = (*curr)->ll_symbol;
30524 next->vl_symbol = (*curr)->vl_symbol;
30525 }
30526 if (dwarf_split_debug_info)
30527 remove_loc_list_addr_table_entries (l);
30528 *curr = next;
30529 }
30530 else
30531 {
30532 mark_base_types ((*curr)->expr);
30533 curr = &(*curr)->dw_loc_next;
30534 }
30535 }
30536 if (loc == *start)
30537 loc->resolved_addr = 1;
30538 else
30539 {
30540 loc->replaced = 1;
30541 loc->dw_loc_next = *start;
30542 }
30543 }
30544 if (!*start)
30545 {
30546 remove_AT (die, a->dw_attr);
30547 ix--;
30548 }
30549 break;
30550 case dw_val_class_view_list:
30551 {
30552 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30553 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30554 dw_val_node *llnode
30555 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30556 /* If we no longer have a loclist, or it no longer needs
30557 views, drop this attribute. */
30558 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30559 {
30560 remove_AT (die, a->dw_attr);
30561 ix--;
30562 }
30563 break;
30564 }
30565 case dw_val_class_loc:
30566 {
30567 dw_loc_descr_ref l = AT_loc (a);
30568 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30569 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30570 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30571 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30572 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30573 with DW_FORM_ref referencing the same DIE as
30574 DW_OP_GNU_variable_value used to reference. */
30575 if (a->dw_attr == DW_AT_string_length
30576 && l
30577 && l->dw_loc_opc == DW_OP_GNU_variable_value
30578 && (l->dw_loc_next == NULL
30579 || (l->dw_loc_next->dw_loc_next == NULL
30580 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30581 {
30582 switch (optimize_string_length (a))
30583 {
30584 case -1:
30585 remove_AT (die, a->dw_attr);
30586 ix--;
30587 /* If we drop DW_AT_string_length, we need to drop also
30588 DW_AT_{string_length_,}byte_size. */
30589 remove_AT_byte_size = true;
30590 continue;
30591 default:
30592 break;
30593 case 1:
30594 /* Even if we keep the optimized DW_AT_string_length,
30595 it might have changed AT_class, so process it again. */
30596 ix--;
30597 continue;
30598 }
30599 }
30600 /* For -gdwarf-2 don't attempt to optimize
30601 DW_AT_data_member_location containing
30602 DW_OP_plus_uconst - older consumers might
30603 rely on it being that op instead of a more complex,
30604 but shorter, location description. */
30605 if ((dwarf_version > 2
30606 || a->dw_attr != DW_AT_data_member_location
30607 || l == NULL
30608 || l->dw_loc_opc != DW_OP_plus_uconst
30609 || l->dw_loc_next != NULL)
30610 && !resolve_addr_in_expr (a, l))
30611 {
30612 if (dwarf_split_debug_info)
30613 remove_loc_list_addr_table_entries (l);
30614 if (l != NULL
30615 && l->dw_loc_next == NULL
30616 && l->dw_loc_opc == DW_OP_addr
30617 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30618 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30619 && a->dw_attr == DW_AT_location)
30620 {
30621 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30622 remove_AT (die, a->dw_attr);
30623 ix--;
30624 optimize_location_into_implicit_ptr (die, decl);
30625 break;
30626 }
30627 if (a->dw_attr == DW_AT_string_length)
30628 /* If we drop DW_AT_string_length, we need to drop also
30629 DW_AT_{string_length_,}byte_size. */
30630 remove_AT_byte_size = true;
30631 remove_AT (die, a->dw_attr);
30632 ix--;
30633 }
30634 else
30635 mark_base_types (l);
30636 }
30637 break;
30638 case dw_val_class_addr:
30639 if (a->dw_attr == DW_AT_const_value
30640 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30641 {
30642 if (AT_index (a) != NOT_INDEXED)
30643 remove_addr_table_entry (a->dw_attr_val.val_entry);
30644 remove_AT (die, a->dw_attr);
30645 ix--;
30646 }
30647 if ((die->die_tag == DW_TAG_call_site
30648 && a->dw_attr == DW_AT_call_origin)
30649 || (die->die_tag == DW_TAG_GNU_call_site
30650 && a->dw_attr == DW_AT_abstract_origin))
30651 {
30652 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30653 dw_die_ref tdie = lookup_decl_die (tdecl);
30654 dw_die_ref cdie;
30655 if (tdie == NULL
30656 && DECL_EXTERNAL (tdecl)
30657 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30658 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30659 {
30660 dw_die_ref pdie = cdie;
30661 /* Make sure we don't add these DIEs into type units.
30662 We could emit skeleton DIEs for context (namespaces,
30663 outer structs/classes) and a skeleton DIE for the
30664 innermost context with DW_AT_signature pointing to the
30665 type unit. See PR78835. */
30666 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30667 pdie = pdie->die_parent;
30668 if (pdie == NULL)
30669 {
30670 /* Creating a full DIE for tdecl is overly expensive and
30671 at this point even wrong when in the LTO phase
30672 as it can end up generating new type DIEs we didn't
30673 output and thus optimize_external_refs will crash. */
30674 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30675 add_AT_flag (tdie, DW_AT_external, 1);
30676 add_AT_flag (tdie, DW_AT_declaration, 1);
30677 add_linkage_attr (tdie, tdecl);
30678 add_name_and_src_coords_attributes (tdie, tdecl, true);
30679 equate_decl_number_to_die (tdecl, tdie);
30680 }
30681 }
30682 if (tdie)
30683 {
30684 a->dw_attr_val.val_class = dw_val_class_die_ref;
30685 a->dw_attr_val.v.val_die_ref.die = tdie;
30686 a->dw_attr_val.v.val_die_ref.external = 0;
30687 }
30688 else
30689 {
30690 if (AT_index (a) != NOT_INDEXED)
30691 remove_addr_table_entry (a->dw_attr_val.val_entry);
30692 remove_AT (die, a->dw_attr);
30693 ix--;
30694 }
30695 }
30696 break;
30697 default:
30698 break;
30699 }
30700
30701 if (remove_AT_byte_size)
30702 remove_AT (die, dwarf_version >= 5
30703 ? DW_AT_string_length_byte_size
30704 : DW_AT_byte_size);
30705
30706 FOR_EACH_CHILD (die, c, resolve_addr (c));
30707 }
30708 \f
30709 /* Helper routines for optimize_location_lists.
30710 This pass tries to share identical local lists in .debug_loc
30711 section. */
30712
30713 /* Iteratively hash operands of LOC opcode into HSTATE. */
30714
30715 static void
30716 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30717 {
30718 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30719 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30720
30721 switch (loc->dw_loc_opc)
30722 {
30723 case DW_OP_const4u:
30724 case DW_OP_const8u:
30725 if (loc->dtprel)
30726 goto hash_addr;
30727 /* FALLTHRU */
30728 case DW_OP_const1u:
30729 case DW_OP_const1s:
30730 case DW_OP_const2u:
30731 case DW_OP_const2s:
30732 case DW_OP_const4s:
30733 case DW_OP_const8s:
30734 case DW_OP_constu:
30735 case DW_OP_consts:
30736 case DW_OP_pick:
30737 case DW_OP_plus_uconst:
30738 case DW_OP_breg0:
30739 case DW_OP_breg1:
30740 case DW_OP_breg2:
30741 case DW_OP_breg3:
30742 case DW_OP_breg4:
30743 case DW_OP_breg5:
30744 case DW_OP_breg6:
30745 case DW_OP_breg7:
30746 case DW_OP_breg8:
30747 case DW_OP_breg9:
30748 case DW_OP_breg10:
30749 case DW_OP_breg11:
30750 case DW_OP_breg12:
30751 case DW_OP_breg13:
30752 case DW_OP_breg14:
30753 case DW_OP_breg15:
30754 case DW_OP_breg16:
30755 case DW_OP_breg17:
30756 case DW_OP_breg18:
30757 case DW_OP_breg19:
30758 case DW_OP_breg20:
30759 case DW_OP_breg21:
30760 case DW_OP_breg22:
30761 case DW_OP_breg23:
30762 case DW_OP_breg24:
30763 case DW_OP_breg25:
30764 case DW_OP_breg26:
30765 case DW_OP_breg27:
30766 case DW_OP_breg28:
30767 case DW_OP_breg29:
30768 case DW_OP_breg30:
30769 case DW_OP_breg31:
30770 case DW_OP_regx:
30771 case DW_OP_fbreg:
30772 case DW_OP_piece:
30773 case DW_OP_deref_size:
30774 case DW_OP_xderef_size:
30775 hstate.add_object (val1->v.val_int);
30776 break;
30777 case DW_OP_skip:
30778 case DW_OP_bra:
30779 {
30780 int offset;
30781
30782 gcc_assert (val1->val_class == dw_val_class_loc);
30783 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30784 hstate.add_object (offset);
30785 }
30786 break;
30787 case DW_OP_implicit_value:
30788 hstate.add_object (val1->v.val_unsigned);
30789 switch (val2->val_class)
30790 {
30791 case dw_val_class_const:
30792 hstate.add_object (val2->v.val_int);
30793 break;
30794 case dw_val_class_vec:
30795 {
30796 unsigned int elt_size = val2->v.val_vec.elt_size;
30797 unsigned int len = val2->v.val_vec.length;
30798
30799 hstate.add_int (elt_size);
30800 hstate.add_int (len);
30801 hstate.add (val2->v.val_vec.array, len * elt_size);
30802 }
30803 break;
30804 case dw_val_class_const_double:
30805 hstate.add_object (val2->v.val_double.low);
30806 hstate.add_object (val2->v.val_double.high);
30807 break;
30808 case dw_val_class_wide_int:
30809 hstate.add (val2->v.val_wide->get_val (),
30810 get_full_len (*val2->v.val_wide)
30811 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30812 break;
30813 case dw_val_class_addr:
30814 inchash::add_rtx (val2->v.val_addr, hstate);
30815 break;
30816 default:
30817 gcc_unreachable ();
30818 }
30819 break;
30820 case DW_OP_bregx:
30821 case DW_OP_bit_piece:
30822 hstate.add_object (val1->v.val_int);
30823 hstate.add_object (val2->v.val_int);
30824 break;
30825 case DW_OP_addr:
30826 hash_addr:
30827 if (loc->dtprel)
30828 {
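/* Mix in a marker byte so that a DTP-relative address never hashes
   equal to the same address used as a plain DW_OP_addr operand. */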
30829 unsigned char dtprel = 0xd1;
30830 hstate.add_object (dtprel);
30831 }
30832 inchash::add_rtx (val1->v.val_addr, hstate);
30833 break;
30834 case DW_OP_GNU_addr_index:
30835 case DW_OP_addrx:
30836 case DW_OP_GNU_const_index:
30837 case DW_OP_constx:
30838 {
30839 if (loc->dtprel)
30840 {
30841 unsigned char dtprel = 0xd1;
30842 hstate.add_object (dtprel);
30843 }
30844 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30845 }
30846 break;
30847 case DW_OP_implicit_pointer:
30848 case DW_OP_GNU_implicit_pointer:
30849 hstate.add_int (val2->v.val_int);
30850 break;
30851 case DW_OP_entry_value:
30852 case DW_OP_GNU_entry_value:
30853 hstate.add_object (val1->v.val_loc);
30854 break;
30855 case DW_OP_regval_type:
30856 case DW_OP_deref_type:
30857 case DW_OP_GNU_regval_type:
30858 case DW_OP_GNU_deref_type:
30859 {
30860 unsigned int byte_size
30861 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30862 unsigned int encoding
30863 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30864 hstate.add_object (val1->v.val_int);
30865 hstate.add_object (byte_size);
30866 hstate.add_object (encoding);
30867 }
30868 break;
30869 case DW_OP_convert:
30870 case DW_OP_reinterpret:
30871 case DW_OP_GNU_convert:
30872 case DW_OP_GNU_reinterpret:
30873 if (val1->val_class == dw_val_class_unsigned_const)
30874 {
30875 hstate.add_object (val1->v.val_unsigned);
30876 break;
30877 }
30878 /* FALLTHRU */
30879 case DW_OP_const_type:
30880 case DW_OP_GNU_const_type:
30881 {
30882 unsigned int byte_size
30883 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30884 unsigned int encoding
30885 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30886 hstate.add_object (byte_size);
30887 hstate.add_object (encoding);
30888 if (loc->dw_loc_opc != DW_OP_const_type
30889 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30890 break;
30891 hstate.add_object (val2->val_class);
30892 switch (val2->val_class)
30893 {
30894 case dw_val_class_const:
30895 hstate.add_object (val2->v.val_int);
30896 break;
30897 case dw_val_class_vec:
30898 {
30899 unsigned int elt_size = val2->v.val_vec.elt_size;
30900 unsigned int len = val2->v.val_vec.length;
30901
30902 hstate.add_object (elt_size);
30903 hstate.add_object (len);
30904 hstate.add (val2->v.val_vec.array, len * elt_size);
30905 }
30906 break;
30907 case dw_val_class_const_double:
30908 hstate.add_object (val2->v.val_double.low);
30909 hstate.add_object (val2->v.val_double.high);
30910 break;
30911 case dw_val_class_wide_int:
30912 hstate.add (val2->v.val_wide->get_val (),
30913 get_full_len (*val2->v.val_wide)
30914 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30915 break;
30916 default:
30917 gcc_unreachable ();
30918 }
30919 }
30920 break;
30921
30922 default:
30923 /* Other codes have no operands. */
30924 break;
30925 }
30926 }
30927
30928 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30929
30930 static inline void
30931 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30932 {
30933 dw_loc_descr_ref l;
30934 bool sizes_computed = false;
30935 /* Sizes are computed lazily below, so that DW_OP_skip/DW_OP_bra
30936 offsets can be checksummed. */
30937
30938 for (l = loc; l != NULL; l = l->dw_loc_next)
30939 {
30940 enum dwarf_location_atom opc = l->dw_loc_opc;
30941 hstate.add_object (opc);
30942 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30943 {
30944 size_of_locs (loc);
30945 sizes_computed = true;
30946 }
30947 hash_loc_operands (l, hstate);
30948 }
30949 }
30950
30951 /* Compute hash of the whole location list LIST_HEAD. */
30952
30953 static inline void
30954 hash_loc_list (dw_loc_list_ref list_head)
30955 {
30956 dw_loc_list_ref curr = list_head;
30957 inchash::hash hstate;
30958
30959 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30960 {
30961 hstate.add (curr->begin, strlen (curr->begin) + 1);
30962 hstate.add (curr->end, strlen (curr->end) + 1);
30963 hstate.add_object (curr->vbegin);
30964 hstate.add_object (curr->vend);
30965 if (curr->section)
30966 hstate.add (curr->section, strlen (curr->section) + 1);
30967 hash_locs (curr->expr, hstate);
30968 }
30969 list_head->hash = hstate.end ();
30970 }
30971
30972 /* Return true if X and Y opcodes have the same operands. */
30973
30974 static inline bool
30975 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30976 {
30977 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30978 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30979 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30980 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30981
30982 switch (x->dw_loc_opc)
30983 {
30984 case DW_OP_const4u:
30985 case DW_OP_const8u:
30986 if (x->dtprel)
30987 goto hash_addr;
30988 /* FALLTHRU */
30989 case DW_OP_const1u:
30990 case DW_OP_const1s:
30991 case DW_OP_const2u:
30992 case DW_OP_const2s:
30993 case DW_OP_const4s:
30994 case DW_OP_const8s:
30995 case DW_OP_constu:
30996 case DW_OP_consts:
30997 case DW_OP_pick:
30998 case DW_OP_plus_uconst:
30999 case DW_OP_breg0:
31000 case DW_OP_breg1:
31001 case DW_OP_breg2:
31002 case DW_OP_breg3:
31003 case DW_OP_breg4:
31004 case DW_OP_breg5:
31005 case DW_OP_breg6:
31006 case DW_OP_breg7:
31007 case DW_OP_breg8:
31008 case DW_OP_breg9:
31009 case DW_OP_breg10:
31010 case DW_OP_breg11:
31011 case DW_OP_breg12:
31012 case DW_OP_breg13:
31013 case DW_OP_breg14:
31014 case DW_OP_breg15:
31015 case DW_OP_breg16:
31016 case DW_OP_breg17:
31017 case DW_OP_breg18:
31018 case DW_OP_breg19:
31019 case DW_OP_breg20:
31020 case DW_OP_breg21:
31021 case DW_OP_breg22:
31022 case DW_OP_breg23:
31023 case DW_OP_breg24:
31024 case DW_OP_breg25:
31025 case DW_OP_breg26:
31026 case DW_OP_breg27:
31027 case DW_OP_breg28:
31028 case DW_OP_breg29:
31029 case DW_OP_breg30:
31030 case DW_OP_breg31:
31031 case DW_OP_regx:
31032 case DW_OP_fbreg:
31033 case DW_OP_piece:
31034 case DW_OP_deref_size:
31035 case DW_OP_xderef_size:
31036 return valx1->v.val_int == valy1->v.val_int;
31037 case DW_OP_skip:
31038 case DW_OP_bra:
31039 /* If splitting debug info, the use of DW_OP_GNU_addr_index
31040 can cause irrelevant differences in dw_loc_addr. */
31041 gcc_assert (valx1->val_class == dw_val_class_loc
31042 && valy1->val_class == dw_val_class_loc
31043 && (dwarf_split_debug_info
31044 || x->dw_loc_addr == y->dw_loc_addr));
31045 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
31046 case DW_OP_implicit_value:
31047 if (valx1->v.val_unsigned != valy1->v.val_unsigned
31048 || valx2->val_class != valy2->val_class)
31049 return false;
31050 switch (valx2->val_class)
31051 {
31052 case dw_val_class_const:
31053 return valx2->v.val_int == valy2->v.val_int;
31054 case dw_val_class_vec:
31055 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31056 && valx2->v.val_vec.length == valy2->v.val_vec.length
31057 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31058 valx2->v.val_vec.elt_size
31059 * valx2->v.val_vec.length) == 0;
31060 case dw_val_class_const_double:
31061 return valx2->v.val_double.low == valy2->v.val_double.low
31062 && valx2->v.val_double.high == valy2->v.val_double.high;
31063 case dw_val_class_wide_int:
31064 return *valx2->v.val_wide == *valy2->v.val_wide;
31065 case dw_val_class_addr:
31066 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
31067 default:
31068 gcc_unreachable ();
31069 }
31070 case DW_OP_bregx:
31071 case DW_OP_bit_piece:
31072 return valx1->v.val_int == valy1->v.val_int
31073 && valx2->v.val_int == valy2->v.val_int;
31074 case DW_OP_addr:
31075 hash_addr:
31076 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
31077 case DW_OP_GNU_addr_index:
31078 case DW_OP_addrx:
31079 case DW_OP_GNU_const_index:
31080 case DW_OP_constx:
31081 {
31082 rtx ax1 = valx1->val_entry->addr.rtl;
31083 rtx ay1 = valy1->val_entry->addr.rtl;
31084 return rtx_equal_p (ax1, ay1);
31085 }
31086 case DW_OP_implicit_pointer:
31087 case DW_OP_GNU_implicit_pointer:
31088 return valx1->val_class == dw_val_class_die_ref
31089 && valx1->val_class == valy1->val_class
31090 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
31091 && valx2->v.val_int == valy2->v.val_int;
31092 case DW_OP_entry_value:
31093 case DW_OP_GNU_entry_value:
31094 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
31095 case DW_OP_const_type:
31096 case DW_OP_GNU_const_type:
31097 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
31098 || valx2->val_class != valy2->val_class)
31099 return false;
31100 switch (valx2->val_class)
31101 {
31102 case dw_val_class_const:
31103 return valx2->v.val_int == valy2->v.val_int;
31104 case dw_val_class_vec:
31105 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31106 && valx2->v.val_vec.length == valy2->v.val_vec.length
31107 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31108 valx2->v.val_vec.elt_size
31109 * valx2->v.val_vec.length) == 0;
31110 case dw_val_class_const_double:
31111 return valx2->v.val_double.low == valy2->v.val_double.low
31112 && valx2->v.val_double.high == valy2->v.val_double.high;
31113 case dw_val_class_wide_int:
31114 return *valx2->v.val_wide == *valy2->v.val_wide;
31115 default:
31116 gcc_unreachable ();
31117 }
31118 case DW_OP_regval_type:
31119 case DW_OP_deref_type:
31120 case DW_OP_GNU_regval_type:
31121 case DW_OP_GNU_deref_type:
31122 return valx1->v.val_int == valy1->v.val_int
31123 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31124 case DW_OP_convert:
31125 case DW_OP_reinterpret:
31126 case DW_OP_GNU_convert:
31127 case DW_OP_GNU_reinterpret:
31128 if (valx1->val_class != valy1->val_class)
31129 return false;
31130 if (valx1->val_class == dw_val_class_unsigned_const)
31131 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31132 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31133 case DW_OP_GNU_parameter_ref:
31134 return valx1->val_class == dw_val_class_die_ref
31135 && valx1->val_class == valy1->val_class
31136 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31137 default:
31138 /* Other codes have no operands. */
31139 return true;
31140 }
31141 }
31142
31143 /* Return true if DWARF location expressions X and Y are the same. */
31144
31145 static inline bool
31146 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31147 {
31148 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31149 if (x->dw_loc_opc != y->dw_loc_opc
31150 || x->dtprel != y->dtprel
31151 || !compare_loc_operands (x, y))
31152 break;
31153 return x == NULL && y == NULL;
31154 }
31155
31156 /* Hashtable helpers. */
31157
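/* loc_list_hasher reuses the hash precomputed by hash_loc_list rather
   than rehashing on every lookup; equal () compares the lists field by
   field, checking the cached hash first as a cheap early out. */
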
31158 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31159 {
31160 static inline hashval_t hash (const dw_loc_list_struct *);
31161 static inline bool equal (const dw_loc_list_struct *,
31162 const dw_loc_list_struct *);
31163 };
31164
31165 /* Return precomputed hash of location list X. */
31166
31167 inline hashval_t
31168 loc_list_hasher::hash (const dw_loc_list_struct *x)
31169 {
31170 return x->hash;
31171 }
31172
31173 /* Return true if location lists A and B are the same. */
31174
31175 inline bool
31176 loc_list_hasher::equal (const dw_loc_list_struct *a,
31177 const dw_loc_list_struct *b)
31178 {
31179 if (a == b)
31180 return 1;
31181 if (a->hash != b->hash)
31182 return 0;
31183 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31184 if (strcmp (a->begin, b->begin) != 0
31185 || strcmp (a->end, b->end) != 0
31186 || (a->section == NULL) != (b->section == NULL)
31187 || (a->section && strcmp (a->section, b->section) != 0)
31188 || a->vbegin != b->vbegin || a->vend != b->vend
31189 || !compare_locs (a->expr, b->expr))
31190 break;
31191 return a == NULL && b == NULL;
31192 }
31193
31194 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31195
31196
31197 /* Recursively optimize location lists referenced from DIE
31198 children and share them whenever possible. */
31199
31200 static void
31201 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31202 {
31203 dw_die_ref c;
31204 dw_attr_node *a;
31205 unsigned ix;
31206 dw_loc_list_struct **slot;
31207 bool drop_locviews = false;
31208 bool has_locviews = false;
31209
31210 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31211 if (AT_class (a) == dw_val_class_loc_list)
31212 {
31213 dw_loc_list_ref list = AT_loc_list (a);
31214 /* TODO: perform some optimizations here, before hashing
31215 it and storing into the hash table. */
31216 hash_loc_list (list);
31217 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31218 if (*slot == NULL)
31219 {
31220 *slot = list;
31221 if (loc_list_has_views (list))
31222 gcc_assert (list->vl_symbol);
31223 else if (list->vl_symbol)
31224 {
31225 drop_locviews = true;
31226 list->vl_symbol = NULL;
31227 }
31228 }
31229 else
31230 {
31231 if (list->vl_symbol && !(*slot)->vl_symbol)
31232 drop_locviews = true;
31233 a->dw_attr_val.v.val_loc_list = *slot;
31234 }
31235 }
31236 else if (AT_class (a) == dw_val_class_view_list)
31237 {
31238 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31239 has_locviews = true;
31240 }
31241
31242
31243 if (drop_locviews && has_locviews)
31244 remove_AT (die, DW_AT_GNU_locviews);
31245
31246 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31247 }
31248
31249
31250 /* Recursively assign each location list a unique index into the debug_addr
31251 section. */
31252
31253 static void
31254 index_location_lists (dw_die_ref die)
31255 {
31256 dw_die_ref c;
31257 dw_attr_node *a;
31258 unsigned ix;
31259
31260 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31261 if (AT_class (a) == dw_val_class_loc_list)
31262 {
31263 dw_loc_list_ref list = AT_loc_list (a);
31264 dw_loc_list_ref curr;
31265 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31266 {
31267 /* Don't index an entry that has already been indexed
31268 or won't be output. Make sure skip_loc_list_entry doesn't
31269 call size_of_locs, because that might cause a circular dependency:
31270 index_location_lists would require address table indexes to be
31271 computed, yet it adds new indexes through add_addr_table_entry,
31272 while the address table index computation requires no new additions
31273 to the hash table. In the rare case of a DWARF[234] location
31274 expression >= 64KB, we'll just waste an unused address table entry
31275 for it. */
31276 if (curr->begin_entry != NULL
31277 || skip_loc_list_entry (curr))
31278 continue;
31279
31280 curr->begin_entry
31281 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31282 }
31283 }
31284
31285 FOR_EACH_CHILD (die, c, index_location_lists (c));
31286 }
31287
31288 /* Optimize location lists referenced from DIE
31289 children and share them whenever possible. */
31290
31291 static void
31292 optimize_location_lists (dw_die_ref die)
31293 {
31294 loc_list_hash_type htab (500);
31295 optimize_location_lists_1 (die, &htab);
31296 }
31297 \f
31298 /* Traverse the limbo die list, and add parent/child links. The only
31299 dies without parents that should be here are concrete instances of
31300 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31301 For concrete instances, we can get the parent die from the abstract
31302 instance. */
31303
31304 static void
31305 flush_limbo_die_list (void)
31306 {
31307 limbo_die_node *node;
31308
31309 /* get_context_die calls force_decl_die, which can put new DIEs on the
31310 limbo list in LTO mode when nested functions are put in a different
31311 partition than that of their parent function. */
31312 while ((node = limbo_die_list))
31313 {
31314 dw_die_ref die = node->die;
31315 limbo_die_list = node->next;
31316
31317 if (die->die_parent == NULL)
31318 {
31319 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31320
31321 if (origin && origin->die_parent)
31322 add_child_die (origin->die_parent, die);
31323 else if (is_cu_die (die))
31324 ;
31325 else if (seen_error ())
31326 /* It's OK to be confused by errors in the input. */
31327 add_child_die (comp_unit_die (), die);
31328 else
31329 {
31330 /* In certain situations, the lexical block containing a
31331 nested function can be optimized away, which results
31332 in the nested function die being orphaned. Likewise
31333 with the return type of that nested function. Force
31334 this to be a child of the containing function.
31335
31336 It may happen that even the containing function got fully
31337 inlined and optimized out. In that case we are lost and
31338 assign the empty child. This should not be a big issue, as
31339 the function is likely unreachable too. */
31340 gcc_assert (node->created_for);
31341
31342 if (DECL_P (node->created_for))
31343 origin = get_context_die (DECL_CONTEXT (node->created_for));
31344 else if (TYPE_P (node->created_for))
31345 origin = scope_die_for (node->created_for, comp_unit_die ());
31346 else
31347 origin = comp_unit_die ();
31348
31349 add_child_die (origin, die);
31350 }
31351 }
31352 }
31353 }
31354
31355 /* Reset DIEs so we can output them again. */
31356
31357 static void
31358 reset_dies (dw_die_ref die)
31359 {
31360 dw_die_ref c;
31361
31362 /* Remove stuff we re-generate. */
31363 die->die_mark = 0;
31364 die->die_offset = 0;
31365 die->die_abbrev = 0;
31366 remove_AT (die, DW_AT_sibling);
31367
31368 FOR_EACH_CHILD (die, c, reset_dies (c));
31369 }
31370
31371 /* reset_indirect_string removed the references coming from DW_AT_name
31372 and DW_AT_comp_dir attributes on compilation unit DIEs. Re-add them
31373 as .debug_line_str strings. */
31374
31375 static void
31376 adjust_name_comp_dir (dw_die_ref die)
31377 {
31378 for (int i = 0; i < 2; i++)
31379 {
31380 dwarf_attribute attr_kind = i ? DW_AT_comp_dir : DW_AT_name;
31381 dw_attr_node *a = get_AT (die, attr_kind);
31382 if (a == NULL || a->dw_attr_val.val_class != dw_val_class_str)
31383 continue;
31384
31385 if (!debug_line_str_hash)
31386 debug_line_str_hash
31387 = hash_table<indirect_string_hasher>::create_ggc (10);
31388
31389 struct indirect_string_node *node
31390 = find_AT_string_in_table (a->dw_attr_val.v.val_str->str,
31391 debug_line_str_hash);
31392 set_indirect_string (node);
31393 node->form = DW_FORM_line_strp;
31394 a->dw_attr_val.v.val_str = node;
31395 }
31396 }
31397
31398 /* Output stuff that dwarf requires at the end of every file,
31399 and generate the DWARF-2 debugging info. */
31400
31401 static void
31402 dwarf2out_finish (const char *filename)
31403 {
31404 comdat_type_node *ctnode;
31405 dw_die_ref main_comp_unit_die;
31406 unsigned char checksum[16];
31407 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31408
31409 /* Flush out any latecomers to the limbo party. */
31410 flush_limbo_die_list ();
31411
31412 if (inline_entry_data_table)
31413 gcc_assert (inline_entry_data_table->is_empty ());
31414
31415 if (flag_checking)
31416 {
31417 verify_die (comp_unit_die ());
31418 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31419 verify_die (node->die);
31420 }
31421
31422 /* We shouldn't have any symbols with delayed asm names for
31423 DIEs generated after early finish. */
31424 gcc_assert (deferred_asm_name == NULL);
31425
31426 gen_remaining_tmpl_value_param_die_attribute ();
31427
31428 if (flag_generate_lto || flag_generate_offload)
31429 {
31430 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31431
31432 /* Prune stuff so that dwarf2out_finish runs successfully
31433 for the fat part of the object. */
31434 reset_dies (comp_unit_die ());
31435 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31436 reset_dies (node->die);
31437
31438 hash_table<comdat_type_hasher> comdat_type_table (100);
31439 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31440 {
31441 comdat_type_node **slot
31442 = comdat_type_table.find_slot (ctnode, INSERT);
31443
31444 /* Don't reset types twice. */
31445 if (*slot != HTAB_EMPTY_ENTRY)
31446 continue;
31447
31448 /* Remove the pointer to the line table. */
31449 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31450
31451 if (debug_info_level >= DINFO_LEVEL_TERSE)
31452 reset_dies (ctnode->root_die);
31453
31454 *slot = ctnode;
31455 }
31456
31457 /* Reset die CU symbol so we don't output it twice. */
31458 comp_unit_die ()->die_id.die_symbol = NULL;
31459
31460 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31461 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31462 if (have_macinfo)
31463 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31464
31465 /* Remove indirect string decisions. */
31466 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31467 if (debug_line_str_hash)
31468 {
31469 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31470 debug_line_str_hash = NULL;
31471 if (asm_outputs_debug_line_str ())
31472 {
31473 adjust_name_comp_dir (comp_unit_die ());
31474 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31475 adjust_name_comp_dir (node->die);
31476 }
31477 }
31478 }
31479
31480 #if ENABLE_ASSERT_CHECKING
31481 {
31482 dw_die_ref die = comp_unit_die (), c;
31483 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31484 }
31485 #endif
31486 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31487 resolve_addr (ctnode->root_die);
31488 resolve_addr (comp_unit_die ());
31489 move_marked_base_types ();
31490
31491 if (dump_file)
31492 {
31493 fprintf (dump_file, "DWARF for %s\n", filename);
31494 print_die (comp_unit_die (), dump_file);
31495 }
31496
31497 /* Initialize sections and labels used for actual assembler output. */
31498 unsigned generation = init_sections_and_labels (false);
31499
31500 /* Traverse the DIE's and add sibling attributes to those DIE's that
31501 have children. */
31502 add_sibling_attributes (comp_unit_die ());
31503 limbo_die_node *node;
31504 for (node = cu_die_list; node; node = node->next)
31505 add_sibling_attributes (node->die);
31506 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31507 add_sibling_attributes (ctnode->root_die);
31508
31509 /* When splitting DWARF info, we put some attributes in the
31510 skeleton compile_unit DIE that remains in the .o, while
31511 most attributes go in the DWO compile_unit_die. */
31512 if (dwarf_split_debug_info)
31513 {
31514 limbo_die_node *cu;
31515 main_comp_unit_die = gen_compile_unit_die (NULL);
31516 if (dwarf_version >= 5)
31517 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31518 cu = limbo_die_list;
31519 gcc_assert (cu->die == main_comp_unit_die);
31520 limbo_die_list = limbo_die_list->next;
31521 cu->next = cu_die_list;
31522 cu_die_list = cu;
31523 }
31524 else
31525 main_comp_unit_die = comp_unit_die ();
31526
31527 /* Output a terminator label for the .text section. */
31528 switch_to_section (text_section);
31529 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31530 if (cold_text_section)
31531 {
31532 switch_to_section (cold_text_section);
31533 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31534 }
31535
31536 /* We can only use the low/high_pc attributes if all of the code was
31537 in .text. */
31538 if (!have_multiple_function_sections
31539 || (dwarf_version < 3 && dwarf_strict))
31540 {
31541 /* Don't add if the CU has no associated code. */
31542 if (text_section_used)
31543 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31544 text_end_label, true);
31545 }
31546 else
31547 {
31548 unsigned fde_idx;
31549 dw_fde_ref fde;
31550 bool range_list_added = false;
31551
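/* Describe the CU's code with DW_AT_ranges instead: collect one range
   for each used text/cold section and for each FDE that ended up in
   its own section.  add_ranges_by_labels attaches the DW_AT_ranges
   attribute the first time it adds a range and sets RANGE_LIST_ADDED. */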
31552 if (text_section_used)
31553 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31554 text_end_label, &range_list_added, true);
31555 if (cold_text_section_used)
31556 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31557 cold_end_label, &range_list_added, true);
31558
31559 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31560 {
31561 if (DECL_IGNORED_P (fde->decl))
31562 continue;
31563 if (!fde->in_std_section)
31564 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31565 fde->dw_fde_end, &range_list_added,
31566 true);
31567 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31568 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31569 fde->dw_fde_second_end, &range_list_added,
31570 true);
31571 }
31572
31573 if (range_list_added)
31574 {
31575 /* We need to give .debug_loc and .debug_ranges an appropriate
31576 "base address". Use zero so that these addresses become
31577 absolute. Historically, we've emitted the unexpected
31578 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31579 Emit both to give time for other tools to adapt. */
31580 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31581 if (! dwarf_strict && dwarf_version < 4)
31582 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31583
31584 add_ranges (NULL);
31585 }
31586 }
31587
31588 /* AIX Assembler inserts the length, so adjust the reference to match the
31589 offset expected by debuggers. */
31590 strcpy (dl_section_ref, debug_line_section_label);
31591 if (XCOFF_DEBUGGING_INFO)
31592 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31593
31594 if (debug_info_level >= DINFO_LEVEL_TERSE)
31595 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31596 dl_section_ref);
31597
31598 if (have_macinfo)
31599 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31600 macinfo_section_label);
31601
31602 if (dwarf_split_debug_info)
31603 {
31604 if (have_location_lists)
31605 {
31606 /* Since we generate the loclists in the split DWARF .dwo
31607 file itself, we don't need to generate a loclists_base
31608 attribute for the split compile unit DIE. That attribute
31609 (and using relocatable sec_offset FORMs) isn't allowed
31610 for a split compile unit. Only if the .debug_loclists
31611 section was in the main file, would we need to generate a
31612 loclists_base attribute here (for the full or skeleton
31613 unit DIE). */
31614
31615 /* optimize_location_lists calculates the size of the lists,
31616 so index them first, and assign indices to the entries.
31617 Although optimize_location_lists will remove entries from
31618 the table, it only does so for duplicates, and therefore
31619 only reduces ref_counts to 1. */
31620 index_location_lists (comp_unit_die ());
31621 }
31622
31623 if (addr_index_table != NULL)
31624 {
31625 unsigned int index = 0;
31626 addr_index_table
31627 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31628 (&index);
31629 }
31630 }
31631
31632 loc_list_idx = 0;
31633 if (have_location_lists)
31634 {
31635 optimize_location_lists (comp_unit_die ());
31636 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31637 if (dwarf_version >= 5 && dwarf_split_debug_info)
31638 assign_location_list_indexes (comp_unit_die ());
31639 }
31640
31641 save_macinfo_strings ();
31642
31643 if (dwarf_split_debug_info)
31644 {
31645 unsigned int index = 0;
31646
31647 /* Add attributes common to skeleton compile_units and
31648 type_units. Because these attributes include strings, it
31649 must be done before freezing the string table. Top-level
31650 skeleton die attrs are added when the skeleton type unit is
31651 created, so ensure it is created by this point. */
31652 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31653 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31654 }
31655
31656 /* Output all of the compilation units. We put the main one last so that
31657 the offsets are available to output_pubnames. */
31658 for (node = cu_die_list; node; node = node->next)
31659 output_comp_unit (node->die, 0, NULL);
31660
31661 hash_table<comdat_type_hasher> comdat_type_table (100);
31662 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31663 {
31664 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31665
31666 /* Don't output duplicate types. */
31667 if (*slot != HTAB_EMPTY_ENTRY)
31668 continue;
31669
31670 /* Add a pointer to the line table for the main compilation unit
31671 so that the debugger can make sense of DW_AT_decl_file
31672 attributes. */
31673 if (debug_info_level >= DINFO_LEVEL_TERSE)
31674 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31675 (!dwarf_split_debug_info
31676 ? dl_section_ref
31677 : debug_skeleton_line_section_label));
31678
31679 output_comdat_type_unit (ctnode, false);
31680 *slot = ctnode;
31681 }
31682
31683 if (dwarf_split_debug_info)
31684 {
31685 int mark;
31686 struct md5_ctx ctx;
31687
31688 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31689 index_rnglists ();
31690
31691 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31692 md5_init_ctx (&ctx);
31693 mark = 0;
31694 die_checksum (comp_unit_die (), &ctx, &mark);
31695 unmark_all_dies (comp_unit_die ());
31696 md5_finish_ctx (&ctx, checksum);
31697
31698 if (dwarf_version < 5)
31699 {
31700 /* Use the first 8 bytes of the checksum as the dwo_id,
31701 and add it to both comp-unit DIEs. */
31702 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31703 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31704 }
31705
31706 /* Add the base offset of the ranges table to the skeleton
31707 comp-unit DIE. */
31708 if (!vec_safe_is_empty (ranges_table))
31709 {
31710 if (dwarf_version >= 5)
31711 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31712 ranges_base_label);
31713 else
31714 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31715 ranges_section_label);
31716 }
31717
31718 output_addr_table ();
31719 }
31720
31721 /* Output the main compilation unit if non-empty or if .debug_macinfo
31722 or .debug_macro will be emitted. */
31723 output_comp_unit (comp_unit_die (), have_macinfo,
31724 dwarf_split_debug_info ? checksum : NULL);
31725
31726 if (dwarf_split_debug_info && info_section_emitted)
31727 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31728
31729 /* Output the abbreviation table. */
31730 if (vec_safe_length (abbrev_die_table) != 1)
31731 {
31732 switch_to_section (debug_abbrev_section);
31733 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31734 output_abbrev_section ();
31735 }
31736
31737 /* Output location list section if necessary. */
31738 if (have_location_lists)
31739 {
31740 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31741 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31742 /* Output the location lists info. */
31743 switch_to_section (debug_loc_section);
31744 if (dwarf_version >= 5)
31745 {
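/* Emit the DWARF 5 .debug_loclists header: unit length, version,
   address size, segment selector size and offset entry count.  The
   offset entry count is nonzero only for split DWARF, where
   DW_FORM_loclistx indices refer to the offset table emitted below. */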
31746 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31747 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31748 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
31749 dw2_asm_output_data (4, 0xffffffff,
31750 "Initial length escape value indicating "
31751 "64-bit DWARF extension");
31752 dw2_asm_output_delta (dwarf_offset_size, l2, l1,
31753 "Length of Location Lists");
31754 ASM_OUTPUT_LABEL (asm_out_file, l1);
31755 output_dwarf_version ();
31756 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31757 dw2_asm_output_data (1, 0, "Segment Size");
31758 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31759 "Offset Entry Count");
31760 }
31761 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31762 if (dwarf_version >= 5 && dwarf_split_debug_info)
31763 {
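/* Re-walk the DIEs emitting one offset table slot per indexed
   location list; the final count must match the number of indexes
   assigned earlier by assign_location_list_indexes. */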
31764 unsigned int save_loc_list_idx = loc_list_idx;
31765 loc_list_idx = 0;
31766 output_loclists_offsets (comp_unit_die ());
31767 gcc_assert (save_loc_list_idx == loc_list_idx);
31768 }
31769 output_location_lists (comp_unit_die ());
31770 if (dwarf_version >= 5)
31771 ASM_OUTPUT_LABEL (asm_out_file, l2);
31772 }
31773
31774 output_pubtables ();
31775
31776 /* Output the address range information if a CU (.debug_info section)
31777 was emitted. We output an empty table even if we had no functions
31778 to put in it. This is because the consumer has no way to tell the
31779 difference between an empty table that we omitted and failure to
31780 generate a table that would have contained data. */
31781 if (info_section_emitted)
31782 {
31783 switch_to_section (debug_aranges_section);
31784 output_aranges ();
31785 }
31786
31787 /* Output ranges section if necessary. */
31788 if (!vec_safe_is_empty (ranges_table))
31789 {
31790 if (dwarf_version >= 5)
31791 output_rnglists (generation);
31792 else
31793 output_ranges ();
31794 }
31795
31796 /* Have to end the macro section. */
31797 if (have_macinfo)
31798 {
31799 switch_to_section (debug_macinfo_section);
31800 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31801 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31802 : debug_skeleton_line_section_label, false);
31803 dw2_asm_output_data (1, 0, "End compilation unit");
31804 }
31805
31806 /* Output the source line correspondence table. We must do this
31807 even if there is no line information. Otherwise, on an empty
31808 translation unit, we will generate a present, but empty,
31809 .debug_info section. IRIX 6.5 `nm' will then complain when
31810 examining the file. This is done late so that any filenames
31811 used by the debug_info section are marked as 'used'. */
31812 switch_to_section (debug_line_section);
31813 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31814 if (! output_asm_line_debug_info ())
31815 output_line_info (false);
31816 else if (asm_outputs_debug_line_str ())
31817 {
31818 /* When gas outputs the DWARF5 .debug_line[_str] sections, we have to
31819 tell it the comp_dir and main file name for entry zero of the
31820 line table. */
31821 const char *comp_dir, *filename0;
31822
31823 comp_dir = comp_dir_string ();
31824 if (comp_dir == NULL)
31825 comp_dir = "";
31826
31827 filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
31828 if (filename0 == NULL)
31829 filename0 = "";
31830
31831 fprintf (asm_out_file, "\t.file 0 ");
31832 output_quoted_string (asm_out_file, remap_debug_filename (comp_dir));
31833 fputc (' ', asm_out_file);
31834 output_quoted_string (asm_out_file, remap_debug_filename (filename0));
31835 fputc ('\n', asm_out_file);
31836 }
31837
31838 if (dwarf_split_debug_info && info_section_emitted)
31839 {
31840 switch_to_section (debug_skeleton_line_section);
31841 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31842 output_line_info (true);
31843 }
31844
31845 /* If we emitted any indirect strings, output the string table too. */
31846 if (debug_str_hash || skeleton_debug_str_hash)
31847 output_indirect_strings ();
31848 if (debug_line_str_hash)
31849 {
31850 switch_to_section (debug_line_str_section);
31851 const enum dwarf_form form = DW_FORM_line_strp;
31852 debug_line_str_hash->traverse<enum dwarf_form,
31853 output_indirect_string> (form);
31854 }
31855
31856 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31857 symview_upper_bound = 0;
31858 if (zero_view_p)
31859 bitmap_clear (zero_view_p);
31860 }
31861
31862 /* Returns a hash value for X (which really is a variable_value_struct). */
31863
31864 inline hashval_t
31865 variable_value_hasher::hash (variable_value_struct *x)
31866 {
31867 return (hashval_t) x->decl_id;
31868 }
31869
31870 /* Return nonzero if decl_id of variable_value_struct X is the same as
31871 UID of decl Y. */
31872
31873 inline bool
31874 variable_value_hasher::equal (variable_value_struct *x, tree y)
31875 {
31876 return x->decl_id == DECL_UID (y);
31877 }
31878
31879 /* Helper function for resolve_variable_value, handle
31880 DW_OP_GNU_variable_value in one location expression.
31881 Return true if exprloc has been changed into loclist. */
31882
31883 static bool
31884 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31885 {
31886 dw_loc_descr_ref next;
31887 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31888 {
31889 next = loc->dw_loc_next;
31890 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31891 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31892 continue;
31893
31894 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31895 if (DECL_CONTEXT (decl) != current_function_decl)
31896 continue;
31897
31898 dw_die_ref ref = lookup_decl_die (decl);
31899 if (ref)
31900 {
31901 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31902 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31903 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31904 continue;
31905 }
31906 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31907 if (l == NULL)
31908 continue;
31909 if (l->dw_loc_next)
31910 {
31911 if (AT_class (a) != dw_val_class_loc)
31912 continue;
31913 switch (a->dw_attr)
31914 {
31915 /* The following attributes allow both exprloc and loclist
31916 classes, so we can change them into a loclist. */
31917 case DW_AT_location:
31918 case DW_AT_string_length:
31919 case DW_AT_return_addr:
31920 case DW_AT_data_member_location:
31921 case DW_AT_frame_base:
31922 case DW_AT_segment:
31923 case DW_AT_static_link:
31924 case DW_AT_use_location:
31925 case DW_AT_vtable_elem_location:
31926 if (prev)
31927 {
31928 prev->dw_loc_next = NULL;
31929 prepend_loc_descr_to_each (l, AT_loc (a));
31930 }
31931 if (next)
31932 add_loc_descr_to_each (l, next);
31933 a->dw_attr_val.val_class = dw_val_class_loc_list;
31934 a->dw_attr_val.val_entry = NULL;
31935 a->dw_attr_val.v.val_loc_list = l;
31936 have_location_lists = true;
31937 return true;
31938 /* The following attributes allow both exprloc and reference
31939 classes, so if the whole expression is a single
31940 DW_OP_GNU_variable_value we could transform it into a reference. */
31941 case DW_AT_byte_size:
31942 case DW_AT_bit_size:
31943 case DW_AT_lower_bound:
31944 case DW_AT_upper_bound:
31945 case DW_AT_bit_stride:
31946 case DW_AT_count:
31947 case DW_AT_allocated:
31948 case DW_AT_associated:
31949 case DW_AT_byte_stride:
31950 if (prev == NULL && next == NULL)
31951 break;
31952 /* FALLTHRU */
31953 default:
31954 if (dwarf_strict)
31955 continue;
31956 break;
31957 }
31958 /* Create DW_TAG_variable that we can refer to. */
31959 gen_decl_die (decl, NULL_TREE, NULL,
31960 lookup_decl_die (current_function_decl));
31961 ref = lookup_decl_die (decl);
31962 if (ref)
31963 {
31964 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31965 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31966 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31967 }
31968 continue;
31969 }
31970 if (prev)
31971 {
31972 prev->dw_loc_next = l->expr;
31973 add_loc_descr (&prev->dw_loc_next, next);
31974 free_loc_descr (loc, NULL);
31975 next = prev->dw_loc_next;
31976 }
31977 else
31978 {
31979 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31980 add_loc_descr (&loc, next);
31981 next = loc;
31982 }
31983 loc = prev;
31984 }
31985 return false;
31986 }
31987
31988 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31989
31990 static void
31991 resolve_variable_value (dw_die_ref die)
31992 {
31993 dw_attr_node *a;
31994 dw_loc_list_ref loc;
31995 unsigned ix;
31996
31997 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31998 switch (AT_class (a))
31999 {
32000 case dw_val_class_loc:
32001 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
32002 break;
32003 /* FALLTHRU */
32004 case dw_val_class_loc_list:
32005 loc = AT_loc_list (a);
32006 gcc_assert (loc);
32007 for (; loc; loc = loc->dw_loc_next)
32008 resolve_variable_value_in_expr (a, loc->expr);
32009 break;
32010 default:
32011 break;
32012 }
32013 }
32014
32015 /* Attempt to optimize DW_OP_GNU_variable_value referring to
32016 temporaries in the current function. */
32017
32018 static void
32019 resolve_variable_values (void)
32020 {
32021 if (!variable_value_hash || !current_function_decl)
32022 return;
32023
32024 struct variable_value_struct *node
32025 = variable_value_hash->find_with_hash (current_function_decl,
32026 DECL_UID (current_function_decl));
32027
32028 if (node == NULL)
32029 return;
32030
32031 unsigned int i;
32032 dw_die_ref die;
32033 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
32034 resolve_variable_value (die);
32035 }
32036
32037 /* Helper function for note_variable_value, handle one location
32038 expression. */
32039
32040 static void
32041 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
32042 {
32043 for (; loc; loc = loc->dw_loc_next)
32044 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
32045 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
32046 {
32047 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
32048 dw_die_ref ref = lookup_decl_die (decl);
32049 if (! ref && (flag_generate_lto || flag_generate_offload))
32050 {
32051 /* ??? This is somewhat a hack because we do not create DIEs
32052 for variables not in BLOCK trees early, but when generating
32053 early LTO output we need the dw_val_class_decl_ref to be
32054 fully resolved. For fat LTO objects we'd also like to
32055 undo this after LTO dwarf output. */
32056 gcc_assert (DECL_CONTEXT (decl));
32057 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
32058 gcc_assert (ctx != NULL);
32059 gen_decl_die (decl, NULL_TREE, NULL, ctx);
32060 ref = lookup_decl_die (decl);
32061 gcc_assert (ref != NULL);
32062 }
32063 if (ref)
32064 {
32065 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
32066 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
32067 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
32068 continue;
32069 }
32070 if (VAR_P (decl)
32071 && DECL_CONTEXT (decl)
32072 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
32073 && lookup_decl_die (DECL_CONTEXT (decl)))
32074 {
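/* No DIE for DECL exists yet; remember DIE keyed by DECL's containing
   function so that resolve_variable_values can retry the lookup when
   that function is processed later. */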
32075 if (!variable_value_hash)
32076 variable_value_hash
32077 = hash_table<variable_value_hasher>::create_ggc (10);
32078
32079 tree fndecl = DECL_CONTEXT (decl);
32080 struct variable_value_struct *node;
32081 struct variable_value_struct **slot
32082 = variable_value_hash->find_slot_with_hash (fndecl,
32083 DECL_UID (fndecl),
32084 INSERT);
32085 if (*slot == NULL)
32086 {
32087 node = ggc_cleared_alloc<variable_value_struct> ();
32088 node->decl_id = DECL_UID (fndecl);
32089 *slot = node;
32090 }
32091 else
32092 node = *slot;
32093
32094 vec_safe_push (node->dies, die);
32095 }
32096 }
32097 }
32098
32099 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
32100 with dw_val_class_decl_ref operand. */
32101
32102 static void
32103 note_variable_value (dw_die_ref die)
32104 {
32105 dw_die_ref c;
32106 dw_attr_node *a;
32107 dw_loc_list_ref loc;
32108 unsigned ix;
32109
32110 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
32111 switch (AT_class (a))
32112 {
32113 case dw_val_class_loc_list:
32114 loc = AT_loc_list (a);
32115 gcc_assert (loc);
32116 if (!loc->noted_variable_value)
32117 {
32118 loc->noted_variable_value = 1;
32119 for (; loc; loc = loc->dw_loc_next)
32120 note_variable_value_in_expr (die, loc->expr);
32121 }
32122 break;
32123 case dw_val_class_loc:
32124 note_variable_value_in_expr (die, AT_loc (a));
32125 break;
32126 default:
32127 break;
32128 }
32129
32130 /* Mark children. */
32131 FOR_EACH_CHILD (die, c, note_variable_value (c));
32132 }
32133
32134 /* Perform any cleanups needed after the early debug generation pass
32135 has run. */
32136
32137 static void
32138 dwarf2out_early_finish (const char *filename)
32139 {
32140 set_early_dwarf s;
32141 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
32142
32143 /* PCH might result in the DW_AT_producer string being restored from the
32144 header compilation, so always fill it with an empty string initially
32145 and overwrite it only here. */
32146 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
32147
32148 if (dwarf_record_gcc_switches)
32149 producer_string = gen_producer_string (lang_hooks.name,
32150 save_decoded_options,
32151 save_decoded_options_count);
32152 else
32153 producer_string = concat (lang_hooks.name, " ", version_string, NULL);
32154
32155 producer->dw_attr_val.v.val_str->refcount--;
32156 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
32157
32158 /* Add the name for the main input file now. We delayed this from
32159 dwarf2out_init to avoid complications with PCH. */
32160 add_filename_attribute (comp_unit_die (), remap_debug_filename (filename));
32161 add_comp_dir_attribute (comp_unit_die ());
32162
32163 /* With LTO early dwarf was really finished at compile-time, so make
32164 sure to adjust the phase after annotating the LTRANS CU DIE. */
32165 if (in_lto_p)
32166 {
32167 early_dwarf_finished = true;
32168 if (dump_file)
32169 {
32170 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32171 print_die (comp_unit_die (), dump_file);
32172 }
32173 return;
32174 }
32175
32176 /* Walk through the list of incomplete types again, trying once more to
32177 emit full debugging info for them. */
32178 retry_incomplete_types ();
32179
32180 gen_scheduled_generic_parms_dies ();
32181 gen_remaining_tmpl_value_param_die_attribute ();
32182
32183 /* The point here is to flush out the limbo list so that it is empty
32184 and we don't need to stream it for LTO. */
32185 flush_limbo_die_list ();
32186
32187 /* Add DW_AT_linkage_name for all deferred DIEs. */
32188 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32189 {
32190 tree decl = node->created_for;
32191 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32192 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32193 ended up in deferred_asm_name before we knew it was
32194 constant and never written to disk. */
32195 && DECL_ASSEMBLER_NAME (decl))
32196 {
32197 add_linkage_attr (node->die, decl);
32198 move_linkage_attr (node->die);
32199 }
32200 }
32201 deferred_asm_name = NULL;
32202
32203 if (flag_eliminate_unused_debug_types)
32204 prune_unused_types ();
32205
32206 /* Generate separate COMDAT sections for type DIEs. */
32207 if (use_debug_types)
32208 {
32209 break_out_comdat_types (comp_unit_die ());
32210
32211 /* Each new type_unit DIE was added to the limbo die list when created.
32212 Since these have all been added to comdat_type_list, clear the
32213 limbo die list. */
32214 limbo_die_list = NULL;
32215
32216 /* For each new comdat type unit, copy declarations for incomplete
32217 types to make the new unit self-contained (i.e., no direct
32218 references to the main compile unit). */
32219 for (comdat_type_node *ctnode = comdat_type_list;
32220 ctnode != NULL; ctnode = ctnode->next)
32221 copy_decls_for_unworthy_types (ctnode->root_die);
32222 copy_decls_for_unworthy_types (comp_unit_die ());
32223
32224 /* In the process of copying declarations from one unit to another,
32225 we may have left some declarations behind that are no longer
32226 referenced. Prune them. */
32227 prune_unused_types ();
32228 }
32229
32230 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
32231 with dw_val_class_decl_ref operand. */
32232 note_variable_value (comp_unit_die ());
32233 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32234 note_variable_value (node->die);
32235 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32236 ctnode = ctnode->next)
32237 note_variable_value (ctnode->root_die);
32238 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32239 note_variable_value (node->die);
32240
32241 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32242 both the main_cu and all skeleton TUs. Making this call unconditional
32243 would end up either adding a second copy of the AT_pubnames attribute, or
32244 requiring a special case in add_top_level_skeleton_die_attrs. */
32245 if (!dwarf_split_debug_info)
32246 add_AT_pubnames (comp_unit_die ());
32247
32248 /* The early debug phase is now finished. */
32249 early_dwarf_finished = true;
32250 if (dump_file)
32251 {
32252 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32253 print_die (comp_unit_die (), dump_file);
32254 }
32255
32256 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32257 if ((!flag_generate_lto && !flag_generate_offload)
32258 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32259 copy_lto_debug_sections operation of the simple object support in
32260 libiberty is not implemented for them yet. */
32261 || TARGET_PECOFF || TARGET_COFF)
32262 return;
32263
32264 /* Now that we are going to output for LTO, initialize sections and labels
32265 to the LTO variants. We don't need a random-seed postfix as for other
32266 LTO sections, since linking the LTO debug sections into one in a partial
32267 link is fine. */
32268 init_sections_and_labels (true);
32269
32270 /* The output below is modeled after dwarf2out_finish with all
32271 location related output removed and some LTO specific changes.
32272 Some refactoring might make both smaller and easier to match up. */
32273
32274 /* Traverse the DIE's and add sibling attributes to those DIE's
32275 that have children. */
32276 add_sibling_attributes (comp_unit_die ());
32277 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32278 add_sibling_attributes (node->die);
32279 for (comdat_type_node *ctnode = comdat_type_list;
32280 ctnode != NULL; ctnode = ctnode->next)
32281 add_sibling_attributes (ctnode->root_die);
32282
32283 /* AIX Assembler inserts the length, so adjust the reference to match the
32284 offset expected by debuggers. */
32285 strcpy (dl_section_ref, debug_line_section_label);
32286 if (XCOFF_DEBUGGING_INFO)
32287 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32288
32289 if (debug_info_level >= DINFO_LEVEL_TERSE)
32290 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32291
32292 if (have_macinfo)
32293 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32294 macinfo_section_label);
32295
32296 save_macinfo_strings ();
32297
32298 if (dwarf_split_debug_info)
32299 {
32300 unsigned int index = 0;
32301 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32302 }
32303
32304 /* Output all of the compilation units. We put the main one last so that
32305 the offsets are available to output_pubnames. */
32306 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32307 output_comp_unit (node->die, 0, NULL);
32308
32309 hash_table<comdat_type_hasher> comdat_type_table (100);
32310 for (comdat_type_node *ctnode = comdat_type_list;
32311 ctnode != NULL; ctnode = ctnode->next)
32312 {
32313 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32314
32315 /* Don't output duplicate types. */
32316 if (*slot != HTAB_EMPTY_ENTRY)
32317 continue;
32318
32319 /* Add a pointer to the line table for the main compilation unit
32320 so that the debugger can make sense of DW_AT_decl_file
32321 attributes. */
32322 if (debug_info_level >= DINFO_LEVEL_TERSE)
32323 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32324 (!dwarf_split_debug_info
32325 ? debug_line_section_label
32326 : debug_skeleton_line_section_label));
32327
32328 output_comdat_type_unit (ctnode, true);
32329 *slot = ctnode;
32330 }
32331
32332 /* Stick a unique symbol to the main debuginfo section. */
32333 compute_comp_unit_symbol (comp_unit_die ());
32334
32335 /* Output the main compilation unit. We always need it if only for
32336 the CU symbol. */
32337 output_comp_unit (comp_unit_die (), true, NULL);
32338
32339 /* Output the abbreviation table. */
32340 if (vec_safe_length (abbrev_die_table) != 1)
32341 {
32342 switch_to_section (debug_abbrev_section);
32343 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32344 output_abbrev_section ();
32345 }
32346
32347 /* Have to end the macro section. */
32348 if (have_macinfo)
32349 {
32350 /* We have to save macinfo state if we need to output it again
32351 for the FAT part of the object. */
32352 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32353 if (flag_fat_lto_objects)
32354 macinfo_table = macinfo_table->copy ();
32355
32356 switch_to_section (debug_macinfo_section);
32357 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32358 output_macinfo (debug_line_section_label, true);
32359 dw2_asm_output_data (1, 0, "End compilation unit");
32360
32361 if (flag_fat_lto_objects)
32362 {
32363 vec_free (macinfo_table);
32364 macinfo_table = saved_macinfo_table;
32365 }
32366 }
32367
32368 /* Emit a skeleton debug_line section. */
32369 switch_to_section (debug_line_section);
32370 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32371 output_line_info (true);
32372
32373 /* If we emitted any indirect strings, output the string table too. */
32374 if (debug_str_hash || skeleton_debug_str_hash)
32375 output_indirect_strings ();
32376 if (debug_line_str_hash)
32377 {
32378 switch_to_section (debug_line_str_section);
32379 const enum dwarf_form form = DW_FORM_line_strp;
32380 debug_line_str_hash->traverse<enum dwarf_form,
32381 output_indirect_string> (form);
32382 }
32383
32384 /* Switch back to the text section. */
32385 switch_to_section (text_section);
32386 }
32387
32388 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32389 within the same process. For use by toplev::finalize. */
32390
32391 void
32392 dwarf2out_c_finalize (void)
32393 {
32394 last_var_location_insn = NULL;
32395 cached_next_real_insn = NULL;
32396 used_rtx_array = NULL;
32397 incomplete_types = NULL;
32398 debug_info_section = NULL;
32399 debug_skeleton_info_section = NULL;
32400 debug_abbrev_section = NULL;
32401 debug_skeleton_abbrev_section = NULL;
32402 debug_aranges_section = NULL;
32403 debug_addr_section = NULL;
32404 debug_macinfo_section = NULL;
32405 debug_line_section = NULL;
32406 debug_skeleton_line_section = NULL;
32407 debug_loc_section = NULL;
32408 debug_pubnames_section = NULL;
32409 debug_pubtypes_section = NULL;
32410 debug_str_section = NULL;
32411 debug_line_str_section = NULL;
32412 debug_str_dwo_section = NULL;
32413 debug_str_offsets_section = NULL;
32414 debug_ranges_section = NULL;
32415 debug_frame_section = NULL;
32416 fde_vec = NULL;
32417 debug_str_hash = NULL;
32418 debug_line_str_hash = NULL;
32419 skeleton_debug_str_hash = NULL;
32420 dw2_string_counter = 0;
32421 have_multiple_function_sections = false;
32422 text_section_used = false;
32423 cold_text_section_used = false;
32424 cold_text_section = NULL;
32425 current_unit_personality = NULL;
32426
32427 early_dwarf = false;
32428 early_dwarf_finished = false;
32429
32430 next_die_offset = 0;
32431 single_comp_unit_die = NULL;
32432 comdat_type_list = NULL;
32433 limbo_die_list = NULL;
32434 file_table = NULL;
32435 decl_die_table = NULL;
32436 common_block_die_table = NULL;
32437 decl_loc_table = NULL;
32438 call_arg_locations = NULL;
32439 call_arg_loc_last = NULL;
32440 call_site_count = -1;
32441 tail_call_site_count = -1;
32442 cached_dw_loc_list_table = NULL;
32443 abbrev_die_table = NULL;
32444 delete dwarf_proc_stack_usage_map;
32445 dwarf_proc_stack_usage_map = NULL;
32446 line_info_label_num = 0;
32447 cur_line_info_table = NULL;
32448 text_section_line_info = NULL;
32449 cold_text_section_line_info = NULL;
32450 separate_line_info = NULL;
32451 info_section_emitted = false;
32452 pubname_table = NULL;
32453 pubtype_table = NULL;
32454 macinfo_table = NULL;
32455 ranges_table = NULL;
32456 ranges_by_label = NULL;
32457 rnglist_idx = 0;
32458 have_location_lists = false;
32459 loclabel_num = 0;
32460 poc_label_num = 0;
32461 last_emitted_file = NULL;
32462 label_num = 0;
32463 tmpl_value_parm_die_table = NULL;
32464 generic_type_instances = NULL;
32465 frame_pointer_fb_offset = 0;
32466 frame_pointer_fb_offset_valid = false;
32467 base_types.release ();
32468 XDELETEVEC (producer_string);
32469 producer_string = NULL;
32470 output_line_info_generation = 0;
32471 init_sections_and_labels_generation = 0;
32472 }
32473
32474 #include "gt-dwarf2out.h"