In backend_init, the call that follows invokes init_function_once.
8140 void
8141 init_function_once (void) in function.c
8142 {
8143 VARRAY_INT_INIT (prologue, 0, "prologue");
8144 VARRAY_INT_INIT (epilogue, 0, "epilogue");
8145 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
8146 }
Above, prologue records the INSN_UIDs of the prologue instructions, epilogue records the INSN_UIDs of the epilogue instructions, and sibcall_epilogue records the INSN_UIDs of each sibcall epilogue in the function. An INSN_UID is a unique number the compiler assigns to every instruction so that instructions can be referred to unambiguously; the numbers are not necessarily consecutive. For INSN rtx objects, the INSN_UID is the first operand. prologue, epilogue, and sibcall_epilogue are of type varray_type, a virtual array that can hold elements of many different types. Its definition is quite similar to that of rtx, and it is an interesting data structure.
132 struct varray_head_tag GTY(()) { in varray.h
133 size_t num_elements; /* Maximum element number allocated. */
134 size_t elements_used; /* The number of elements used, if
135 using VARRAY_PUSH/VARRAY_POP. */
136 enum varray_data_enum type; /* The kind of elements in the varray. */
137 const char *name; /* name of the varray for reporting errors */
138 varray_data GTY ((desc ("%0.type"))) data; /* The data elements follow,
139 must be last. */
140 };
141 typedef struct varray_head_tag *varray_type;
Note that varray_head_tag is not merely a header recording the array's size and element type; it also contains the body, varray_data, as its last member.
88 typedef union varray_data_tag GTY (()) { in varray.h
89 char GTY ((length ("%0.num_elements"),
90 tag ("VARRAY_DATA_C"))) c[1];
91 unsigned char GTY ((length ("%0.num_elements"),
92 tag ("VARRAY_DATA_UC"))) uc[1];
93 short GTY ((length ("%0.num_elements"),
94 tag ("VARRAY_DATA_S"))) s[1];
95 unsigned short GTY ((length ("%0.num_elements"),
96 tag ("VARRAY_DATA_US"))) us[1];
97 int GTY ((length ("%0.num_elements"),
98 tag ("VARRAY_DATA_I"))) i[1];
99 unsigned int GTY ((length ("%0.num_elements"),
100 tag ("VARRAY_DATA_U"))) u[1];
101 long GTY ((length ("%0.num_elements"),
102 tag ("VARRAY_DATA_L"))) l[1];
103 unsigned long GTY ((length ("%0.num_elements"),
104 tag ("VARRAY_DATA_UL"))) ul[1];
105 HOST_WIDE_INT GTY ((length ("%0.num_elements"),
106 tag ("VARRAY_DATA_HINT"))) hint[1];
107 unsigned HOST_WIDE_INT GTY ((length ("%0.num_elements"),
108 tag ("VARRAY_DATA_UHINT"))) uhint[1];
109 PTR GTY ((length ("%0.num_elements"), use_param (""),
110 tag ("VARRAY_DATA_GENERIC"))) generic[1];
111 char *GTY ((length ("%0.num_elements"),
112 tag ("VARRAY_DATA_CPTR"))) cptr[1];
113 rtx GTY ((length ("%0.num_elements"),
114 tag ("VARRAY_DATA_RTX"))) rtx[1];
115 rtvec GTY ((length ("%0.num_elements"),
116 tag ("VARRAY_DATA_RTVEC"))) rtvec[1];
117 tree GTY ((length ("%0.num_elements"),
118 tag ("VARRAY_DATA_TREE"))) tree[1];
119 struct bitmap_head_def *GTY ((length ("%0.num_elements"),
120 tag ("VARRAY_DATA_BITMAP"))) bitmap[1];
121 struct reg_info_def *GTY ((length ("%0.num_elements"), skip (""),
122 tag ("VARRAY_DATA_REG"))) reg[1];
123 struct const_equiv_data GTY ((length ("%0.num_elements"),
124 tag ("VARRAY_DATA_CONST_EQUIV"))) const_equiv[1];
125 struct basic_block_def *GTY ((length ("%0.num_elements"), skip (""),
126 tag ("VARRAY_DATA_BB"))) bb[1];
127 struct elt_list *GTY ((length ("%0.num_elements"),
128 tag ("VARRAY_DATA_TE"))) te[1];
129 } varray_data;
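Each member of the union is declared as a one-element array, but varray_init (shown below) allocates the header and room for num_elements entries in a single block, so the accessor macros simply index into this trailing storage. Roughly, with checking disabled, the accessors look like the sketch below; the real varray.h macros go through VARRAY_CHECK, which adds bounds checking when ENABLE_CHECKING is defined.

/* Sketch of the element accessors; not the verbatim varray.h text.  */
#define VARRAY_INT_SKETCH(VA, N)  ((VA)->data.i[(N)])
#define VARRAY_RTX_SKETCH(VA, N)  ((VA)->data.rtx[(N)])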
VARRAY_INT_INIT initializes each of these varrays with zero elements of type int, so they start out empty.
159 #define VARRAY_INT_INIT(va, num, name) \ in varray.h
160 va = varray_init (num, VARRAY_DATA_I, name)
115 varray_type
116 varray_init (size_t num_elements, enum varray_data_enum element_kind, in varray.c
117 const char *name)
118 {
119 size_t data_size = num_elements * element[element_kind].size;
120 varray_type ptr;
121 #ifdef GATHER_STATISTICS
122 struct varray_descriptor *desc = varray_descriptor (name);
123
124 desc->created++;
125 desc->allocated += data_size + VARRAY_HDR_SIZE;
126 #endif
127 if (element[element_kind].uses_ggc)
128 ptr = ggc_alloc_cleared (VARRAY_HDR_SIZE + data_size);
129 else
130 ptr = xcalloc (VARRAY_HDR_SIZE + data_size, 1);
131
132 ptr->num_elements = num_elements;
133 ptr->elements_used = 0;
134 ptr->type = element_kind;
135 ptr->name = name;
136 return ptr;
137 }
To know how large each element is and whether the storage must be garbage collected, varray_init needs some per-kind information. It is kept in a constant static array named element, shown below.
87 static const struct { in varray.c
88 unsigned char size;
89 bool uses_ggc;
90 } element[NUM_VARRAY_DATA] = {
91 { sizeof (char), 1 },
92 { sizeof (unsigned char), 1 },
93 { sizeof (short), 1 },
94 { sizeof (unsigned short), 1 },
95 { sizeof (int), 1 },
96 { sizeof (unsigned int), 1 },
97 { sizeof (long), 1 },
98 { sizeof (unsigned long), 1 },
99 { sizeof (HOST_WIDE_INT), 1 },
100 { sizeof (unsigned HOST_WIDE_INT), 1 },
101 { sizeof (void *), 1 },
102 { sizeof (char *), 1 },
103 { sizeof (struct rtx_def *), 1 },
104 { sizeof (struct rtvec_def *), 1 },
105 { sizeof (union tree_node *), 1 },
106 { sizeof (struct bitmap_head_def *), 1 },
107 { sizeof (struct reg_info_def *), 0 },
108 { sizeof (struct const_equiv_data), 0 },
109 { sizeof (struct basic_block_def *), 0 },
110 { sizeof (struct elt_list *), 1 },
111 };
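Before moving on, here is a hedged sketch of how the prologue varray initialized above gets used later in function.c. It is modeled loosely on record_insns and contains in that file, using the VARRAY_SIZE, VARRAY_GROW and VARRAY_INT accessors from varray.h; it is not the verbatim source.

/* Sketch: append the INSN_UIDs of a chain of prologue insns to the
   `prologue' varray, then test whether a given UID was recorded.  */
static void
record_prologue_insns_sketch (rtx insns)
{
  rtx insn;
  int i, len = 0;

  for (insn = insns; insn != NULL_RTX; insn = NEXT_INSN (insn))
    len++;

  i = VARRAY_SIZE (prologue);
  VARRAY_GROW (prologue, i + len);      /* make room for LEN more UIDs */

  for (insn = insns; insn != NULL_RTX; insn = NEXT_INSN (insn), i++)
    VARRAY_INT (prologue, i) = INSN_UID (insn);
}

static int
prologue_contains_uid_sketch (int uid)
{
  size_t i;

  for (i = 0; i < VARRAY_SIZE (prologue); i++)
    if (VARRAY_INT (prologue, i) == uid)
      return 1;
  return 0;
}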
Back in backend_init, the next function invoked is init_varasm_once.
4584 void
4585 init_varasm_once (void) in varasm.c
4586 {
4587 in_named_htab = htab_create_ggc (31, in_named_entry_hash,
4588 in_named_entry_eq, NULL);
4589 const_desc_htab = htab_create_ggc (1009, const_desc_hash,
4590 const_desc_eq, NULL);
4591
4592 const_alias_set = new_alias_set ();
4593 }
Above, in_named_htab is a hash table recording the flags that have been used for each named section; const_desc_htab is a hash table of descriptors for all constant rtx objects encountered during assembly output; and const_alias_set is the alias set of these constants (a constant's alias set contains only itself).
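For readers unfamiliar with the libiberty hash-table interface behind htab_create_ggc, the sketch below shows the usual lookup-or-insert pattern built on htab_find_slot; the entry type and the hash/equality callbacks are purely illustrative, not the real ones from varasm.c.

#include <string.h>          /* strcmp */
#include "hashtab.h"         /* htab_t, htab_find_slot, htab_hash_string */

/* Hypothetical entry type used only for this sketch.  */
struct demo_entry
{
  const char *name;
  unsigned int flags;
};

static hashval_t
demo_hash (const void *p)
{
  return htab_hash_string (((const struct demo_entry *) p)->name);
}

static int
demo_eq (const void *p1, const void *p2)
{
  return strcmp (((const struct demo_entry *) p1)->name,
                 ((const struct demo_entry *) p2)->name) == 0;
}

/* Look NAME up in TABLE, inserting a fresh entry if it is not there.  */
static struct demo_entry *
get_or_insert_sketch (htab_t table, const char *name)
{
  struct demo_entry key, **slot;

  key.name = name;
  slot = (struct demo_entry **) htab_find_slot (table, &key, INSERT);
  if (*slot == NULL)
    {
      *slot = ggc_alloc_cleared (sizeof (struct demo_entry));
      (*slot)->name = name;
    }
  return *slot;
}

A table built with, say, htab_create_ggc (31, demo_hash, demo_eq, NULL) could then be queried through such a helper.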
Back in backend_init, the next call is init_dummy_function_start. It initializes the rtl expansion machinery so that simple things, such as generating instruction sequences, can be done; this provides a context during the global initialization of some passes.
6518 void
6519 init_dummy_function_start (void) in function.c
6520 {
6521 prepare_function_start (NULL);
6522 }
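For orientation, the sketch below shows how backend_init is assumed to bracket the rtl-generating initializers with this dummy context; the exact list of calls in between is an assumption, and only the bracketing matters here.

/* Hedged sketch of the relevant fragment of backend_init (toplev.c).  */
static void
backend_init_fragment_sketch (void)
{
  /* Provide a dummy cfun so the following initializers may emit rtl.  */
  init_dummy_function_start ();

  init_expmed ();       /* assumed: precomputes costs of arithmetic */
  init_expr_once ();    /* assumed: probes which register moves are valid */

  /* Throw the dummy context away again.  */
  expand_dummy_function_end ();
}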
init_dummy_function_start invokes prepare_function_start to create a dummy function context cfun, because the initialization functions that follow need to generate rtl. (In normal compilation, the rtl form of a function is produced and handed to the back end only after the front end has finished parsing and its own processing.)
6482 static void
6483 prepare_function_start (tree fndecl) in function.c
6484 {
6485 if (fndecl && DECL_SAVED_INSNS (fndecl))
6486 cfun = DECL_SAVED_INSNS (fndecl);
6487 else
6488 allocate_struct_function (fndecl);
6489 init_emit ();
6490 init_varasm_status (cfun);
6491 init_expr ();
6492
6493 cse_not_expected = ! optimize;
6494
6495 /* Caller save not needed yet. */
6496 caller_save_needed = 0;
6497
6498 /* We haven't done register allocation yet. */
6499 reg_renumber = 0;
6500
6501 /* Indicate that we need to distinguish between the return value of the
6502 present function and the return value of a function being called. */
6503 rtx_equal_function_value_matters = 1;
6504
6505 /* Indicate that we have not instantiated virtual registers yet. */
6506 virtuals_instantiated = 0;
6507
6508 /* Indicate that we want CONCATs now. */
6509 generating_concat_p = 1;
6510
6511 /* Indicate we have no need of a frame pointer yet. */
6512 frame_pointer_needed = 0;
6513 }
Struct function gathers the important global and static state describing the function currently being compiled.
176 struct function GTY(()) in function.h
177 {
178 struct eh_status *eh;
179 struct stmt_status *stmt;
180 struct expr_status *expr;
181 struct emit_status *emit;
182 struct varasm_status *varasm;
183
184 /* For function.c. */
185
186 /* Points to the FUNCTION_DECL of this function. */
187 tree decl;
188
189 /* Function containing this function, if any. */
190 struct function *outer;
191
192 /* Number of bytes of args popped by function being compiled on its return.
193 Zero if no bytes are to be popped.
194 May affect compilation of return insn or of function epilogue. */
195 int pops_args;
196
197 /* If function's args have a fixed size, this is that size, in bytes.
198 Otherwise, it is -1.
199 May affect compilation of return insn or of function epilogue. */
200 int args_size;
201
202 /* # bytes the prologue should push and pretend that the caller pushed them.
203 The prologue must do this, but only if parms can be passed in
204 registers. */
205 int pretend_args_size;
206
207 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
208 defined, the needed space is pushed by the prologue. */
209 int outgoing_args_size;
210
211 /* This is the offset from the arg pointer to the place where the first
212 anonymous arg can be found, if there is one. */
213 rtx arg_offset_rtx;
214
215 /* Quantities of various kinds of registers
216 used for the current function's args. */
217 CUMULATIVE_ARGS args_info;
218
219 /* If nonzero, an RTL expression for the location at which the current
220 function returns its result. If the current function returns its
221 result in a register, current_function_return_rtx will always be
222 the hard register containing the result. */
223 rtx return_rtx;
224
225 /* The arg pointer hard register, or the pseudo into which it was copied. */
226 rtx internal_arg_pointer;
227
228 /* Language-specific reason why the current function cannot be made
229 inline. */
230 const char *cannot_inline;
231
232 /* Opaque pointer used by get_hard_reg_initial_val and
233 has_hard_reg_initial_val (see integrate.[hc]). */
234 struct initial_value_struct *hard_reg_initial_vals;
235
236 /* Number of function calls seen so far in current function. */
237 int x_function_call_count;
238
239 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
240 (labels to which there can be nonlocal gotos from nested functions)
241 in this function. */
242 tree x_nonlocal_labels;
243
244 /* List (chain of EXPR_LIST) of stack slots that hold the current handlers
245 for nonlocal gotos. There is one for every nonlocal label in the
246 function; this list matches the one in nonlocal_labels.
247 Zero when function does not have nonlocal labels. */
248 rtx x_nonlocal_goto_handler_slots;
249
250 /* List (chain of EXPR_LIST) of labels heading the current handlers for
251 nonlocal gotos. */
252 rtx x_nonlocal_goto_handler_labels;
253
254 /* RTX for stack slot that holds the stack pointer value to restore
255 for a nonlocal goto.
256 Zero when function does not have nonlocal labels. */
257 rtx x_nonlocal_goto_stack_level;
258
259 /* Label that will go on parm cleanup code, if any.
260 Jumping to this label runs cleanup code for parameters, if
261 such code must be run. Following this code is the logical return
262 label. */
263 rtx x_cleanup_label;
264
265 /* Label that will go on function epilogue.
266 Jumping to this label serves as a "return" instruction
267 on machines which require execution of the epilogue on all returns. */
268 rtx x_return_label;
269
270 /* Label that will go on the end of function epilogue.
271 Jumping to this label serves as a "naked return" instruction
272 on machines which require execution of the epilogue on all returns. */
273 rtx x_naked_return_label;
274
275 /* Label and register for unswitching computed gotos. */
276 rtx computed_goto_common_label;
277 rtx computed_goto_common_reg;
278
279 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
280 So we can mark them all live at the end of the function, if nonopt. */
281 rtx x_save_expr_regs;
282
283 /* List (chain of EXPR_LISTs) of all stack slots in this function.
284 Made for the sake of unshare_all_rtl. */
285 rtx x_stack_slot_list;
286
287 /* Chain of all RTL_EXPRs that have insns in them. */
288 tree x_rtl_expr_chain;
289
290 /* Label to jump back to for tail recursion, or 0 if we have
291 not yet needed one for this function. */
292 rtx x_tail_recursion_label;
293
294 /* Place after which to insert the tail_recursion_label if we need one. */
295 rtx x_tail_recursion_reentry;
296
297 /* Location at which to save the argument pointer if it will need to be
298 referenced. There are two cases where this is done: if nonlocal gotos
299 exist, or if vars stored at an offset from the argument pointer will be
300 needed by inner routines. */
301 rtx x_arg_pointer_save_area;
302
303 /* If the function returns non-void, we will emit a clobber of the
304 return registers just in case the user fell off the end without
305 returning a proper value. This is that insn. */
306 rtx x_clobber_return_insn;
307
308 /* Offset to end of allocated area of stack frame.
309 If stack grows down, this is the address of the last stack slot allocated.
310 If stack grows up, this is the address for the next slot. */
311 HOST_WIDE_INT x_frame_offset;
312
313 /* List (chain of TREE_LISTs) of static chains for containing functions.
314 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
315 in an RTL_EXPR in the TREE_VALUE. */
316 tree x_context_display;
317
318 /* List (chain of TREE_LISTs) of trampolines for nested functions.
319 The trampoline sets up the static chain and jumps to the function.
320 We supply the trampoline's address when the function's address is
321 requested.
322
323 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
324 in an RTL_EXPR in the TREE_VALUE. */
325 tree x_trampoline_list;
326
327 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
328 rtx x_parm_birth_insn;
329
330 /* Last insn of those whose job was to put parms into their nominal
331 homes. */
332 rtx x_last_parm_insn;
333
334 /* 1 + last pseudo register number possibly used for loading a copy
335 of a parameter of this function. */
336 unsigned int x_max_parm_reg;
337
338 /* Vector indexed by REGNO, containing location on stack in which
339 to put the parm which is nominally in pseudo register REGNO,
340 if we discover that that parm must go in the stack. The highest
341 element in this vector is one less than MAX_PARM_REG, above. */
342 rtx * GTY ((length ("%h.x_max_parm_reg"))) x_parm_reg_stack_loc;
343
344 /* List of all temporaries allocated, both available and in use. */
345 struct temp_slot *x_temp_slots;
346
347 /* Current nesting level for temporaries. */
348 int x_temp_slot_level;
349
350 /* Current nesting level for variables in a block. */
351 int x_var_temp_slot_level;
352
353 /* When temporaries are created by TARGET_EXPRs, they are created at
354 this level of temp_slot_level, so that they can remain allocated
355 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
356 of TARGET_EXPRs. */
357 int x_target_temp_slot_level;
358
359 /* This slot is initialized as 0 and is added to
360 during the nested function. */
361 struct var_refs_queue *fixup_var_refs_queue;
362
363 /* For integrate.c. */
364 int inlinable;
365 int no_debugging_symbols;
366 rtvec original_arg_vector;
367 tree original_decl_initial;
368 /* Last insn of those whose job was to put parms into their nominal
369 homes. */
370 rtx inl_last_parm_insn;
371 /* Highest label number in current function. */
372 int inl_max_label_num;
373
374 /* Function sequence number for profiling, debugging, etc. */
375 int funcdef_no;
376
377 /* For md files. */
378
379 /* tm.h can use this to store whatever it likes. */
380 struct machine_function * GTY ((maybe_undef (""))) machine;
381 /* The largest alignment of slot allocated on the stack. */
382 int stack_alignment_needed;
383 /* Preferred alignment of the end of stack frame. */
384 int preferred_stack_boundary;
385 /* Set when the call to function itself has been emit. */
386 bool recursive_call_emit;
387
388 /* Language-specific code can use this to store whatever it likes. */
389 struct language_function * language;
390
391 /* For reorg. */
392
393 /* If some insns can be deferred to the delay slots of the epilogue, the
394 delay list for them is recorded here. */
395 rtx epilogue_delay_list;
396
397 /* How commonly executed the function is. Initialized during branch
398 probabilities pass. */
399 enum function_frequency {
400 /* This function most likely won't be executed at all.
401 (set only when profile feedback is available). */
402 FUNCTION_FREQUENCY_UNLIKELY_EXECUTED,
403 /* The default value. */
404 FUNCTION_FREQUENCY_NORMAL,
405 /* Optimize this function hard
406 (set only when profile feedback is available). */
407 FUNCTION_FREQUENCY_HOT
408 } function_frequency;
409
410 /* Maximal number of entities in the single jumptable. Used to estimate
411 final flowgraph size. */
412 int max_jumptable_ents;
413
414 /* Collected bit flags. */
415
416 /* Nonzero if function being compiled needs to be given an address
417 where the value should be stored. */
418 unsigned int returns_struct : 1;
419
420 /* Nonzero if function being compiled needs to
421 return the address of where it has put a structure value. */
422 unsigned int returns_pcc_struct : 1;
423
424 /* Nonzero if the current function returns a pointer type. */
425 unsigned int returns_pointer : 1;
426
427 /* Nonzero if function being compiled needs to be passed a static chain. */
428 unsigned int needs_context : 1;
429
430 /* Nonzero if function being compiled can call setjmp. */
431 unsigned int calls_setjmp : 1;
432
433 /* Nonzero if function being compiled can call longjmp. */
434 unsigned int calls_longjmp : 1;
435
436 /* Nonzero if function being compiled can call alloca,
437 either as a subroutine or builtin. */
438 unsigned int calls_alloca : 1;
439
440 /* Nonzero if the function calls __builtin_eh_return. */
441 unsigned int calls_eh_return : 1;
442
443 /* Nonzero if the function calls __builtin_constant_p. */
444 unsigned int calls_constant_p : 1;
445
446 /* Nonzero if function being compiled receives nonlocal gotos
447 from nested functions. */
448 unsigned int has_nonlocal_label : 1;
449
450 /* Nonzero if function being compiled has nonlocal gotos to parent
451 function. */
452 unsigned int has_nonlocal_goto : 1;
453
454 /* Nonzero if function being compiled contains nested functions. */
455 unsigned int contains_functions : 1;
456
457 /* Nonzero if the function being compiled issues a computed jump. */
458 unsigned int has_computed_jump : 1;
459
460 /* Nonzero if the current function is a thunk, i.e., a lightweight
461 function implemented by the output_mi_thunk hook) that just
462 adjusts one of its arguments and forwards to another
463 function. */
464 unsigned int is_thunk : 1;
465
466 /* This bit is used by the exception handling logic. It is set if all
467 calls (if any) are sibling calls. Such functions do not have to
468 have EH tables generated, as they cannot throw. A call to such a
469 function, however, should be treated as throwing if any of its callees
470 can throw. */
471 unsigned int all_throwers_are_sibcalls : 1;
472
473 /* Nonzero if instrumentation calls for function entry and exit should be
474 generated. */
475 unsigned int instrument_entry_exit : 1;
476
477 /* Nonzero if profiling code should be generated. */
478 unsigned int profile : 1;
479
480 /* Nonzero if stack limit checking should be enabled in the current
481 function. */
482 unsigned int limit_stack : 1;
483
484 /* Nonzero if current function uses stdarg.h or equivalent. */
485 unsigned int stdarg : 1;
486
487 /* Nonzero if this function is being processed in function-at-a-time
488 mode. In other words, if all tree structure for this function,
489 including the BLOCK tree, is created before RTL generation
490 commences. */
491 unsigned int x_whole_function_mode_p : 1;
492
493 /* Nonzero if the back-end should not keep track of expressions that
494 determine the size of variable-sized objects. Normally, such
495 expressions are saved away, and then expanded when the next
496 function is started. For example, if a parameter has a
497 variable-sized type, then the size of the parameter is computed
498 when the function body is entered. However, some front-ends do
499 not desire this behavior. */
500 unsigned int x_dont_save_pending_sizes_p : 1;
501
502 /* Nonzero if the current function uses the constant pool. */
503 unsigned int uses_const_pool : 1;
504
505 /* Nonzero if the current function uses pic_offset_table_rtx. */
506 unsigned int uses_pic_offset_table : 1;
507
508 /* Nonzero if the current function needs an lsda for exception handling. */
509 unsigned int uses_eh_lsda : 1;
510
511 /* Nonzero if code to initialize arg_pointer_save_area has been emitted. */
512 unsigned int arg_pointer_save_area_init : 1;
513
514 /* Flag for use by ther rtl inliner, to tell if the function has been
515 processed at least once. */
516 unsigned int rtl_inline_init : 1;
517
518 /* Nonzero if the rtl inliner has saved the function for inlining. */
519 unsigned int saved_for_inline : 1;
520 };
The most important parts are its first five members: eh controls exception handling in the function; stmt records the accumulated state of statement expansion (the nesting of blocks, loops, and switches), which affects the translation of the current statement; expr records state needed while expanding expressions, in particular around function calls; emit controls the emission of the corresponding rtl instructions; and varasm records the constants of the function.
6434 void
6435 allocate_struct_function (tree fndecl) in function.c
6436 {
6437 tree result;
6438
6439 cfun = ggc_alloc_cleared (sizeof (struct function));
6440
6441 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6442
6443 cfun->stack_alignment_needed = STACK_BOUNDARY;
6444 cfun->preferred_stack_boundary = STACK_BOUNDARY;
6445
6446 current_function_funcdef_no = funcdef_no++;
6447
6448 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6449
6450 init_stmt_for_function ();
6451 init_eh_for_function ();
6452
6453 (*lang_hooks.function.init) (cfun);
6454 if (init_machine_status)
6455 cfun->machine = (*init_machine_status) ();
6456
6457 if (fndecl == NULL)
6458 return;
…
6477 }
Above, current_function_funcdef_no, current_function_returns_pcc_struct, current_function_returns_struct, current_function_returns_pointer, max_parm_reg, and current_function_needs_context are all macros that select a particular member of the function structure (most of them appear in the part of the listing elided above). For example:
558 #define current_function_funcdef_no (cfun->funcdef_no) in function.h
567 #define max_parm_reg (cfun->x_max_parm_reg)
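As an illustration (not taken from the source), with cfun pointing at the current struct function, these accessors turn the assignments in allocate_struct_function into plain field writes:

/* current_function_funcdef_no = funcdef_no++;     expands to:  */
cfun->funcdef_no = funcdef_no++;

/* max_parm_reg = LAST_VIRTUAL_REGISTER + 1;       expands to:  */
cfun->x_max_parm_reg = LAST_VIRTUAL_REGISTER + 1;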
At line 6441, LAST_VIRTUAL_REGISTER equals FIRST_PSEUDO_REGISTER + 4; for the x86 target its value is 57. Register numbers up to and including this value are reserved for the hard and virtual registers, so pseudo registers in the rtl form are numbered from LAST_VIRTUAL_REGISTER + 1 upward.
At lines 6443 and 6444, STACK_BOUNDARY gives the alignment required for the stack; for x86 it is 32 bits (4 bytes).
At line 6446, the static counter funcdef_no hands out a unique sequence number per function, used for profiling, debugging, and so on; the corresponding field of the function structure records this value.
Next, at lines 6450 and 6451, init_stmt_for_function and init_eh_for_function simply allocate the stmt and eh fields of the structure, which serve statement expansion and exception handling respectively.
At line 6453, lang_hooks.function.init for C++ is cxx_push_function_context. In the current context it merely allocates an instance of language_function for the function object. Structure language_function saves and restores the variables that keep track of the progress of compilation of the current function; it is used for nested functions.
At line 6454, init_machine_status points to ix86_init_machine_status on x86. It creates an instance of machine_function for the function object, which holds machine-dependent information related to function handling.
At line 6457, fndecl is NULL in this invocation, so the function returns here.
Back in prepare_function_start, init_emit is invoked next; it initializes the data structures and variables used for rtl generation. The data structure involved is emit_status, the emit member of struct function.
57 struct emit_status GTY(()) in function.h
58 {
59 /* This is reset to LAST_VIRTUAL_REGISTER + 1 at the start of each function.
60 After rtl generation, it is 1 plus the largest register number used. */
61 int x_reg_rtx_no;
62
63 /* Lowest label number in current function. */
64 int x_first_label_num;
65
66 /* The ends of the doubly-linked chain of rtl for the current function.
67 Both are reset to null at the start of rtl generation for the function.
68
69 start_sequence saves both of these on `sequence_stack' along with
70 `sequence_rtl_expr' and then starts a new, nested sequence of insns. */
71 rtx x_first_insn;
72 rtx x_last_insn;
73
74 /* RTL_EXPR within which the current sequence will be placed. Use to
75 prevent reuse of any temporaries within the sequence until after the
76 RTL_EXPR is emitted. */
77 tree sequence_rtl_expr;
78
79 /* Stack of pending (incomplete) sequences saved by `start_sequence'.
80 Each element describes one pending sequence.
81 The main insn-chain is saved in the last element of the chain,
82 unless the chain is empty. */
83 struct sequence_stack *sequence_stack;
84
85 /* INSN_UID for next insn emitted.
86 Reset to 1 for each function compiled. */
87 int x_cur_insn_uid;
88
89 /* Location the last line-number NOTE emitted.
90 This is used to avoid generating duplicates. */
91 location_t x_last_location;
92
93 /* The length of the regno_pointer_align, regno_decl, and x_regno_reg_rtx
94 vectors. Since these vectors are needed during the expansion phase when
95 the total number of registers in the function is not yet known, the
96 vectors are copied and made bigger when necessary. */
97 int regno_pointer_align_length;
98
99 /* Indexed by pseudo register number, if nonzero gives the known alignment
100 for that pseudo (if REG_POINTER is set in x_regno_reg_rtx).
101 Allocated in parallel with x_regno_reg_rtx. */
102 unsigned char * GTY ((length ("%h.x_reg_rtx_no")))
103 regno_pointer_align;
104
105 /* Indexed by pseudo register number, gives the rtx for that pseudo.
106 Allocated in parallel with regno_pointer_align.
107
108 Note MEM expressions can appear in this array due to the actions
109 of put_var_into_stack. */
110 rtx * GTY ((length ("%h.x_reg_rtx_no"))) x_regno_reg_rtx;
111 };
This structure controls the emission of rtl expressions for the function being compiled.
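The sequence machinery mentioned in the comments above is worth a concrete look. The following is a hedged sketch of the usual pattern built on start_sequence, get_insns and end_sequence (emit-rtl.c): first_insn and last_insn are saved on sequence_stack, insns are emitted into a fresh nested chain, and the outer chain is then restored.

/* Hedged sketch: build a detached insn sequence on the side.  The
   emitted PATTERN is just a placeholder.  */
static rtx
build_side_sequence_sketch (rtx pattern)
{
  rtx seq;

  start_sequence ();      /* push first_insn/last_insn onto sequence_stack */
  emit_insn (pattern);    /* emit into the new, nested chain */
  seq = get_insns ();     /* take the nested chain */
  end_sequence ();        /* pop the outer chain back */

  return seq;             /* the caller can emit SEQ somewhere later */
}

init_emit, called from prepare_function_start, sets these fields up as follows.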
5333 void
5334 init_emit (void) in emit-rtl.c
5335 {
5336 struct function *f = cfun;
5337
5338 f->emit = ggc_alloc (sizeof (struct emit_status));
5339 first_insn = NULL;
5340 last_insn = NULL;
5341 seq_rtl_expr = NULL;
5342 cur_insn_uid = 1;
5343 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5344 last_location.line = 0;
5345 last_location.file = 0;
5346 first_label_num = label_num;
5347 last_label_num = 0;
5348 seq_stack = NULL;
5349
5350 /* Init the tables that describe all the pseudo regs. */
5351
5352 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5353
5354 f->emit->regno_pointer_align
5355 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5356 * sizeof (unsigned char));
5357
5358 regno_reg_rtx
5359 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5360
5361 /* Put copies of all the hard registers into regno_reg_rtx. */
5362 memcpy (regno_reg_rtx,
5363 static_regno_reg_rtx,
5364 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5365
5366 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5367 init_virtual_regs (f->emit);
5368
5369 /* Indicate that the virtual registers and stack locations are
5370 all pointers. */
5371 REG_POINTER (stack_pointer_rtx) = 1;
5372 REG_POINTER (frame_pointer_rtx) = 1;
5373 REG_POINTER (hard_frame_pointer_rtx) = 1;
5374 REG_POINTER (arg_pointer_rtx) = 1;
5375
5376 REG_POINTER (virtual_incoming_args_rtx) = 1;
5377 REG_POINTER (virtual_stack_vars_rtx) = 1;
5378 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5379 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5380 REG_POINTER (virtual_cfa_rtx) = 1;
5381
5382 #ifdef STACK_BOUNDARY
5383 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5384 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5385 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5386 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5387
5388 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5389 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5390 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5391 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5392 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5393 #endif
5394
5395 #ifdef INIT_EXPANDERS
5396 INIT_EXPANDERS;
5397 #endif
5398 }
In the source, accessor macros are defined for every member of struct function to ease access to them; the following are the ones used in the function above.
114 #define reg_rtx_no (cfun->emit->x_reg_rtx_no) in function.h
115 #define seq_rtl_expr (cfun->emit->sequence_rtl_expr)
116 #define regno_reg_rtx (cfun->emit->x_regno_reg_rtx)
117 #define seq_stack (cfun->emit->sequence_stack)
173 #define first_insn (cfun->emit->x_first_insn) in emit-rtl.h
174 #define last_insn (cfun->emit->x_last_insn)
175 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
176 #define last_location (cfun->emit->x_last_location)
177 #define first_label_num (cfun->emit->x_first_label_num)
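These accessors reappear whenever a new pseudo register is created. The following is a hedged sketch of the tail of gen_reg_rtx (emit-rtl.c), showing how reg_rtx_no and the regno_reg_rtx / regno_pointer_align vectors grow together; the real function also copes with nested sequences and other special cases.

/* Sketch only: allocate the next pseudo register number.  */
static rtx
gen_pseudo_reg_sketch (enum machine_mode mode)
{
  rtx val;

  /* When the parallel per-register tables are full, the real code
     doubles regno_pointer_align_length and reallocates both
     regno_pointer_align and regno_reg_rtx here.  */
  if (reg_rtx_no == cfun->emit->regno_pointer_align_length)
    ;  /* ... grow the tables ... */

  val = gen_raw_REG (mode, reg_rtx_no);   /* a fresh (reg:MODE n) rtx */
  regno_reg_rtx[reg_rtx_no++] = val;      /* record it, bump the counter */
  return val;
}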
At line 5362, recall that static_regno_reg_rtx was created in init_emit_once and stands for the hard registers in the rtl language. It can be regarded as a constant set: each time a function is about to be compiled, the whole static_regno_reg_rtx is copied into the regno_reg_rtx vector (x_regno_reg_rtx of emit_status), as done here. Then init_virtual_regs fills in the special virtual registers as follows.
5147 void
5148 init_virtual_regs (struct emit_status *es) in emit-rtl.c
5149 {
5150 rtx *ptr = es->x_regno_reg_rtx;
5151 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5152 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5153 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5154 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5155 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5156 }
Above, the right-hand sides of the assignments are all macros selecting entries of global_rtl, which were likewise created in init_emit_once; note that they are all of Pmode. Pmode is target dependent and was not considered when static_regno_reg_rtx was created.
The macro REG_POINTER tests (or, as here, sets) whether a REG rtx holds a pointer value.
1032 #define REG_POINTER(RTX) \ in rtl.h
1033 (RTL_FLAG_CHECK1("REG_POINTER", (RTX), REG)->frame_related)
405 #define RTL_FLAG_CHECK1(NAME, RTX, C1) __extension__ \ in rtl.h
406 ({ rtx const _rtx = (RTX); \
407 if (GET_CODE(_rtx) != C1) \
408 rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \
409 __FUNCTION__); \
410 _rtx; })
Note that REG_POINTER is stored in the rtx flag bit named frame_related; for REG objects the bit means "holds a pointer", not that the register is frame related. The macro REGNO_POINTER_ALIGN selects the known alignment, in bits, of the pointer held in a given register, recorded in the emit_status instance. STACK_BOUNDARY on the 32-bit x86 ABI is 32 bits, i.e. one word (4 bytes).
119 #define REGNO_POINTER_ALIGN(REGNO) (cfun->emit->regno_pointer_align[REGNO])
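A hedged sketch of how a later consumer can query the two pieces of information set here (the helper name is hypothetical, and the argument is assumed to be a REG rtx):

/* Return the known alignment, in bits, of the pointer held in REG,
   or the minimal unit alignment if REG is not known to be a pointer.  */
static unsigned int
pointer_alignment_sketch (rtx reg)
{
  if (REG_POINTER (reg))
    return REGNO_POINTER_ALIGN (REGNO (reg));
  return BITS_PER_UNIT;
}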
The x86 target does not define INIT_EXPANDERS, so line 5395 expands to nothing.
In prepare_function_start, the next function is init_varasm_status, which sets up the constant pool and its hash tables for the new function object. This state is held in structure varasm_status.
75 #define MAX_RTX_HASH_TABLE 61
76
77 struct varasm_status GTY(()) in varasm.c
78 {
79 /* Hash facility for making memory-constants
80 from constant rtl-expressions. It is used on RISC machines
81 where immediate integer arguments and constant addresses are restricted
82 so that such constants must be stored in memory.
83
84 This pool of constants is reinitialized for each function
85 so each function gets its own constants-pool that comes right before
86 it. */
87 struct constant_descriptor_rtx ** GTY ((length ("MAX_RTX_HASH_TABLE")))
88 x_const_rtx_hash_table;
89 struct pool_constant ** GTY ((length ("MAX_RTX_HASH_TABLE")))
90 x_const_rtx_sym_hash_table;
91
92 /* Pointers to first and last constant in pool. */
93 struct pool_constant *x_first_pool;
94 struct pool_constant *x_last_pool;
95
96 /* Current offset in constant pool (does not include any machine-specific
97 header). */
98 HOST_WIDE_INT x_pool_offset;
99
100 /* Number of tree-constants deferred during the expansion of this
101 function. */
102 unsigned int deferred_constants;
103 };
At lines 89, 93, and 94 above, pool_constant records everything needed about one pooled constant; its key part is the constant rtx object itself, at line 2641 below.
2636 struct pool_constant GTY(()) in varasm.c
2637 {
2638 struct constant_descriptor_rtx *desc;
2639 struct pool_constant *next;
2640 struct pool_constant *next_sym;
2641 rtx constant;
2642 enum machine_mode mode;
2643 int labelno;
2644 unsigned int align;
2645 HOST_WIDE_INT offset;
2646 int mark;
2647 };
2657 void
2658 init_varasm_status (struct function *f) in varasm.c
2659 {
2660 struct varasm_status *p;
2661 p = ggc_alloc (sizeof (struct varasm_status));
2662 f->varasm = p;
2663 p->x_const_rtx_hash_table
2664 = ggc_alloc_cleared (MAX_RTX_HASH_TABLE
2665 * sizeof (struct constant_descriptor_rtx *));
2666 p->x_const_rtx_sym_hash_table
2667 = ggc_alloc_cleared (MAX_RTX_HASH_TABLE
2668 * sizeof (struct pool_constant *));
2669
2670 p->x_first_pool = p->x_last_pool = 0;
2671 p->x_pool_offset = 0;
2672 p->deferred_constants = 0;
2673 }
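To see how these fields cooperate, here is a heavily simplified, hedged sketch of the chain maintenance that force_const_mem (varasm.c) performs on them when a constant is added to the pool; the hash-bucket and constant_descriptor_rtx bookkeeping is omitted, the helper name is hypothetical, and ALIGN is assumed to be in bits.

static struct pool_constant *
append_pool_constant_sketch (rtx x, enum machine_mode mode,
                             unsigned int align, int labelno)
{
  struct varasm_status *v = cfun->varasm;
  struct pool_constant *pool = ggc_alloc (sizeof (struct pool_constant));

  pool->desc = NULL;            /* the real code links a descriptor here */
  pool->constant = x;
  pool->mode = mode;
  pool->labelno = labelno;
  pool->align = align;
  pool->mark = 0;               /* not yet marked as referenced */
  pool->next = NULL;
  pool->next_sym = NULL;

  /* Link the new entry at the tail of the per-function pool chain.  */
  if (v->x_last_pool == 0)
    v->x_first_pool = pool;
  else
    v->x_last_pool->next = pool;
  v->x_last_pool = pool;

  /* Round the running offset up to ALIGN, record it, and reserve room
     for this constant.  */
  v->x_pool_offset += (align / BITS_PER_UNIT) - 1;
  v->x_pool_offset &= ~(HOST_WIDE_INT) ((align / BITS_PER_UNIT) - 1);
  pool->offset = v->x_pool_offset;
  v->x_pool_offset += GET_MODE_SIZE (mode);

  return pool;
}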
Back in prepare_function_start, init_expr is invoked next to create an instance of expr_status, which holds the state used while expanding each expression of the function.
121 struct expr_status GTY(()) in function.h
122 {
123 /* Number of units that we should eventually pop off the stack.
124 These are the arguments to function calls that have already returned. */
125 int x_pending_stack_adjust;
126
127 /* Under some ABIs, it is the caller's responsibility to pop arguments
128 pushed for function calls. A naive implementation would simply pop
129 the arguments immediately after each call. However, if several
130 function calls are made in a row, it is typically cheaper to pop
131 all the arguments after all of the calls are complete since a
132 single pop instruction can be used. Therefore, GCC attempts to
133 defer popping the arguments until absolutely necessary. (For
134 example, at the end of a conditional, the arguments must be popped,
135 since code outside the conditional won't know whether or not the
136 arguments need to be popped.)
137
138 When INHIBIT_DEFER_POP is nonzero, however, the compiler does not
139 attempt to defer pops. Instead, the stack is popped immediately
140 after each call. Rather then setting this variable directly, use
141 NO_DEFER_POP and OK_DEFER_POP. */
142 int x_inhibit_defer_pop;
143
144 /* If PREFERRED_STACK_BOUNDARY and PUSH_ROUNDING are defined, the
145 stack boundary can be momentarily unaligned while pushing the arguments.
146 Record the delta since last aligned boundary here in order to get
147 stack alignment in the nested function calls working right. */
148 int x_stack_pointer_delta;
149
150 /* Nonzero means __builtin_saveregs has already been done in this function.
151 The value is the pseudoreg containing the value __builtin_saveregs
152 returned. */
153 rtx x_saveregs_value;
154
155 /* Similarly for __builtin_apply_args. */
156 rtx x_apply_args_value;
157
158 /* List of labels that must never be deleted. */
159 rtx x_forced_labels;
160
161 /* Postincrements that still need to be expanded. */
162 rtx x_pending_chain;
163 };
Finally, note that ggc_alloc_cleared returns zero-filled memory, so the new expr_status starts out cleared.
325 void
326 init_expr (void) in expr.c
327 {
328 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
329 }
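As with emit_status, function.h wraps these fields in accessor macros (pending_stack_adjust, inhibit_defer_pop, and so on). A hedged sketch of the defer-pop pattern described in the comment on x_inhibit_defer_pop: NO_DEFER_POP and OK_DEFER_POP are the expr.h macros that bump the counter up and down, while the enclosing helper is hypothetical.

/* Sketch: while expanding one arm of a conditional, pops of call
   arguments may not be deferred past the join point.  */
static void
expand_conditional_arm_sketch (void)
{
  NO_DEFER_POP;         /* increment cfun->expr->x_inhibit_defer_pop */
  /* ... expand the arm, including any calls it contains ...  */
  OK_DEFER_POP;         /* leaving the arm: deferring pops is fine again */
}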