File: build/gcc/asan.c
Warning: line 3691, column 26: Value stored to 'shadow_align' during its initialization is never read
1 | /* AddressSanitizer, a fast memory error detector. |
2 | Copyright (C) 2012-2021 Free Software Foundation, Inc. |
3 | Contributed by Kostya Serebryany <kcc@google.com> |
4 | |
5 | This file is part of GCC. |
6 | |
7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free |
9 | Software Foundation; either version 3, or (at your option) any later |
10 | version. |
11 | |
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
15 | for more details. |
16 | |
17 | You should have received a copy of the GNU General Public License |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ |
20 | |
21 | |
22 | #include "config.h" |
23 | #include "system.h" |
24 | #include "coretypes.h" |
25 | #include "backend.h" |
26 | #include "target.h" |
27 | #include "rtl.h" |
28 | #include "tree.h" |
29 | #include "gimple.h" |
30 | #include "cfghooks.h" |
31 | #include "alloc-pool.h" |
32 | #include "tree-pass.h" |
33 | #include "memmodel.h" |
34 | #include "tm_p.h" |
35 | #include "ssa.h" |
36 | #include "stringpool.h" |
37 | #include "tree-ssanames.h" |
38 | #include "optabs.h" |
39 | #include "emit-rtl.h" |
40 | #include "cgraph.h" |
41 | #include "gimple-pretty-print.h" |
42 | #include "alias.h" |
43 | #include "fold-const.h" |
44 | #include "cfganal.h" |
45 | #include "gimplify.h" |
46 | #include "gimple-iterator.h" |
47 | #include "varasm.h" |
48 | #include "stor-layout.h" |
49 | #include "tree-iterator.h" |
50 | #include "stringpool.h" |
51 | #include "attribs.h" |
52 | #include "asan.h" |
53 | #include "dojump.h" |
54 | #include "explow.h" |
55 | #include "expr.h" |
56 | #include "output.h" |
57 | #include "langhooks.h" |
58 | #include "cfgloop.h" |
59 | #include "gimple-builder.h" |
60 | #include "gimple-fold.h" |
61 | #include "ubsan.h" |
62 | #include "builtins.h" |
63 | #include "fnmatch.h" |
64 | #include "tree-inline.h" |
65 | #include "tree-ssa.h" |
66 | |
67 | /* AddressSanitizer finds out-of-bounds and use-after-free bugs |
68 | with <2x slowdown on average. |
69 | |
70 | The tool consists of two parts: |
71 | instrumentation module (this file) and a run-time library. |
72 | The instrumentation module adds a run-time check before every memory insn. |
73 | For an 8- or 16-byte load accessing address X: |
74 | ShadowAddr = (X >> 3) + Offset |
75 | ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access. |
76 | if (ShadowValue) |
77 | __asan_report_load8(X); |
78 | For a load of N bytes (N=1, 2 or 4) from address X: |
79 | ShadowAddr = (X >> 3) + Offset |
80 | ShadowValue = *(char*)ShadowAddr; |
81 | if (ShadowValue) |
82 | if ((X & 7) + N - 1 >= ShadowValue) |
83 | __asan_report_loadN(X); |
84 | Stores are instrumented similarly, but using __asan_report_storeN functions (a standalone C sketch of this check follows this comment). |
85 | A call to __asan_init_vN() is inserted into the list of module CTORs. |
86 | N is the version number of the AddressSanitizer API. The changes between the |
87 | API versions are listed in libsanitizer/asan/asan_interface_internal.h. |
88 | |
89 | The run-time library redefines malloc (so that redzones are inserted around |
90 | the allocated memory) and free (so that reuse of freed memory is delayed), |
91 | provides __asan_report* and __asan_init_vN functions. |
92 | |
93 | Read more: |
94 | http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm |
95 | |
96 | The current implementation supports detection of out-of-bounds and |
97 | use-after-free in the heap, on the stack and for global variables. |
98 | |
99 | [Protection of stack variables] |
100 | |
101 | To understand how detection of out-of-bounds and use-after-free works |
102 | for stack variables, let's look at this example on x86_64 where the |
103 | stack grows downward: |
104 | |
105 | int |
106 | foo () |
107 | { |
108 | char a[23] = {0}; |
109 | int b[2] = {0}; |
110 | |
111 | a[5] = 1; |
112 | b[1] = 2; |
113 | |
114 | return a[5] + b[1]; |
115 | } |
116 | |
117 | For this function, the stack protected by asan will be organized as |
118 | follows, from the top of the stack to the bottom: |
119 | |
120 | Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone'] |
121 | |
122 | Slot 2/ [8 bytes of red zone, added to the space of 'a' so that the |
123 | next slot is 32-byte aligned; this one is called the Partial |
124 | Redzone; the 32-byte alignment is an asan constraint] |
125 | |
126 | Slot 3/ [24 bytes for variable 'a'] |
127 | |
128 | Slot 4/ [red zone of 32 bytes called 'Middle RedZone'] |
129 | |
130 | Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)] |
131 | |
132 | Slot 6/ [8 bytes for variable 'b'] |
133 | |
134 | Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called |
135 | 'LEFT RedZone'] |
136 | |
137 | The 32 bytes of LEFT red zone at the bottom of the stack can be |
138 | decomposed as such: |
139 | |
140 | 1/ The first 8 bytes contain a magical asan number that is always |
141 | 0x41B58AB3. |
142 | |
143 | 2/ The following 8 bytes contain a pointer to a string (to be |
144 | parsed at runtime by the asan runtime library), whose format is |
145 | the following: |
146 | |
147 | "<function-name> <space> <num-of-variables-on-the-stack> |
148 | (<32-bytes-aligned-offset-in-bytes-of-variable> <space> |
149 | <length-of-var-in-bytes> ){n} " |
150 | |
151 | where '(...){n}' means the content inside the parenthesis occurs 'n' |
152 | times, with 'n' being the number of variables on the stack. |
153 | |
154 | 3/ The following 8 bytes contain the PC of the current function which |
155 | will be used by the run-time library to print an error message. |
156 | |
157 | 4/ The following 8 bytes are reserved for internal use by the run-time. |
158 | |
159 | The shadow memory for that stack layout is going to look like this: |
160 | |
161 | - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1. |
162 | The F1 byte pattern is a magic number called |
163 | ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that |
164 | the memory for that shadow byte is part of the LEFT red zone |
165 | intended to sit at the bottom of the variables on the stack. |
166 | |
167 | - content of shadow memory 8 bytes for slots 6 and 5: |
168 | 0xF4F4F400. The F4 byte pattern is a magic number |
169 | called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the |
170 | memory region for this shadow byte is a PARTIAL red zone |
171 | intended to pad a variable A, so that the slot following |
172 | {A,padding} is 32 bytes aligned. |
173 | |
174 | Note that the fact that the least significant byte of this |
175 | shadow memory content is 00 means that 8 bytes of its |
176 | corresponding memory (which corresponds to the memory of |
177 | variable 'b') are addressable. |
178 | |
179 | - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2. |
180 | The F2 byte pattern is a magic number called |
181 | ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory |
182 | region for this shadow byte is a MIDDLE red zone intended to |
183 | sit between two 32-byte aligned slots of {variable,padding}. |
184 | |
185 | - content of shadow memory 8 bytes for slot 3 and 2: |
186 | 0xF4000000. This represents the concatenation of |
187 | variable 'a' and the partial red zone following it, like what we |
188 | had for variable 'b'. The least significant 3 bytes being 00 |
189 | means that the 24 bytes holding variable 'a' are addressable. |
190 | |
191 | - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3. |
192 | The F3 byte pattern is a magic number called |
193 | ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory |
194 | region for this shadow byte is a RIGHT red zone intended to sit |
195 | at the top of the variables on the stack. |
196 | |
197 | Note that the real variable layout is done in expand_used_vars in |
198 | cfgexpand.c. As far as Address Sanitizer is concerned, it lays out |
199 | stack variables as well as the different red zones, emits some |
200 | prologue code to populate the shadow memory so as to poison (mark as |
201 | non-accessible) the regions of the red zones and mark the regions of |
202 | stack variables as accessible, and emits some epilogue code to |
203 | un-poison (mark as accessible) the regions of red zones right before |
204 | the function exits. |
205 | |
206 | [Protection of global variables] |
207 | |
208 | The basic idea is to insert a red zone between two global variables |
209 | and install a constructor function that calls the asan runtime to do |
210 | the populating of the relevant shadow memory regions at load time. |
211 | |
212 | So the global variables are laid out so as to insert a red zone between |
213 | them. The red zones are sized so that each variable starts on a |
214 | 32-byte boundary. |
215 | |
216 | Then a constructor function is installed so that, for each global |
217 | variable, it calls the runtime asan library function |
218 | __asan_register_globals with an instance of this type: |
219 | |
220 | struct __asan_global |
221 | { |
222 | // Address of the beginning of the global variable. |
223 | const void *__beg; |
224 | |
225 | // Initial size of the global variable. |
226 | uptr __size; |
227 | |
228 | // Size of the global variable + size of the red zone. This |
229 | // size is 32 bytes aligned. |
230 | uptr __size_with_redzone; |
231 | |
232 | // Name of the global variable. |
233 | const void *__name; |
234 | |
235 | // Name of the module where the global variable is declared. |
236 | const void *__module_name; |
237 | |
238 | // 1 if it has dynamic initialization, 0 otherwise. |
239 | uptr __has_dynamic_init; |
240 | |
241 | // A pointer to struct that contains source location, could be NULL. |
242 | __asan_global_source_location *__location; |
243 | } |
244 | |
245 | A destructor function that calls the runtime asan library function |
246 | __asan_unregister_globals is also installed. */ |
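
As a quick illustration (not part of asan.c): a minimal, self-contained C sketch of the address check described at the top of the comment above, assuming the usual 1:8 shadow mapping. SHADOW_OFFSET and the helper names are hypothetical; the real instrumentation emits GIMPLE rather than calling helpers like these.

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

#define SHADOW_OFFSET 0x7fff8000ul   /* hypothetical; the real value is target-specific */

/* Shadow address of application address X under the 1:8 mapping.  */
static uintptr_t shadow_addr (uintptr_t x) { return (x >> 3) + SHADOW_OFFSET; }

/* Slow-path predicate for an N-byte access (N = 1, 2 or 4) at address X,
   given the shadow byte already loaded from shadow_addr (x).  0 means the
   whole 8-byte granule is addressable; k in 1..7 means only the first k
   bytes are; negative values are redzone magic bytes.  */
static bool access_is_poisoned (uintptr_t x, unsigned n, int8_t shadow_value)
{
  if (shadow_value == 0)
    return false;
  int last = (int) (x & 7) + (int) n - 1;   /* offset of the last byte touched */
  return last >= shadow_value;
}

int main (void)
{
  /* A 4-byte access at granule offset 5 with shadow value 6: bytes 5..8 are
     touched but only 0..5 are addressable, so the access is reported.  */
  printf ("%d\n", access_is_poisoned (0x1005, 4, 6));        /* 1 */
  printf ("%#lx\n", (unsigned long) shadow_addr (0x1005));   /* shadow byte address */
  return 0;
}
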
247 | |
248 | static unsigned HOST_WIDE_INT asan_shadow_offset_value; |
249 | static bool asan_shadow_offset_computed; |
250 | static vec<char *> sanitized_sections; |
251 | static tree last_alloca_addr; |
252 | |
253 | /* Set of variable declarations that are going to be guarded by |
254 | use-after-scope sanitizer. */ |
255 | |
256 | hash_set<tree> *asan_handled_variables = NULL; |
257 | |
258 | hash_set <tree> *asan_used_labels = NULL; |
259 | |
260 | /* Global variables for HWASAN stack tagging. */ |
261 | /* hwasan_frame_tag_offset records the offset from the frame base tag that the |
262 | next object should have. */ |
263 | static uint8_t hwasan_frame_tag_offset = 0; |
264 | /* hwasan_frame_base_ptr is a pointer with the same address as |
265 | `virtual_stack_vars_rtx` for the current frame, and with the frame base tag |
266 | stored in it. N.b. this global RTX does not need to be marked GTY, but is |
267 | done so anyway. The need is not there since all uses are in just one pass |
268 | (cfgexpand) and there are no calls to ggc_collect between the uses. We mark |
269 | it GTY(()) anyway to allow the use of the variable later on if needed by |
270 | future features. */ |
271 | static GTY(()) rtx hwasan_frame_base_ptr = NULL_RTX; |
272 | /* hwasan_frame_base_init_seq is the sequence of RTL insns that will initialize |
273 | the hwasan_frame_base_ptr. When the hwasan_frame_base_ptr is requested, we |
274 | generate this sequence but do not emit it. If the sequence was created it |
275 | is emitted once the function body has been expanded. |
276 | |
277 | This delay is because the frame base pointer may be needed anywhere in the |
278 | function body, or needed by the expand_used_vars function. Emitting once in |
279 | a known place is simpler than making the point of emission depend on |
280 | wherever in the function the hwasan frame base happens to be needed |
281 | first. */ |
282 | static GTY(()) rtx_insn *hwasan_frame_base_init_seq = NULL; |
283 | |
284 | /* Structure defining the extent of one object on the stack that HWASAN needs |
285 | to tag in the corresponding shadow stack space. |
286 | |
287 | The range this object spans on the stack is between `untagged_base + |
288 | nearest_offset` and `untagged_base + farthest_offset`. |
289 | `tagged_base` is an rtx containing the same value as `untagged_base` but |
290 | with a random tag stored in the top byte. We record both `untagged_base` |
291 | and `tagged_base` so that `hwasan_emit_prologue` can use both without having |
292 | to emit RTL into the instruction stream to re-calculate one from the other. |
293 | (`hwasan_emit_prologue` needs to use both bases since the |
294 | __hwasan_tag_memory call it emits uses an untagged value, and it calculates |
295 | the tag to store in shadow memory based on the tag_offset plus the tag in |
296 | tagged_base). */ |
297 | struct hwasan_stack_var |
298 | { |
299 | rtx untagged_base; |
300 | rtx tagged_base; |
301 | poly_int64 nearest_offset; |
302 | poly_int64 farthest_offset; |
303 | uint8_t tag_offset; |
304 | }; |
305 | |
306 | /* Variable recording all stack variables that HWASAN needs to tag. |
307 | Does not need to be marked as GTY(()) since every use is in the cfgexpand |
308 | pass and ggc_collect is not called in the middle of that pass. */ |
309 | static vec<hwasan_stack_var> hwasan_tagged_stack_vars; |
310 | |
311 | |
312 | /* Sets shadow offset to value in string VAL. */ |
313 | |
314 | bool |
315 | set_asan_shadow_offset (const char *val) |
316 | { |
317 | char *endp; |
318 | |
319 | errno = 0; |
320 | #ifdef HAVE_LONG_LONG |
321 | asan_shadow_offset_value = strtoull (val, &endp, 0); |
322 | #else |
323 | asan_shadow_offset_value = strtoul (val, &endp, 0); |
324 | #endif |
325 | if (!(*val != '\0' && *endp == '\0' && errno == 0)) |
326 | return false; |
327 | |
328 | asan_shadow_offset_computed = true; |
329 | |
330 | return true; |
331 | } |
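
A small standalone C sketch of the validation pattern used by set_asan_shadow_offset above (non-empty string, fully consumed, no overflow); parse_offset is an illustrative name, not a GCC function.

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

static int parse_offset (const char *val, unsigned long long *out)
{
  char *endp;
  errno = 0;
  *out = strtoull (val, &endp, 0);   /* base 0 accepts 0x..., octal and decimal */
  return *val != '\0' && *endp == '\0' && errno == 0;
}

int main (void)
{
  unsigned long long off;
  printf ("%d\n", parse_offset ("0x7fff8000", &off));  /* 1: valid */
  printf ("%d\n", parse_offset ("12abc", &off));        /* 0: trailing junk */
  printf ("%d\n", parse_offset ("", &off));             /* 0: empty string */
  return 0;
}
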
332 | |
333 | /* Set list of user-defined sections that need to be sanitized. */ |
334 | |
335 | void |
336 | set_sanitized_sections (const char *sections) |
337 | { |
338 | char *pat; |
339 | unsigned i; |
340 | FOR_EACH_VEC_ELT (sanitized_sections, i, pat) |
341 | free (pat); |
342 | sanitized_sections.truncate (0); |
343 | |
344 | for (const char *s = sections; *s; ) |
345 | { |
346 | const char *end; |
347 | for (end = s; *end && *end != ','; ++end); |
348 | size_t len = end - s; |
349 | sanitized_sections.safe_push (xstrndup (s, len)); |
350 | s = *end ? end + 1 : end; |
351 | } |
352 | } |
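
The comma-splitting loop above, extracted into a runnable C sketch (strndup stands in for GCC's xstrndup, and printing replaces the sanitized_sections vector):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static void split_sections (const char *sections)
{
  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      char *pat = strndup (s, len);
      printf ("pattern: '%s'\n", pat);
      free (pat);
      s = *end ? end + 1 : end;
    }
}

int main (void)
{
  split_sections (".mysec*,.yoursec");   /* prints '.mysec*' then '.yoursec' */
  return 0;
}
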
353 | |
354 | bool |
355 | asan_mark_p (gimple *stmt, enum asan_mark_flags flag) |
356 | { |
357 | return (gimple_call_internal_p (stmt, IFN_ASAN_MARK) |
358 | && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag); |
359 | } |
360 | |
361 | bool |
362 | asan_sanitize_stack_p (void) |
363 | { |
364 | return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_stack); |
365 | } |
366 | |
367 | bool |
368 | asan_sanitize_allocas_p (void) |
369 | { |
370 | return (asan_sanitize_stack_p () && param_asan_protect_allocas); |
371 | } |
372 | |
373 | bool |
374 | asan_instrument_reads (void) |
375 | { |
376 | return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_reads); |
377 | } |
378 | |
379 | bool |
380 | asan_instrument_writes (void) |
381 | { |
382 | return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_writes); |
383 | } |
384 | |
385 | bool |
386 | asan_memintrin (void) |
387 | { |
388 | return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_memintrin); |
389 | } |
390 | |
391 | |
392 | /* Checks whether section SEC should be sanitized. */ |
393 | |
394 | static bool |
395 | section_sanitized_p (const char *sec) |
396 | { |
397 | char *pat; |
398 | unsigned i; |
399 | FOR_EACH_VEC_ELT (sanitized_sections, i, pat) |
400 | if (fnmatch (pat, sec, FNM_PERIOD) == 0) |
401 | return true; |
402 | return false; |
403 | } |
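
A runnable check of the fnmatch/FNM_PERIOD matching that section_sanitized_p relies on; the section names and patterns here are made up for illustration.

#include <fnmatch.h>
#include <stdio.h>

int main (void)
{
  const char *pat = ".mysec*";
  printf ("%d\n", fnmatch (pat, ".mysec.data", FNM_PERIOD) == 0);  /* 1: sanitized */
  printf ("%d\n", fnmatch (pat, ".text", FNM_PERIOD) == 0);        /* 0: not matched */
  return 0;
}
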
404 | |
405 | /* Returns Asan shadow offset. */ |
406 | |
407 | static unsigned HOST_WIDE_INT |
408 | asan_shadow_offset () |
409 | { |
410 | if (!asan_shadow_offset_computed) |
411 | { |
412 | asan_shadow_offset_computed = true; |
413 | asan_shadow_offset_value = targetm.asan_shadow_offset (); |
414 | } |
415 | return asan_shadow_offset_value; |
416 | } |
417 | |
418 | /* Returns whether the Asan shadow offset has been set. */ |
419 | bool |
420 | asan_shadow_offset_set_p () |
421 | { |
422 | return asan_shadow_offset_computed; |
423 | } |
424 | |
425 | alias_set_type asan_shadow_set = -1; |
426 | |
427 | /* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate |
428 | alias set is used for all shadow memory accesses. */ |
429 | static GTY(()) tree shadow_ptr_types[3]; |
430 | |
431 | /* Decl for __asan_option_detect_stack_use_after_return. */ |
432 | static GTY(()) tree asan_detect_stack_use_after_return; |
433 | |
434 | /* Hashtable support for memory references used by gimple |
435 | statements. */ |
436 | |
437 | /* This type represents a reference to a memory region. */ |
438 | struct asan_mem_ref |
439 | { |
440 | /* The expression of the beginning of the memory region. */ |
441 | tree start; |
442 | |
443 | /* The size of the access. */ |
444 | HOST_WIDE_INT access_size; |
445 | }; |
446 | |
447 | object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref"); |
448 | |
449 | /* Initializes an instance of asan_mem_ref. */ |
450 | |
451 | static void |
452 | asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size) |
453 | { |
454 | ref->start = start; |
455 | ref->access_size = access_size; |
456 | } |
457 | |
458 | /* Allocates memory for an instance of asan_mem_ref into the memory |
459 | pool returned by asan_mem_ref_get_alloc_pool and initialize it. |
460 | START is the address of (or the expression pointing to) the |
461 | beginning of memory reference. ACCESS_SIZE is the size of the |
462 | access to the referenced memory. */ |
463 | |
464 | static asan_mem_ref* |
465 | asan_mem_ref_new (tree start, HOST_WIDE_INT access_size) |
466 | { |
467 | asan_mem_ref *ref = asan_mem_ref_pool.allocate (); |
468 | |
469 | asan_mem_ref_init (ref, start, access_size); |
470 | return ref; |
471 | } |
472 | |
473 | /* This builds and returns a pointer to the end of the memory region |
474 | that starts at START and of length LEN. */ |
475 | |
476 | tree |
477 | asan_mem_ref_get_end (tree start, tree len) |
478 | { |
479 | if (len == NULL_TREE || integer_zerop (len)) |
480 | return start; |
481 | |
482 | if (!ptrofftype_p (len)) |
483 | len = convert_to_ptrofftype (len); |
484 | |
485 | return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len); |
486 | } |
487 | |
488 | /* Return a tree expression that represents the end of the referenced |
489 | memory region. Beware that this function can actually build a new |
490 | tree expression. */ |
491 | |
492 | tree |
493 | asan_mem_ref_get_end (const asan_mem_ref *ref, tree len) |
494 | { |
495 | return asan_mem_ref_get_end (ref->start, len); |
496 | } |
497 | |
498 | struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref> |
499 | { |
500 | static inline hashval_t hash (const asan_mem_ref *); |
501 | static inline bool equal (const asan_mem_ref *, const asan_mem_ref *); |
502 | }; |
503 | |
504 | /* Hash a memory reference. */ |
505 | |
506 | inline hashval_t |
507 | asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref) |
508 | { |
509 | return iterative_hash_expr (mem_ref->start, 0); |
510 | } |
511 | |
512 | /* Compare two memory references. We accept the length of either |
513 | memory references to be NULL_TREE. */ |
514 | |
515 | inline bool |
516 | asan_mem_ref_hasher::equal (const asan_mem_ref *m1, |
517 | const asan_mem_ref *m2) |
518 | { |
519 | return operand_equal_p (m1->start, m2->start, 0); |
520 | } |
521 | |
522 | static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht; |
523 | |
524 | /* Returns a reference to the hash table containing memory references. |
525 | This function ensures that the hash table is created. Note that |
526 | this hash table is updated by the function |
527 | update_mem_ref_hash_table. */ |
528 | |
529 | static hash_table<asan_mem_ref_hasher> * |
530 | get_mem_ref_hash_table () |
531 | { |
532 | if (!asan_mem_ref_ht) |
533 | asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10); |
534 | |
535 | return asan_mem_ref_ht; |
536 | } |
537 | |
538 | /* Clear all entries from the memory references hash table. */ |
539 | |
540 | static void |
541 | empty_mem_ref_hash_table () |
542 | { |
543 | if (asan_mem_ref_ht) |
544 | asan_mem_ref_ht->empty (); |
545 | } |
546 | |
547 | /* Free the memory references hash table. */ |
548 | |
549 | static void |
550 | free_mem_ref_resources () |
551 | { |
552 | delete asan_mem_ref_ht; |
553 | asan_mem_ref_ht = NULL; |
554 | |
555 | asan_mem_ref_pool.release (); |
556 | } |
557 | |
558 | /* Return true iff the memory reference REF has been instrumented. */ |
559 | |
560 | static bool |
561 | has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size) |
562 | { |
563 | asan_mem_ref r; |
564 | asan_mem_ref_init (&r, ref, access_size); |
565 | |
566 | asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r); |
567 | return saved_ref && saved_ref->access_size >= access_size; |
568 | } |
569 | |
570 | /* Return true iff the memory reference REF has been instrumented. */ |
571 | |
572 | static bool |
573 | has_mem_ref_been_instrumented (const asan_mem_ref *ref) |
574 | { |
575 | return has_mem_ref_been_instrumented (ref->start, ref->access_size); |
576 | } |
577 | |
578 | /* Return true iff access to memory region starting at REF and of |
579 | length LEN has been instrumented. */ |
580 | |
581 | static bool |
582 | has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len) |
583 | { |
584 | HOST_WIDE_INT size_in_bytes |
585 | = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1; |
586 | |
587 | return size_in_bytes != -1 |
588 | && has_mem_ref_been_instrumented (ref->start, size_in_bytes); |
589 | } |
590 | |
591 | /* Set REF to the memory reference present in a gimple assignment |
592 | ASSIGNMENT. Return true upon successful completion, false |
593 | otherwise. */ |
594 | |
595 | static bool |
596 | get_mem_ref_of_assignment (const gassign *assignment, |
597 | asan_mem_ref *ref, |
598 | bool *ref_is_store) |
599 | { |
600 | gcc_assert (gimple_assign_single_p (assignment)); |
601 | |
602 | if (gimple_store_p (assignment) |
603 | && !gimple_clobber_p (assignment)) |
604 | { |
605 | ref->start = gimple_assign_lhs (assignment); |
606 | *ref_is_store = true; |
607 | } |
608 | else if (gimple_assign_load_p (assignment)) |
609 | { |
610 | ref->start = gimple_assign_rhs1 (assignment); |
611 | *ref_is_store = false; |
612 | } |
613 | else |
614 | return false; |
615 | |
616 | ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start)); |
617 | return true; |
618 | } |
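
A plain C illustration (not GIMPLE) of what get_mem_ref_of_assignment records for simple single assignments; the access size comes from the type of the recorded operand.

#include <stdio.h>

struct point { int x; int y; };

int main (void)
{
  struct point p = { 1, 2 };
  int t;

  /* A store: the LHS (p.x) becomes ref->start, *ref_is_store is true and
     ref->access_size is sizeof (int) == 4.  */
  p.x = 3;

  /* A load: the RHS (p.y) becomes ref->start and *ref_is_store is false.  */
  t = p.y;

  printf ("%d %zu\n", t, sizeof (p.x));
  return 0;
}
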
619 | |
620 | /* Return address of last allocated dynamic alloca. */ |
621 | |
622 | static tree |
623 | get_last_alloca_addr () |
624 | { |
625 | if (last_alloca_addr) |
626 | return last_alloca_addr; |
627 | |
628 | last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr"); |
629 | gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node); |
630 | edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)); |
631 | gsi_insert_on_edge_immediate (e, g); |
632 | return last_alloca_addr; |
633 | } |
634 | |
635 | /* Insert __asan_allocas_unpoison (top, bottom) call before |
636 | __builtin_stack_restore (new_sp) call. |
637 | The pseudocode of this routine should look like this: |
638 | top = last_alloca_addr; |
639 | bot = new_sp; |
640 | __asan_allocas_unpoison (top, bot); |
641 | last_alloca_addr = new_sp; |
642 | __builtin_stack_restore (new_sp); |
643 | In general, we can't use new_sp as the bot parameter because on some |
644 | architectures SP has a non-zero offset from the dynamic stack area. Moreover, on |
645 | some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for each |
646 | particular function only after all callees were expanded to rtl. |
647 | The most noticeable example is PowerPC{,64}, see |
648 | http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK. |
649 | To overcome the issue we use the following trick: pass new_sp as a second |
650 | parameter to __asan_allocas_unpoison and rewrite it during expansion with |
651 | new_sp + (virtual_dynamic_stack_rtx - sp) later in |
652 | expand_asan_emit_allocas_unpoison function. |
653 | |
654 | HWASAN needs to do something very similar; the eventual pseudocode should be: |
655 | __hwasan_tag_memory (virtual_stack_dynamic_rtx, |
656 | 0, |
657 | new_sp - sp); |
658 | __builtin_stack_restore (new_sp) |
659 | |
660 | Need to use the same trick to handle STACK_DYNAMIC_OFFSET as described |
661 | above. */ |
662 | |
663 | static void |
664 | handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter) |
665 | { |
666 | if (!iter |
667 | || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ())) |
668 | return; |
669 | |
670 | tree restored_stack = gimple_call_arg (call, 0); |
671 | |
672 | gimple *g; |
673 | |
674 | if (hwasan_sanitize_allocas_p ()) |
675 | { |
676 | enum internal_fn fn = IFN_HWASAN_ALLOCA_UNPOISON; |
677 | /* There is only one piece of information `expand_HWASAN_ALLOCA_UNPOISON` |
678 | needs in order to work. This is the length of the area that we're |
679 | deallocating. Since the stack pointer is known at expand time, the |
680 | position of the new stack pointer after deallocation is enough |
681 | information to calculate this length. */ |
682 | g = gimple_build_call_internal (fn, 1, restored_stack); |
683 | } |
684 | else |
685 | { |
686 | tree last_alloca = get_last_alloca_addr (); |
687 | tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON); |
688 | g = gimple_build_call (fn, 2, last_alloca, restored_stack); |
689 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
690 | g = gimple_build_assign (last_alloca, restored_stack); |
691 | } |
692 | |
693 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
694 | } |
695 | |
696 | /* Deploy and poison redzones around __builtin_alloca call. To do this, we |
697 | should replace this call with another one with changed parameters and |
698 | replace all its uses with new address, so |
699 | addr = __builtin_alloca (old_size, align); |
700 | is replaced by |
701 | left_redzone_size = max (align, ASAN_RED_ZONE_SIZE); |
702 | The following two statements are optimized out if we know that |
703 | old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. the alloca doesn't need a |
704 | partial redzone. |
705 | misalign = old_size & (ASAN_RED_ZONE_SIZE - 1); |
706 | partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign; |
707 | right_redzone_size = ASAN_RED_ZONE_SIZE; |
708 | additional_size = left_redzone_size + partial_redzone_size + |
709 | right_redzone_size; |
710 | new_size = old_size + additional_size; |
711 | new_alloca = __builtin_alloca (new_size, max (align, 32)) |
712 | __asan_alloca_poison (new_alloca, old_size) |
713 | addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE); |
714 | last_alloca_addr = new_alloca; |
715 | ADDITIONAL_SIZE is added so that the new allocation contains not only the |
716 | requested memory but also the left, partial and right redzones, as well as |
717 | some extra space required by alignment (a numeric sketch follows this function). */ |
718 | |
719 | static void |
720 | handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter) |
721 | { |
722 | if (!iter |
723 | || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ())) |
724 | return; |
725 | |
726 | gassign *g; |
727 | gcall *gg; |
728 | tree callee = gimple_call_fndecl (call); |
729 | tree old_size = gimple_call_arg (call, 0); |
730 | tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call)) |
731 | : ptr_type_node; |
732 | tree partial_size = NULL_TREE; |
733 | unsigned int align |
734 | = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA |
735 | ? 0 : tree_to_uhwi (gimple_call_arg (call, 1)); |
736 | |
737 | if (hwasan_sanitize_allocas_p ()) |
738 | { |
739 | gimple_seq stmts = NULL; |
740 | location_t loc = gimple_location (gsi_stmt (*iter)); |
741 | /* |
742 | HWASAN needs a different expansion. |
743 | |
744 | addr = __builtin_alloca (size, align); |
745 | |
746 | should be replaced by |
747 | |
748 | new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment; |
749 | untagged_addr = __builtin_alloca (new_size, align); |
750 | tag = __hwasan_choose_alloca_tag (); |
751 | addr = ifn_HWASAN_SET_TAG (untagged_addr, tag); |
752 | __hwasan_tag_memory (untagged_addr, tag, new_size); |
753 | */ |
754 | /* Ensure alignment at least HWASAN_TAG_GRANULE_SIZE bytes so we start on |
755 | a tag granule. */ |
756 | align = align > HWASAN_TAG_GRANULE_SIZE ? align : HWASAN_TAG_GRANULE_SIZE; |
757 | |
758 | tree old_size = gimple_call_arg (call, 0); |
759 | tree new_size = gimple_build_round_up (&stmts, loc, size_type_node, |
760 | old_size, |
761 | HWASAN_TAG_GRANULE_SIZE); |
762 | |
763 | /* Make the alloca call */ |
764 | tree untagged_addr |
765 | = gimple_build (&stmts, loc, |
766 | as_combined_fn (BUILT_IN_ALLOCA_WITH_ALIGN), ptr_type, |
767 | new_size, build_int_cst (size_type_node, align)); |
768 | |
769 | /* Choose the tag. |
770 | Here we use an internal function so we can choose the tag at expand |
771 | time. We need the decision to be made after stack variables have been |
772 | assigned their tag (i.e. once the hwasan_frame_tag_offset variable has |
773 | been set to one past the last stack variable's tag). */ |
774 | tree tag = gimple_build (&stmts, loc, CFN_HWASAN_CHOOSE_TAG, |
775 | unsigned_char_type_node); |
776 | |
777 | /* Add tag to pointer. */ |
778 | tree addr |
779 | = gimple_build (&stmts, loc, CFN_HWASAN_SET_TAG, ptr_type, |
780 | untagged_addr, tag); |
781 | |
782 | /* Tag shadow memory. |
783 | NOTE: require using `untagged_addr` here for libhwasan API. */ |
784 | gimple_build (&stmts, loc, as_combined_fn (BUILT_IN_HWASAN_TAG_MEM), |
785 | void_type_node, untagged_addr, tag, new_size); |
786 | |
787 | /* Insert the built up code sequence into the original instruction stream |
788 | the iterator points to. */ |
789 | gsi_insert_seq_before (iter, stmts, GSI_SAME_STMT); |
790 | |
791 | /* Finally, replace old alloca ptr with NEW_ALLOCA. */ |
792 | replace_call_with_value (iter, addr); |
793 | return; |
794 | } |
795 | |
796 | tree last_alloca = get_last_alloca_addr (); |
797 | const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1; |
798 | |
799 | /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN |
800 | bytes of allocated space. Otherwise, align alloca to ASAN_RED_ZONE_SIZE |
801 | manually. */ |
802 | align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT); |
803 | |
804 | tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask); |
805 | tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE); |
806 | |
807 | /* Extract lower bits from old_size. */ |
808 | wide_int size_nonzero_bits = get_nonzero_bits (old_size); |
809 | wide_int rz_mask |
810 | = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits)); |
811 | wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask); |
812 | |
813 | /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial |
814 | redzone. Otherwise, compute its size here. */ |
815 | if (wi::ne_p (old_size_lower_bits, 0)) |
816 | { |
817 | /* misalign = size & (ASAN_RED_ZONE_SIZE - 1) |
818 | partial_size = ASAN_RED_ZONE_SIZE - misalign. */ |
819 | g = gimple_build_assign (make_ssa_name (size_type_node, NULL), |
820 | BIT_AND_EXPR, old_size, alloca_rz_mask); |
821 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
822 | tree misalign = gimple_assign_lhs (g); |
823 | g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR, |
824 | redzone_size, misalign); |
825 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
826 | partial_size = gimple_assign_lhs (g); |
827 | } |
828 | |
829 | /* additional_size = align + ASAN_RED_ZONE_SIZE. */ |
830 | tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT |
831 | + ASAN_RED_ZONE_SIZE); |
832 | /* If alloca has partial redzone, include it to additional_size too. */ |
833 | if (partial_size) |
834 | { |
835 | /* additional_size += partial_size. */ |
836 | g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, |
837 | partial_size, additional_size); |
838 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
839 | additional_size = gimple_assign_lhs (g); |
840 | } |
841 | |
842 | /* new_size = old_size + additional_size. */ |
843 | g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size, |
844 | additional_size); |
845 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
846 | tree new_size = gimple_assign_lhs (g); |
847 | |
848 | /* Build new __builtin_alloca call: |
849 | new_alloca_with_rz = __builtin_alloca (new_size, align). */ |
850 | tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN); |
851 | gg = gimple_build_call (fn, 2, new_size, |
852 | build_int_cst (size_type_node, align)); |
853 | tree new_alloca_with_rz = make_ssa_name (ptr_type, gg); |
854 | gimple_call_set_lhs (gg, new_alloca_with_rz); |
855 | gsi_insert_before (iter, gg, GSI_SAME_STMT); |
856 | |
857 | /* new_alloca = new_alloca_with_rz + align. */ |
858 | g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR, |
859 | new_alloca_with_rz, |
860 | build_int_cst (size_type_node, |
861 | align / BITS_PER_UNIT)); |
862 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
863 | tree new_alloca = gimple_assign_lhs (g); |
864 | |
865 | /* Poison newly created alloca redzones: |
866 | __asan_alloca_poison (new_alloca, old_size). */ |
867 | fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON); |
868 | gg = gimple_build_call (fn, 2, new_alloca, old_size); |
869 | gsi_insert_before (iter, gg, GSI_SAME_STMT); |
870 | |
871 | /* Save new_alloca_with_rz value into last_alloca to use it during |
872 | allocas unpoisoning. */ |
873 | g = gimple_build_assign (last_alloca, new_alloca_with_rz); |
874 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
875 | |
876 | /* Finally, replace old alloca ptr with NEW_ALLOCA. */ |
877 | replace_call_with_value (iter, new_alloca); |
878 | } |
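
A standalone sketch of the size arithmetic described in the comment before handle_builtin_alloca, assuming ASAN_RED_ZONE_SIZE is 32 (as the expanded constants above indicate) and taking the alignment in bytes for simplicity (the GCC code receives it in bits):

#include <stdio.h>

#define RED_ZONE 32UL   /* stands in for ASAN_RED_ZONE_SIZE */

/* Size passed to the replacement __builtin_alloca for a request of
   OLD_SIZE bytes with ALIGN-byte alignment.  */
static unsigned long asan_alloca_new_size (unsigned long old_size, unsigned long align)
{
  unsigned long left_rz = align > RED_ZONE ? align : RED_ZONE;
  unsigned long misalign = old_size & (RED_ZONE - 1);
  unsigned long partial_rz = misalign ? RED_ZONE - misalign : 0;
  unsigned long right_rz = RED_ZONE;
  return old_size + left_rz + partial_rz + right_rz;
}

int main (void)
{
  /* 17 requested bytes, 8-byte alignment: 32 (left) + 15 (partial) + 32
     (right) extra bytes -> 96 total; the user pointer is new_alloca + 32.  */
  printf ("%lu\n", asan_alloca_new_size (17, 8));
  return 0;
}
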
879 | |
880 | /* Return the memory references contained in a gimple statement |
881 | representing a builtin call that has to do with memory access. */ |
882 | |
883 | static bool |
884 | get_mem_refs_of_builtin_call (gcall *call, |
885 | asan_mem_ref *src0, |
886 | tree *src0_len, |
887 | bool *src0_is_store, |
888 | asan_mem_ref *src1, |
889 | tree *src1_len, |
890 | bool *src1_is_store, |
891 | asan_mem_ref *dst, |
892 | tree *dst_len, |
893 | bool *dst_is_store, |
894 | bool *dest_is_deref, |
895 | bool *intercepted_p, |
896 | gimple_stmt_iterator *iter = NULL) |
897 | { |
898 | gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL)); |
899 | |
900 | tree callee = gimple_call_fndecl (call); |
901 | tree source0 = NULL_TREE, source1 = NULL_TREE, |
902 | dest = NULL_TREE, len = NULL_TREE; |
903 | bool is_store = true, got_reference_p = false; |
904 | HOST_WIDE_INT access_size = 1; |
905 | |
906 | *intercepted_p = asan_intercepted_p (DECL_FUNCTION_CODE (callee)); |
907 | |
908 | switch (DECL_FUNCTION_CODE (callee)) |
909 | { |
910 | /* (s, s, n) style memops. */ |
911 | case BUILT_IN_BCMP: |
912 | case BUILT_IN_MEMCMP: |
913 | source0 = gimple_call_arg (call, 0); |
914 | source1 = gimple_call_arg (call, 1); |
915 | len = gimple_call_arg (call, 2); |
916 | break; |
917 | |
918 | /* (src, dest, n) style memops. */ |
919 | case BUILT_IN_BCOPY: |
920 | source0 = gimple_call_arg (call, 0); |
921 | dest = gimple_call_arg (call, 1); |
922 | len = gimple_call_arg (call, 2); |
923 | break; |
924 | |
925 | /* (dest, src, n) style memops. */ |
926 | case BUILT_IN_MEMCPY: |
927 | case BUILT_IN_MEMCPY_CHK: |
928 | case BUILT_IN_MEMMOVE: |
929 | case BUILT_IN_MEMMOVE_CHK: |
930 | case BUILT_IN_MEMPCPY: |
931 | case BUILT_IN_MEMPCPY_CHK: |
932 | dest = gimple_call_arg (call, 0); |
933 | source0 = gimple_call_arg (call, 1); |
934 | len = gimple_call_arg (call, 2); |
935 | break; |
936 | |
937 | /* (dest, n) style memops. */ |
938 | case BUILT_IN_BZERO: |
939 | dest = gimple_call_arg (call, 0); |
940 | len = gimple_call_arg (call, 1); |
941 | break; |
942 | |
943 | /* (dest, x, n) style memops*/ |
944 | case BUILT_IN_MEMSET: |
945 | case BUILT_IN_MEMSET_CHK: |
946 | dest = gimple_call_arg (call, 0); |
947 | len = gimple_call_arg (call, 2); |
948 | break; |
949 | |
950 | case BUILT_IN_STRLEN: |
951 | /* Special case strlen here since its length is taken from its return |
952 | value. |
953 | |
954 | The approach taken by the sanitizers is to check a memory access |
955 | before it's taken. For ASAN strlen is intercepted by libasan, so no |
956 | check is inserted by the compiler. |
957 | |
958 | This function still returns `true` and provides a length to the rest |
959 | of the ASAN pass in order to record what areas have been checked, |
960 | avoiding superfluous checks later on. |
961 | |
962 | HWASAN does not intercept any of these internal functions. |
963 | This means that checks for memory accesses must be inserted by the |
964 | compiler. |
965 | strlen is a special case, because we can tell the length from the |
966 | return of the function, but that is not known until after the function |
967 | has returned. |
968 | |
969 | Hence we can't check the memory access before it happens. |
970 | We could check the memory access after it has already happened, but |
971 | for now we choose to just ignore `strlen` calls. |
972 | This decision was simply made because that means the special case is |
973 | limited to this one case of this one function. */ |
974 | if (hwasan_sanitize_p ()) |
975 | return false; |
976 | source0 = gimple_call_arg (call, 0); |
977 | len = gimple_call_lhs (call); |
978 | break; |
979 | |
980 | case BUILT_IN_STACK_RESTORE: |
981 | handle_builtin_stack_restore (call, iter); |
982 | break; |
983 | |
984 | CASE_BUILT_IN_ALLOCA: |
985 | handle_builtin_alloca (call, iter); |
986 | break; |
987 | /* And now the __atomic* and __sync builtins. |
988 | These are handled differently from the classical memory |
989 | access builtins above. */ |
990 | |
991 | case BUILT_IN_ATOMIC_LOAD_1: |
992 | is_store = false; |
993 | /* FALLTHRU */ |
994 | case BUILT_IN_SYNC_FETCH_AND_ADD_1: |
995 | case BUILT_IN_SYNC_FETCH_AND_SUB_1: |
996 | case BUILT_IN_SYNC_FETCH_AND_OR_1: |
997 | case BUILT_IN_SYNC_FETCH_AND_AND_1: |
998 | case BUILT_IN_SYNC_FETCH_AND_XOR_1: |
999 | case BUILT_IN_SYNC_FETCH_AND_NAND_1: |
1000 | case BUILT_IN_SYNC_ADD_AND_FETCH_1: |
1001 | case BUILT_IN_SYNC_SUB_AND_FETCH_1: |
1002 | case BUILT_IN_SYNC_OR_AND_FETCH_1: |
1003 | case BUILT_IN_SYNC_AND_AND_FETCH_1: |
1004 | case BUILT_IN_SYNC_XOR_AND_FETCH_1: |
1005 | case BUILT_IN_SYNC_NAND_AND_FETCH_1: |
1006 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1: |
1007 | case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1: |
1008 | case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1: |
1009 | case BUILT_IN_SYNC_LOCK_RELEASE_1: |
1010 | case BUILT_IN_ATOMIC_EXCHANGE_1: |
1011 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1: |
1012 | case BUILT_IN_ATOMIC_STORE_1: |
1013 | case BUILT_IN_ATOMIC_ADD_FETCH_1: |
1014 | case BUILT_IN_ATOMIC_SUB_FETCH_1: |
1015 | case BUILT_IN_ATOMIC_AND_FETCH_1: |
1016 | case BUILT_IN_ATOMIC_NAND_FETCH_1: |
1017 | case BUILT_IN_ATOMIC_XOR_FETCH_1: |
1018 | case BUILT_IN_ATOMIC_OR_FETCH_1: |
1019 | case BUILT_IN_ATOMIC_FETCH_ADD_1: |
1020 | case BUILT_IN_ATOMIC_FETCH_SUB_1: |
1021 | case BUILT_IN_ATOMIC_FETCH_AND_1: |
1022 | case BUILT_IN_ATOMIC_FETCH_NAND_1: |
1023 | case BUILT_IN_ATOMIC_FETCH_XOR_1: |
1024 | case BUILT_IN_ATOMIC_FETCH_OR_1: |
1025 | access_size = 1; |
1026 | goto do_atomic; |
1027 | |
1028 | case BUILT_IN_ATOMIC_LOAD_2: |
1029 | is_store = false; |
1030 | /* FALLTHRU */ |
1031 | case BUILT_IN_SYNC_FETCH_AND_ADD_2: |
1032 | case BUILT_IN_SYNC_FETCH_AND_SUB_2: |
1033 | case BUILT_IN_SYNC_FETCH_AND_OR_2: |
1034 | case BUILT_IN_SYNC_FETCH_AND_AND_2: |
1035 | case BUILT_IN_SYNC_FETCH_AND_XOR_2: |
1036 | case BUILT_IN_SYNC_FETCH_AND_NAND_2: |
1037 | case BUILT_IN_SYNC_ADD_AND_FETCH_2: |
1038 | case BUILT_IN_SYNC_SUB_AND_FETCH_2: |
1039 | case BUILT_IN_SYNC_OR_AND_FETCH_2: |
1040 | case BUILT_IN_SYNC_AND_AND_FETCH_2: |
1041 | case BUILT_IN_SYNC_XOR_AND_FETCH_2: |
1042 | case BUILT_IN_SYNC_NAND_AND_FETCH_2: |
1043 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2: |
1044 | case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2: |
1045 | case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2: |
1046 | case BUILT_IN_SYNC_LOCK_RELEASE_2: |
1047 | case BUILT_IN_ATOMIC_EXCHANGE_2: |
1048 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2: |
1049 | case BUILT_IN_ATOMIC_STORE_2: |
1050 | case BUILT_IN_ATOMIC_ADD_FETCH_2: |
1051 | case BUILT_IN_ATOMIC_SUB_FETCH_2: |
1052 | case BUILT_IN_ATOMIC_AND_FETCH_2: |
1053 | case BUILT_IN_ATOMIC_NAND_FETCH_2: |
1054 | case BUILT_IN_ATOMIC_XOR_FETCH_2: |
1055 | case BUILT_IN_ATOMIC_OR_FETCH_2: |
1056 | case BUILT_IN_ATOMIC_FETCH_ADD_2: |
1057 | case BUILT_IN_ATOMIC_FETCH_SUB_2: |
1058 | case BUILT_IN_ATOMIC_FETCH_AND_2: |
1059 | case BUILT_IN_ATOMIC_FETCH_NAND_2: |
1060 | case BUILT_IN_ATOMIC_FETCH_XOR_2: |
1061 | case BUILT_IN_ATOMIC_FETCH_OR_2: |
1062 | access_size = 2; |
1063 | goto do_atomic; |
1064 | |
1065 | case BUILT_IN_ATOMIC_LOAD_4: |
1066 | is_store = false; |
1067 | /* FALLTHRU */ |
1068 | case BUILT_IN_SYNC_FETCH_AND_ADD_4: |
1069 | case BUILT_IN_SYNC_FETCH_AND_SUB_4: |
1070 | case BUILT_IN_SYNC_FETCH_AND_OR_4: |
1071 | case BUILT_IN_SYNC_FETCH_AND_AND_4: |
1072 | case BUILT_IN_SYNC_FETCH_AND_XOR_4: |
1073 | case BUILT_IN_SYNC_FETCH_AND_NAND_4: |
1074 | case BUILT_IN_SYNC_ADD_AND_FETCH_4: |
1075 | case BUILT_IN_SYNC_SUB_AND_FETCH_4: |
1076 | case BUILT_IN_SYNC_OR_AND_FETCH_4: |
1077 | case BUILT_IN_SYNC_AND_AND_FETCH_4: |
1078 | case BUILT_IN_SYNC_XOR_AND_FETCH_4: |
1079 | case BUILT_IN_SYNC_NAND_AND_FETCH_4: |
1080 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4: |
1081 | case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4: |
1082 | case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4: |
1083 | case BUILT_IN_SYNC_LOCK_RELEASE_4: |
1084 | case BUILT_IN_ATOMIC_EXCHANGE_4: |
1085 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4: |
1086 | case BUILT_IN_ATOMIC_STORE_4: |
1087 | case BUILT_IN_ATOMIC_ADD_FETCH_4: |
1088 | case BUILT_IN_ATOMIC_SUB_FETCH_4: |
1089 | case BUILT_IN_ATOMIC_AND_FETCH_4: |
1090 | case BUILT_IN_ATOMIC_NAND_FETCH_4: |
1091 | case BUILT_IN_ATOMIC_XOR_FETCH_4: |
1092 | case BUILT_IN_ATOMIC_OR_FETCH_4: |
1093 | case BUILT_IN_ATOMIC_FETCH_ADD_4: |
1094 | case BUILT_IN_ATOMIC_FETCH_SUB_4: |
1095 | case BUILT_IN_ATOMIC_FETCH_AND_4: |
1096 | case BUILT_IN_ATOMIC_FETCH_NAND_4: |
1097 | case BUILT_IN_ATOMIC_FETCH_XOR_4: |
1098 | case BUILT_IN_ATOMIC_FETCH_OR_4: |
1099 | access_size = 4; |
1100 | goto do_atomic; |
1101 | |
1102 | case BUILT_IN_ATOMIC_LOAD_8: |
1103 | is_store = false; |
1104 | /* FALLTHRU */ |
1105 | case BUILT_IN_SYNC_FETCH_AND_ADD_8: |
1106 | case BUILT_IN_SYNC_FETCH_AND_SUB_8: |
1107 | case BUILT_IN_SYNC_FETCH_AND_OR_8: |
1108 | case BUILT_IN_SYNC_FETCH_AND_AND_8: |
1109 | case BUILT_IN_SYNC_FETCH_AND_XOR_8: |
1110 | case BUILT_IN_SYNC_FETCH_AND_NAND_8: |
1111 | case BUILT_IN_SYNC_ADD_AND_FETCH_8: |
1112 | case BUILT_IN_SYNC_SUB_AND_FETCH_8: |
1113 | case BUILT_IN_SYNC_OR_AND_FETCH_8: |
1114 | case BUILT_IN_SYNC_AND_AND_FETCH_8: |
1115 | case BUILT_IN_SYNC_XOR_AND_FETCH_8: |
1116 | case BUILT_IN_SYNC_NAND_AND_FETCH_8: |
1117 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8: |
1118 | case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8: |
1119 | case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8: |
1120 | case BUILT_IN_SYNC_LOCK_RELEASE_8: |
1121 | case BUILT_IN_ATOMIC_EXCHANGE_8: |
1122 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8: |
1123 | case BUILT_IN_ATOMIC_STORE_8: |
1124 | case BUILT_IN_ATOMIC_ADD_FETCH_8: |
1125 | case BUILT_IN_ATOMIC_SUB_FETCH_8: |
1126 | case BUILT_IN_ATOMIC_AND_FETCH_8: |
1127 | case BUILT_IN_ATOMIC_NAND_FETCH_8: |
1128 | case BUILT_IN_ATOMIC_XOR_FETCH_8: |
1129 | case BUILT_IN_ATOMIC_OR_FETCH_8: |
1130 | case BUILT_IN_ATOMIC_FETCH_ADD_8: |
1131 | case BUILT_IN_ATOMIC_FETCH_SUB_8: |
1132 | case BUILT_IN_ATOMIC_FETCH_AND_8: |
1133 | case BUILT_IN_ATOMIC_FETCH_NAND_8: |
1134 | case BUILT_IN_ATOMIC_FETCH_XOR_8: |
1135 | case BUILT_IN_ATOMIC_FETCH_OR_8: |
1136 | access_size = 8; |
1137 | goto do_atomic; |
1138 | |
1139 | case BUILT_IN_ATOMIC_LOAD_16: |
1140 | is_store = false; |
1141 | /* FALLTHRU */ |
1142 | case BUILT_IN_SYNC_FETCH_AND_ADD_16: |
1143 | case BUILT_IN_SYNC_FETCH_AND_SUB_16: |
1144 | case BUILT_IN_SYNC_FETCH_AND_OR_16: |
1145 | case BUILT_IN_SYNC_FETCH_AND_AND_16: |
1146 | case BUILT_IN_SYNC_FETCH_AND_XOR_16: |
1147 | case BUILT_IN_SYNC_FETCH_AND_NAND_16: |
1148 | case BUILT_IN_SYNC_ADD_AND_FETCH_16: |
1149 | case BUILT_IN_SYNC_SUB_AND_FETCH_16: |
1150 | case BUILT_IN_SYNC_OR_AND_FETCH_16: |
1151 | case BUILT_IN_SYNC_AND_AND_FETCH_16: |
1152 | case BUILT_IN_SYNC_XOR_AND_FETCH_16: |
1153 | case BUILT_IN_SYNC_NAND_AND_FETCH_16: |
1154 | case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16: |
1155 | case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16: |
1156 | case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16: |
1157 | case BUILT_IN_SYNC_LOCK_RELEASE_16: |
1158 | case BUILT_IN_ATOMIC_EXCHANGE_16: |
1159 | case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16: |
1160 | case BUILT_IN_ATOMIC_STORE_16: |
1161 | case BUILT_IN_ATOMIC_ADD_FETCH_16: |
1162 | case BUILT_IN_ATOMIC_SUB_FETCH_16: |
1163 | case BUILT_IN_ATOMIC_AND_FETCH_16: |
1164 | case BUILT_IN_ATOMIC_NAND_FETCH_16: |
1165 | case BUILT_IN_ATOMIC_XOR_FETCH_16: |
1166 | case BUILT_IN_ATOMIC_OR_FETCH_16: |
1167 | case BUILT_IN_ATOMIC_FETCH_ADD_16: |
1168 | case BUILT_IN_ATOMIC_FETCH_SUB_16: |
1169 | case BUILT_IN_ATOMIC_FETCH_AND_16: |
1170 | case BUILT_IN_ATOMIC_FETCH_NAND_16: |
1171 | case BUILT_IN_ATOMIC_FETCH_XOR_16: |
1172 | case BUILT_IN_ATOMIC_FETCH_OR_16: |
1173 | access_size = 16; |
1174 | /* FALLTHRU */ |
1175 | do_atomic: |
1176 | { |
1177 | dest = gimple_call_arg (call, 0); |
1178 | /* DEST represents the address of a memory location. |
1179 | instrument_derefs wants the memory location, so let's |
1180 | dereference the address DEST before handing it to |
1181 | instrument_derefs. */ |
1182 | tree type = build_nonstandard_integer_type (access_size |
1183 | * BITS_PER_UNIT, 1); |
1184 | dest = build2 (MEM_REF, type, dest, |
1185 | build_int_cst (build_pointer_type (char_type_node), 0)); |
1186 | break; |
1187 | } |
1188 | |
1189 | default: |
1190 | /* The other builtins' memory accesses are not instrumented in this |
1191 | function because they either don't have any length parameter, |
1192 | or their length parameter is just a limit. */ |
1193 | break; |
1194 | } |
1195 | |
1196 | if (len != NULL_TREE) |
1197 | { |
1198 | if (source0 != NULL_TREE) |
1199 | { |
1200 | src0->start = source0; |
1201 | src0->access_size = access_size; |
1202 | *src0_len = len; |
1203 | *src0_is_store = false; |
1204 | } |
1205 | |
1206 | if (source1 != NULL_TREE) |
1207 | { |
1208 | src1->start = source1; |
1209 | src1->access_size = access_size; |
1210 | *src1_len = len; |
1211 | *src1_is_store = false; |
1212 | } |
1213 | |
1214 | if (dest != NULL_TREE) |
1215 | { |
1216 | dst->start = dest; |
1217 | dst->access_size = access_size; |
1218 | *dst_len = len; |
1219 | *dst_is_store = true; |
1220 | } |
1221 | |
1222 | got_reference_p = true; |
1223 | } |
1224 | else if (dest) |
1225 | { |
1226 | dst->start = dest; |
1227 | dst->access_size = access_size; |
1228 | *dst_len = NULL_TREE; |
1229 | *dst_is_store = is_store; |
1230 | *dest_is_deref = true; |
1231 | got_reference_p = true; |
1232 | } |
1233 | |
1234 | return got_reference_p; |
1235 | } |
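
An illustration in plain C of the references the function above extracts for a few common builtins; the sizes annotated in the comments follow the switch cases, not any new analysis.

#include <stdio.h>
#include <string.h>

int main (void)
{
  char dst[16], src[16] = "hello";

  /* memcpy (dest, src, n): src is recorded as a 6-byte read and dst as a
     6-byte write, both with len taken from the third argument.  */
  memcpy (dst, src, 6);

  /* memset (dest, x, n): only the destination is recorded (16-byte write).  */
  memset (dst, 0, sizeof dst);

  /* strlen: the length is only known from the return value, so the region
     is recorded from the call's LHS instead of being checked up front.  */
  size_t n = strlen (src);

  printf ("%zu\n", n);   /* 5 */
  return 0;
}
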
1236 | |
1237 | /* Return true iff a given gimple statement has been instrumented. |
1238 | Note that the statement is "defined" by the memory references it |
1239 | contains. */ |
1240 | |
1241 | static bool |
1242 | has_stmt_been_instrumented_p (gimple *stmt) |
1243 | { |
1244 | if (gimple_assign_single_p (stmt)) |
1245 | { |
1246 | bool r_is_store; |
1247 | asan_mem_ref r; |
1248 | asan_mem_ref_init (&r, NULL, 1); |
1249 | |
1250 | if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r, |
1251 | &r_is_store)) |
1252 | return has_mem_ref_been_instrumented (&r); |
1253 | } |
1254 | else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) |
1255 | { |
1256 | asan_mem_ref src0, src1, dest; |
1257 | asan_mem_ref_init (&src0, NULL, 1); |
1258 | asan_mem_ref_init (&src1, NULL, 1); |
1259 | asan_mem_ref_init (&dest, NULL, 1); |
1260 | |
1261 | tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE; |
1262 | bool src0_is_store = false, src1_is_store = false, |
1263 | dest_is_store = false, dest_is_deref = false, intercepted_p = true; |
1264 | if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt), |
1265 | &src0, &src0_len, &src0_is_store, |
1266 | &src1, &src1_len, &src1_is_store, |
1267 | &dest, &dest_len, &dest_is_store, |
1268 | &dest_is_deref, &intercepted_p)) |
1269 | { |
1270 | if (src0.start != NULL_TREE |
1271 | && !has_mem_ref_been_instrumented (&src0, src0_len)) |
1272 | return false; |
1273 | |
1274 | if (src1.start != NULL_TREE |
1275 | && !has_mem_ref_been_instrumented (&src1, src1_len)) |
1276 | return false; |
1277 | |
1278 | if (dest.start != NULL_TREE |
1279 | && !has_mem_ref_been_instrumented (&dest, dest_len)) |
1280 | return false; |
1281 | |
1282 | return true; |
1283 | } |
1284 | } |
1285 | else if (is_gimple_call (stmt) && gimple_store_p (stmt)) |
1286 | { |
1287 | asan_mem_ref r; |
1288 | asan_mem_ref_init (&r, NULL, 1); |
1289 | |
1290 | r.start = gimple_call_lhs (stmt); |
1291 | r.access_size = int_size_in_bytes (TREE_TYPE (r.start)); |
1292 | return has_mem_ref_been_instrumented (&r); |
1293 | } |
1294 | |
1295 | return false; |
1296 | } |
1297 | |
1298 | /* Insert a memory reference into the hash table. */ |
1299 | |
1300 | static void |
1301 | update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size) |
1302 | { |
1303 | hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table (); |
1304 | |
1305 | asan_mem_ref r; |
1306 | asan_mem_ref_init (&r, ref, access_size); |
1307 | |
1308 | asan_mem_ref **slot = ht->find_slot (&r, INSERT); |
1309 | if (*slot == NULL || (*slot)->access_size < access_size) |
1310 | *slot = asan_mem_ref_new (ref, access_size); |
1311 | } |
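
A toy, array-based cache showing the rule update_mem_ref_hash_table implements: a reference is (re)recorded only when it is new or the previously recorded access was narrower. The GCC code keys on the start expression via a hash table; this sketch keys on a plain pointer.

#include <stdio.h>

struct entry { const void *start; long access_size; };
static struct entry cache[16];
static int n_entries;

static void record_access (const void *start, long access_size)
{
  for (int i = 0; i < n_entries; i++)
    if (cache[i].start == start)
      {
        if (cache[i].access_size < access_size)
          cache[i].access_size = access_size;   /* widen the recorded access */
        return;
      }
  cache[n_entries].start = start;
  cache[n_entries].access_size = access_size;
  n_entries++;
}

int main (void)
{
  int x;
  record_access (&x, 1);
  record_access (&x, 4);                     /* widens to 4 */
  record_access (&x, 2);                     /* narrower: ignored */
  printf ("%ld\n", cache[0].access_size);    /* 4 */
  return 0;
}
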
1312 | |
1313 | /* Initialize shadow_ptr_types array. */ |
1314 | |
1315 | static void |
1316 | asan_init_shadow_ptr_types (void) |
1317 | { |
1318 | asan_shadow_set = new_alias_set (); |
1319 | tree types[3] = { signed_char_type_node, short_integer_type_node, |
1320 | integer_type_node }; |
1321 | |
1322 | for (unsigned i = 0; i < 3; i++) |
1323 | { |
1324 | shadow_ptr_types[i] = build_distinct_type_copy (types[i]); |
1325 | TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set; |
1326 | shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]); |
1327 | } |
1328 | |
1329 | initialize_sanitizer_builtins (); |
1330 | } |
1331 | |
1332 | /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */ |
1333 | |
1334 | static tree |
1335 | asan_pp_string (pretty_printer *pp) |
1336 | { |
1337 | const char *buf = pp_formatted_text (pp); |
1338 | size_t len = strlen (buf); |
1339 | tree ret = build_string (len + 1, buf); |
1340 | TREE_TYPE (ret) |
1341 | = build_array_type (TREE_TYPE (shadow_ptr_types[0]), |
1342 | build_index_type (size_int (len))); |
1343 | TREE_READONLY (ret) = 1; |
1344 | TREE_STATIC (ret) = 1; |
1345 | return build1 (ADDR_EXPR, shadow_ptr_types[0], ret); |
1346 | } |
1347 | |
1348 | /* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't emit a library call here |
1349 | though. */ |
1350 | |
1351 | static void |
1352 | asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len) |
1353 | { |
1354 | rtx_insn *insn, *insns, *jump; |
1355 | rtx_code_label *top_label; |
1356 | rtx end, addr, tmp; |
1357 | |
1358 | gcc_assert ((len & 3) == 0); |
1359 | start_sequence (); |
1360 | clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL); |
1361 | insns = get_insns (); |
1362 | end_sequence (); |
1363 | for (insn = insns; insn; insn = NEXT_INSN (insn)) |
1364 | if (CALL_P (insn)) |
1365 | break; |
1366 | if (insn == NULL_RTX) |
1367 | { |
1368 | emit_insn (insns); |
1369 | return; |
1370 | } |
1371 | |
1372 | top_label = gen_label_rtx (); |
1373 | addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0)); |
1374 | shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0); |
1375 | end = force_reg (Pmode, plus_constant (Pmode, addr, len)); |
1376 | emit_label (top_label); |
1377 | |
1378 | emit_move_insn (shadow_mem, const0_rtx(const_int_rtx[64])); |
1379 | tmp = expand_simple_binop (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), PLUS, addr, gen_int_mode (4, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode)))), addr, |
1380 | true, OPTAB_LIB_WIDEN); |
1381 | if (tmp != addr) |
1382 | emit_move_insn (addr, tmp); |
1383 | emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX(rtx) 0, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ( (scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode ::from_int) E_SImode))), true, top_label); |
1384 | jump = get_last_insn (); |
1385 | gcc_assert (JUMP_P (jump))((void)(!((((enum rtx_code) (jump)->code) == JUMP_INSN)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 1385, __FUNCTION__), 0 : 0)); |
1386 | add_reg_br_prob_note (jump, |
1387 | profile_probability::guessed_always () |
1388 | .apply_scale (80, 100)); |
1389 | } |
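
Editor's note: the RTL loop emitted above is easier to follow as the C loop it stands for. The sketch below is illustrative only and not part of asan.c; clear_shadow_sketch and its parameters are stand-ins for the run-time values behind SHADOW_MEM and LEN.

  #include <stddef.h>
  #include <string.h>

  /* Roughly what the emitted loop does at run time: zero LEN shadow bytes,
     four at a time, starting at the shadow address behind SHADOW_MEM.  */
  static void
  clear_shadow_sketch (unsigned char *shadow, size_t len)
  {
    /* LEN is asserted above to be a multiple of 4.  */
    for (size_t i = 0; i < len; i += 4)
      memset (shadow + i, 0, 4);
  }
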
1390 | |
1391 | void |
1392 | asan_function_start (void) |
1393 | { |
1394 | section *fnsec = function_section (current_function_decl); |
1395 | switch_to_section (fnsec); |
1396 | ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1397 | current_function_funcdef_no);
1398 | } |
1399 | |
1400 | /* Return number of shadow bytes that are occupied by a local variable |
1401 | of SIZE bytes. */ |
1402 | |
1403 | static unsigned HOST_WIDE_INT
1404 | shadow_mem_size (unsigned HOST_WIDE_INT size)
1405 | {
1406 | /* It must be possible to align stack variables to granularity
1407 | of shadow memory. */
1408 | gcc_assert (BITS_PER_UNIT
1409 | * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);
1410 | 
1411 | return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1412 | } |
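
For intuition, here is a small worked example of the same computation (editorial illustration only; the granularity value 8 matches the macro expansion visible above and is assumed here, not guaranteed by this note for every target).

  #include <assert.h>

  #define GRANULARITY 8UL   /* assumed shadow granularity for this sketch */

  /* Mirrors shadow_mem_size: one shadow byte per (partially) used granule.  */
  static unsigned long
  shadow_bytes_for (unsigned long size)
  {
    return (size + GRANULARITY - 1) / GRANULARITY;
  }

  int
  main (void)
  {
    assert (shadow_bytes_for (1) == 1);   /* one byte still occupies a granule */
    assert (shadow_bytes_for (23) == 3);  /* 23 rounds up to 24, i.e. 3 bytes  */
    assert (shadow_bytes_for (24) == 3);
    return 0;
  }
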
1413 | |
1414 | /* Always emit 4 bytes at a time. */ |
1415 | #define RZ_BUFFER_SIZE 4
1416 | |
1417 | /* ASAN redzone buffer container that handles emission of shadow bytes. */ |
1418 | class asan_redzone_buffer |
1419 | { |
1420 | public: |
1421 | /* Constructor. */ |
1422 | asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
1423 | m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
1424 | m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
1425 | {} |
1426 | |
1427 | /* Emit VALUE shadow byte at a given OFFSET. */ |
1428 | void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);
1429 | 
1430 | /* Emit the content of the buffer as RTL. */
1431 | void flush_redzone_payload (void); |
1432 | |
1433 | private: |
1434 | /* Flush if the content of the buffer is full |
1435 | (equal to RZ_BUFFER_SIZE). */ |
1436 | void flush_if_full (void); |
1437 | |
1438 | /* Memory where we last emitted a redzone payload. */ |
1439 | rtx m_shadow_mem; |
1440 | |
1441 | /* Relative offset where we last emitted a redzone payload. */ |
1442 | HOST_WIDE_INT m_prev_offset;
1443 | 
1444 | /* Relative original offset. Used for checking only. */
1445 | HOST_WIDE_INT m_original_offset;
1446 | |
1447 | public: |
1448 | /* Buffer with redzone payload. */ |
1449 | auto_vec<unsigned char> m_shadow_bytes; |
1450 | }; |
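
The class above batches individual shadow bytes so they can be written with one 4-byte store instead of four 1-byte stores. Below is a rough stand-alone sketch of that buffering policy (editorial illustration; it models only the byte bookkeeping, assumes the 8-byte shadow granularity seen in the expansions above, and the printf stands in for the emit_move_insn done by flush_redzone_payload; the alignment fix-up done on the non-consecutive path is omitted).

  #include <cstdio>
  #include <vector>

  struct redzone_buffer_sketch
  {
    long prev_offset = 0;             // offset of the first buffered byte
    std::vector<unsigned char> bytes; // at most 4 pending shadow bytes

    void flush ()
    {
      if (bytes.empty ())
        return;
      bytes.resize (4, 0);            // pad a partial group with zeros
      unsigned int word = 0;
      for (int i = 0; i < 4; i++)     // little-endian packing, as in the
        word |= (unsigned int) bytes[i] << (8 * i); // non-BYTES_BIG_ENDIAN case
      std::printf ("store 0x%08x at shadow offset %ld\n", word, prev_offset);
      bytes.clear ();
    }

    void emit_byte (long offset, unsigned char value)
    {
      if (offset != prev_offset + 8 * (long) bytes.size ())
        {                             // non-consecutive byte: flush and restart
          flush ();
          prev_offset = offset;
        }
      bytes.push_back (value);
      if (bytes.size () == 4)         // buffer full: emit one 4-byte store
        flush ();
    }
  };
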
1451 | |
1452 | /* Emit VALUE shadow byte at a given OFFSET. */ |
1453 | |
1454 | void |
1455 | asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
1456 | unsigned char value)
1457 | {
1458 | gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
1459 | gcc_assert (offset >= m_prev_offset);
1460 | 
1461 | HOST_WIDE_INT off
1462 | = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
1463 | if (off == offset) |
1464 | { |
1465 | /* Consecutive shadow memory byte. */ |
1466 | m_shadow_bytes.safe_push (value); |
1467 | flush_if_full (); |
1468 | } |
1469 | else |
1470 | { |
1471 | if (!m_shadow_bytes.is_empty ()) |
1472 | flush_redzone_payload (); |
1473 | |
1474 | /* Maybe start earlier in order to use aligned store. */ |
1475 | HOST_WIDE_INT align = (offset - m_prev_offset) % ASAN_RED_ZONE_SIZE;
1476 | if (align)
1477 | {
1478 | offset -= align;
1479 | for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
1480 | m_shadow_bytes.safe_push (0);
1481 | }
1482 | 
1483 | /* Adjust m_prev_offset and m_shadow_mem. */
1484 | HOST_WIDE_INT diff = offset - m_prev_offset;
1485 | m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
1486 | diff >> ASAN_SHADOW_SHIFT);
1487 | m_prev_offset = offset; |
1488 | m_shadow_bytes.safe_push (value); |
1489 | flush_if_full (); |
1490 | } |
1491 | } |
1492 | |
1493 | /* Emit the content of the buffer as RTL. */
1494 | |
1495 | void |
1496 | asan_redzone_buffer::flush_redzone_payload (void) |
1497 | { |
1498 | gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
1499 | 
1500 | if (m_shadow_bytes.is_empty ())
1501 | return;
1502 | 
1503 | /* Be sure we always emit to an aligned address. */
1504 | gcc_assert (((m_prev_offset - m_original_offset)
1505 | & (ASAN_RED_ZONE_SIZE - 1)) == 0);
1506 | 
1507 | /* Fill it to RZ_BUFFER_SIZE bytes with zeros if needed. */
1508 | unsigned l = m_shadow_bytes.length ();
1509 | for (unsigned i = 0; i <= RZ_BUFFER_SIZE - l; i++)
1510 | m_shadow_bytes.safe_push (0); |
1511 | |
1512 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1513 | fprintf (dump_file, |
1514 | "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);
1515 | 
1516 | unsigned HOST_WIDE_INT val = 0;
1517 | for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
1518 | {
1519 | unsigned char v
1520 | = m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
1521 | val |= (unsigned HOST_WIDE_INT) v << (BITS_PER_UNIT * i);
1522 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1523 | fprintf (dump_file, "%02x ", v); |
1524 | } |
1525 | |
1526 | if (dump_file && (dump_flags & TDF_DETAILS)) |
1527 | fprintf (dump_file, "\n"); |
1528 | |
1529 | rtx c = gen_int_mode (val, SImode);
1530 | m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
1531 | emit_move_insn (m_shadow_mem, c); |
1532 | m_shadow_bytes.truncate (0); |
1533 | } |
1534 | |
1535 | /* Flush if the content of the buffer is full |
1536 | (equal to RZ_BUFFER_SIZE). */ |
1537 | |
1538 | void |
1539 | asan_redzone_buffer::flush_if_full (void) |
1540 | { |
1541 | if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
1542 | flush_redzone_payload (); |
1543 | } |
1544 | |
1545 | |
1546 | /* HWAddressSanitizer (hwasan) is a probabilistic method for detecting |
1547 | out-of-bounds and use-after-free bugs. |
1548 | Read more: |
1549 | http://code.google.com/p/address-sanitizer/ |
1550 | |
1551 | Similar to AddressSanitizer (asan) it consists of two parts: the |
1552 | instrumentation module in this file, and a run-time library. |
1553 | |
1554 | The instrumentation module adds a run-time check before every memory insn in |
1555 | the same manner as asan (see the block comment for AddressSanitizer above). |
1556 | Currently, hwasan only adds out-of-line instrumentation, where each check is |
1557 | implemented as a function call to the run-time library. Hence a check for a |
1558 | load of N bytes from address X would be implemented with a function call to |
1559 | __hwasan_loadN(X), and checking a store of N bytes from address X would be |
1560 | implemented with a function call to __hwasan_storeN(X). |
1561 | |
1562 | The main difference between hwasan and asan is in the information stored to |
1563 | help this checking. Both sanitizers use a shadow memory area which stores |
1564 | data recording the state of main memory at a corresponding address. |
1565 | |
1566 | For hwasan, each 16 byte granule in main memory has a corresponding 1 byte |
1567 | in shadow memory. This shadow address can be calculated with equation: |
1568 | (addr >> log_2(HWASAN_TAG_GRANULE_SIZE)) |
1569 | + __hwasan_shadow_memory_dynamic_address; |
1570 | The conversion between real and shadow memory for asan is given in the block |
1571 | comment at the top of this file. |
1572 | The description of how this shadow memory is laid out for asan is in the |
1573 | block comment at the top of this file, here we describe how this shadow |
1574 | memory is used for hwasan. |
1575 | |
1576 | For hwasan, each variable is assigned a byte-sized 'tag'. The extent of |
1577 | the shadow memory for that variable is filled with the assigned tag, and |
1578 | every pointer referencing that variable has its top byte set to the same |
1579 | tag. The run-time library redefines malloc so that every allocation returns |
1580 | a tagged pointer and tags the corresponding shadow memory with the same tag. |
1581 | |
1582 | On each pointer dereference the tag found in the pointer is compared to the |
1583 | tag found in the shadow memory corresponding to the accessed memory address. |
1584 | If these tags are found to differ then this memory access is judged to be |
1585 | invalid and a report is generated. |
1586 | |
1587 | This method of bug detection is not perfect -- it can not catch every bad |
1588 | access -- but catches them probabilistically instead. There is always the |
1589 | possibility that an invalid memory access will happen to access memory |
1590 | tagged with the same tag as the pointer that this access used. |
1591 | The chances of this are approx. 0.4% for any two uncorrelated objects. |
1592 | |
1593 | Random tag generation can mitigate this problem by decreasing the |
1594 | probability that an invalid access will be missed in the same manner over |
1595 | multiple runs. i.e. if two objects are tagged the same in one run of the |
1596 | binary they are unlikely to be tagged the same in the next run. |
1597 | Both heap and stack allocated objects have random tags by default. |
1598 | |
1599 | [16 byte granule implications] |
1600 | Since the shadow memory only has a resolution on real memory of 16 bytes, |
1601 | invalid accesses that are within the same 16 byte granule as a valid |
1602 | address will not be caught. |
1603 | |
1604 | There is a "short-granule" feature in the runtime library which does catch |
1605 | such accesses, but this feature is not implemented for stack objects (since |
1606 | stack objects are allocated and tagged by compiler instrumentation, and |
1607 | this feature has not yet been implemented in GCC instrumentation). |
1608 | |
1609 | Another outcome of this 16 byte resolution is that each tagged object must |
1610 | be 16 byte aligned. If two objects were to share any 16 byte granule in |
1611 | memory, then they both would have to be given the same tag, and invalid |
1612 | accesses to one using a pointer to the other would be undetectable. |
1613 | |
1614 | [Compiler instrumentation] |
1615 | Compiler instrumentation ensures that two adjacent buffers on the stack are |
1616 | given different tags, this means an access to one buffer using a pointer |
1617 | generated from the other (e.g. through buffer overrun) will have mismatched |
1618 | tags and be caught by hwasan. |
1619 | |
1620 | We don't randomly tag every object on the stack, since that would require |
1621 | keeping many registers to record each tag. Instead we randomly generate a |
1622 | tag for each function frame, and each new stack object uses a tag offset |
1623 | from that frame tag. |
1624 | i.e. each object is tagged as RFT + offset, where RFT is the "random frame |
1625 | tag" generated for this frame. |
1626 | This means that randomisation does not perturb the difference between tags
1627 | on tagged stack objects within a frame, but this is mitigated by the fact |
1628 | that objects with the same tag within a frame are very far apart |
1629 | (approx. 2^HWASAN_TAG_SIZE objects apart). |
1630 | |
1631 | As a demonstration, using the same example program as in the asan block |
1632 | comment above: |
1633 | |
1634 | int |
1635 | foo () |
1636 | { |
1637 | char a[23] = {0}; |
1638 | int b[2] = {0}; |
1639 | |
1640 | a[5] = 1; |
1641 | b[1] = 2; |
1642 | |
1643 | return a[5] + b[1]; |
1644 | } |
1645 | |
1646 | On AArch64 the stack will be ordered as follows for the above function: |
1647 | |
1648 | Slot 1/ [24 bytes for variable 'a'] |
1649 | Slot 2/ [8 bytes padding for alignment] |
1650 | Slot 3/ [8 bytes for variable 'b'] |
1651 | Slot 4/ [8 bytes padding for alignment] |
1652 | |
1653 | (The padding is there to ensure 16 byte alignment as described in the 16 |
1654 | byte granule implications). |
1655 | |
1656 | While the shadow memory will be ordered as follows: |
1657 | |
1658 | - 2 bytes (representing 32 bytes in real memory) tagged with RFT + 1. |
1659 | - 1 byte (representing 16 bytes in real memory) tagged with RFT + 2. |
1660 | |
1661 | And any pointer to "a" will have the tag RFT + 1, and any pointer to "b" |
1662 | will have the tag RFT + 2. |
1663 | |
1664 | [Top Byte Ignore requirements] |
1665 | Hwasan requires the ability to store an 8 bit tag in every pointer. There |
1666 | is no instrumentation done to remove this tag from pointers before |
1667 | dereferencing, which means the hardware must ignore this tag during memory |
1668 | accesses. |
1669 | |
1670 | Architectures where this feature is available should indicate this using |
1671 | the TARGET_MEMTAG_CAN_TAG_ADDRESSES hook. |
1672 | |
1673 | [Stack requires cleanup on unwinding] |
1674 | During normal operation of a hwasan sanitized program more space in the |
1675 | shadow memory becomes tagged as the stack grows. As the stack shrinks this |
1676 | shadow memory space must become untagged. If it is not untagged then when |
1677 | the stack grows again (during other function calls later on in the program) |
1678 | objects on the stack that are usually not tagged (e.g. parameters passed on |
1679 | the stack) can be placed in memory whose shadow space is tagged with |
1680 | something else, and accesses can cause false positive reports. |
1681 | |
1682 | Hence we place untagging code on every epilogue of functions which tag some |
1683 | stack objects. |
1684 | |
1685 | Moreover, the run-time library intercepts longjmp & setjmp to untag when |
1686 | the stack is unwound this way. |
1687 | |
1688 | C++ exceptions are not yet handled, which means this sanitizer can not |
1689 | handle C++ code that throws exceptions -- it will give false positives |
1690 | after an exception has been thrown. The implementation that the hwasan |
1691 | library has for handling these relies on the frame pointer being after any |
1692 | local variables. This is not generally the case for GCC. */ |
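
To make the tag-checking scheme described above concrete, here is a small stand-alone sketch of the run-time check (editorial illustration only; it assumes 64-bit pointers with the tag in the top byte and the 16-byte granule described in the comment, and shadow_dynamic_base is a hypothetical stand-in for __hwasan_shadow_memory_dynamic_address).

  #include <stdint.h>
  #include <stdbool.h>

  #define GRANULE_SHIFT 4   /* log2 of the assumed 16-byte granule */

  /* Hypothetical stand-in for __hwasan_shadow_memory_dynamic_address.  */
  extern uint8_t *shadow_dynamic_base;

  /* Would this tagged pointer pass the hwasan check sketched above?  */
  static bool
  access_ok (uintptr_t tagged_ptr)
  {
    uint8_t ptr_tag = tagged_ptr >> 56;              /* tag lives in the top byte */
    uintptr_t addr = tagged_ptr & ~(0xffULL << 56);  /* strip it to get the address */
    uint8_t mem_tag = shadow_dynamic_base[addr >> GRANULE_SHIFT];
    return ptr_tag == mem_tag;                       /* mismatch means a report */
  }
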
1693 | |
1694 | |
1695 | /* Returns whether we are tagging pointers and checking those tags on memory |
1696 | access. */ |
1697 | bool |
1698 | hwasan_sanitize_p () |
1699 | { |
1700 | return sanitize_flags_p (SANITIZE_HWADDRESS); |
1701 | } |
1702 | |
1703 | /* Are we tagging the stack? */ |
1704 | bool |
1705 | hwasan_sanitize_stack_p () |
1706 | { |
1707 | return (hwasan_sanitize_p () && param_hwasan_instrument_stack);
1708 | } |
1709 | |
1710 | /* Are we tagging alloca objects? */ |
1711 | bool |
1712 | hwasan_sanitize_allocas_p (void) |
1713 | { |
1714 | return (hwasan_sanitize_stack_p () && param_hwasan_instrument_allocas);
1715 | } |
1716 | |
1717 | /* Should we instrument reads? */ |
1718 | bool |
1719 | hwasan_instrument_reads (void) |
1720 | { |
1721 | return (hwasan_sanitize_p () && param_hwasan_instrument_reads);
1722 | } |
1723 | |
1724 | /* Should we instrument writes? */ |
1725 | bool |
1726 | hwasan_instrument_writes (void) |
1727 | { |
1728 | return (hwasan_sanitize_p () && param_hwasan_instrument_writes);
1729 | } |
1730 | |
1731 | /* Should we instrument builtin calls? */ |
1732 | bool |
1733 | hwasan_memintrin (void) |
1734 | { |
1735 | return (hwasan_sanitize_p () && param_hwasan_instrument_mem_intrinsics);
1736 | } |
1737 | |
1738 | /* Insert code to protect stack vars. The prologue sequence should be emitted |
1739 | directly, epilogue sequence returned. BASE is the register holding the
1740 | stack base, relative to which the OFFSETS array offsets are computed. The
1741 | OFFSETS array contains pairs of offsets in reverse order, always the end
1742 | offset of some gap that needs protection followed by its starting offset,
1743 | and DECLS is an array of representative decls for each var partition.
1744 | LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
1745 | elements long (OFFSETS includes the gap before the first variable as well
1746 | as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1747 | register on which the stack vars' DECL_RTLs are based. Either BASE should be
1748 | assigned to PBASE, when not doing use-after-return protection, or the
1749 | corresponding address based on the __asan_stack_malloc* return value. */
1750 | |
1751 | rtx_insn * |
1752 | asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb, |
1753 | HOST_WIDE_INT *offsets, tree *decls, int length)
1754 | { |
1755 | rtx shadow_base, shadow_mem, ret, mem, orig_base; |
1756 | rtx_code_label *lab; |
1757 | rtx_insn *insns; |
1758 | char buf[32]; |
1759 | HOST_WIDE_INT base_offset = offsets[length - 1];
1760 | HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1761 | HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1762 | HOST_WIDE_INT last_offset, last_size, last_size_aligned;
1763 | int l;
1764 | unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1765 | tree str_cst, decl, id;
1766 | int use_after_return_class = -1;
1767 | 
1768 | if (shadow_ptr_types[0] == NULL_TREE)
1769 | asan_init_shadow_ptr_types ();
1770 | 
1771 | expanded_location cfun_xloc
1772 | = expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1773 | |
1774 | /* First of all, prepare the description string. */ |
1775 | pretty_printer asan_pp; |
1776 | |
1777 | pp_decimal_int (&asan_pp, length / 2 - 1);
1778 | pp_space (&asan_pp);
1779 | for (l = length - 2; l; l -= 2)
1780 | {
1781 | tree decl = decls[l / 2 - 1];
1782 | pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1783 | pp_space (&asan_pp);
1784 | pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1785 | pp_space (&asan_pp);
1786 | 
1787 | expanded_location xloc
1788 | = expand_location (DECL_SOURCE_LOCATION (decl));
1789 | char location[32]; |
1790 | |
1791 | if (xloc.file == cfun_xloc.file) |
1792 | sprintf (location, ":%d", xloc.line); |
1793 | else |
1794 | location[0] = '\0'; |
1795 | |
1796 | if (DECL_P (decl) && DECL_NAME (decl))
1797 | {
1798 | unsigned idlen
1799 | = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
1800 | pp_decimal_int (&asan_pp, idlen);
1801 | pp_space (&asan_pp);
1802 | pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1803 | pp_string (&asan_pp, location);
1804 | }
1805 | else
1806 | pp_string (&asan_pp, "9 <unknown>");
1807 | 
1808 | if (l > 2)
1809 | pp_space (&asan_pp);
1810 | } |
1811 | str_cst = asan_pp_string (&asan_pp); |
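/* Editor's note (illustrative, not part of asan.c): the loop above builds a
   space-separated frame description of the form
     "<number of vars> <offset> <size> <name length> <name[:line]> ..."
   For example, a frame with one 4-byte variable 'x' declared on line 3 of the
   current file and placed at offset 32 within the frame would be described as
     "1 32 4 3 x:3"
   where 3 is strlen ("x") + strlen (":3").  The offset and size here are made
   up; real values depend on the layout the caller computed in OFFSETS.  */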
1812 | |
1813 | /* Emit the prologue sequence. */ |
1814 | if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase |
1815 | && param_asan_use_after_return)
1816 | { |
1817 | use_after_return_class = floor_log2 (asan_frame_size - 1) - 5; |
1818 | /* __asan_stack_malloc_N guarantees alignment |
1819 | N < 6 ? (64 << N) : 4096 bytes. */ |
1820 | if (alignb > (use_after_return_class < 6 |
1821 | ? (64U << use_after_return_class) : 4096U)) |
1822 | use_after_return_class = -1; |
1823 | else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1824 | base_align_bias = ((asan_frame_size + alignb - 1)
1825 | & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1826 | } |
1827 | |
1828 | /* Align base if target is STRICT_ALIGNMENT. */ |
1829 | if (STRICT_ALIGNMENT)
1830 | {
1831 | const HOST_WIDE_INT align
1832 | = (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
1833 | base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
1834 | NULL_RTX, 1, OPTAB_DIRECT);
1835 | } |
1836 | |
1837 | if (use_after_return_class == -1 && pbase) |
1838 | emit_move_insn (pbase, base); |
1839 | |
1840 | base = expand_binop (Pmode, add_optab, base,
1841 | gen_int_mode (base_offset - base_align_bias, Pmode),
1842 | NULL_RTX, 1, OPTAB_DIRECT);
1843 | orig_base = NULL_RTX;
1844 | if (use_after_return_class != -1)
1845 | {
1846 | if (asan_detect_stack_use_after_return == NULL_TREE)
1847 | {
1848 | id = get_identifier ("__asan_option_detect_stack_use_after_return");
1849 | decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1850 | integer_type_node);
1851 | SET_DECL_ASSEMBLER_NAME (decl, id);
1852 | TREE_ADDRESSABLE (decl) = 1;
1853 | DECL_ARTIFICIAL (decl) = 1;
1854 | DECL_IGNORED_P (decl) = 1;
1855 | DECL_EXTERNAL (decl) = 1;
1856 | TREE_STATIC (decl) = 1;
1857 | TREE_PUBLIC (decl) = 1;
1858 | TREE_USED (decl) = 1;
1859 | asan_detect_stack_use_after_return = decl; |
1860 | } |
1861 | orig_base = gen_reg_rtx (Pmode);
1862 | emit_move_insn (orig_base, base);
1863 | ret = expand_normal (asan_detect_stack_use_after_return);
1864 | lab = gen_label_rtx ();
1865 | emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1866 | VOIDmode, 0, lab,
1867 | profile_probability::very_likely ());
1868 | snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1869 | use_after_return_class);
1870 | ret = init_one_libfunc (buf);
1871 | ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1872 | GEN_INT (asan_frame_size
1873 | + base_align_bias),
1874 | TYPE_MODE (pointer_sized_int_node));
1875 | /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
1876 | and NULL otherwise. Check RET value is NULL here and jump over the
1877 | BASE reassignment in this case. Otherwise, reassign BASE to RET. */
1878 | emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1879 | VOIDmode, 0, lab,
1880 | profile_probability::very_unlikely ());
1881 | ret = convert_memory_address (Pmode, ret);
1882 | emit_move_insn (base, ret);
1883 | emit_label (lab);
1884 | emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1885 | gen_int_mode (base_align_bias
1886 | - base_offset, Pmode),
1887 | NULL_RTX, 1, OPTAB_DIRECT));
1888 | } |
1889 | mem = gen_rtx_MEM (ptr_mode, base); |
1890 | mem = adjust_address (mem, VOIDmode, base_align_bias);
1891 | emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1892 | mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1893 | emit_move_insn (mem, expand_normal (str_cst));
1894 | mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1895 | ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1896 | id = get_identifier (buf);
1897 | decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1898 | VAR_DECL, id, char_type_node);
1899 | SET_DECL_ASSEMBLER_NAME (decl, id);
1900 | TREE_ADDRESSABLE (decl) = 1;
1901 | TREE_READONLY (decl) = 1;
1902 | DECL_ARTIFICIAL (decl) = 1;
1903 | DECL_IGNORED_P (decl) = 1;
1904 | TREE_STATIC (decl) = 1;
1905 | TREE_PUBLIC (decl) = 0;
1906 | TREE_USED (decl) = 1;
1907 | DECL_INITIAL (decl) = decl;
1908 | TREE_ASM_WRITTEN (decl) = 1;
1909 | TREE_ASM_WRITTEN (id) = 1;
1910 | emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1911 | shadow_base = expand_binop (Pmode, lshr_optab, base,
1912 | gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
1913 | NULL_RTX, 1, OPTAB_DIRECT);
1914 | shadow_base
1915 | = plus_constant (Pmode, shadow_base,
1916 | asan_shadow_offset ()
1917 | + (base_align_bias >> ASAN_SHADOW_SHIFT));
1918 | gcc_assert (asan_shadow_set != -1
1919 | && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1920 | shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1921 | set_mem_alias_set (shadow_mem, asan_shadow_set);
1922 | if (STRICT_ALIGNMENT)
1923 | set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1924 | prev_offset = base_offset; |
1925 | |
1926 | asan_redzone_buffer rz_buffer (shadow_mem, prev_offset); |
1927 | for (l = length; l; l -= 2) |
1928 | { |
1929 | if (l == 2) |
1930 | cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1931 | offset = offsets[l - 1]; |
1932 | |
1933 | bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
1934 | /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY then
1935 | the previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
1936 | In that case we have to emit one extra byte that will describe
1937 | how many bytes (out of ASAN_SHADOW_GRANULARITY) can be accessed. */
1938 | if (extra_byte) |
1939 | { |
1940 | HOST_WIDE_INT aoff
1941 | = base_offset + ((offset - base_offset)
1942 | & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
1943 | rz_buffer.emit_redzone_byte (aoff, offset - aoff);
1944 | offset = aoff + ASAN_SHADOW_GRANULARITY;
1945 | } |
1946 | |
1947 | /* Calculate size of red zone payload. */ |
1948 | while (offset < offsets[l - 2]) |
1949 | { |
1950 | rz_buffer.emit_redzone_byte (offset, cur_shadow_byte); |
1951 | offset += ASAN_SHADOW_GRANULARITY;
1952 | }
1953 | 
1954 | cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1955 | }
1956 | 
1957 | /* As the automatic variables are aligned to
1958 | ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
1959 | flushed here. */
1960 | gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());
1961 | |
1962 | do_pending_stack_adjust (); |
1963 | |
1964 | /* Construct epilogue sequence. */ |
1965 | start_sequence (); |
1966 | |
1967 | lab = NULL;
1968 | if (use_after_return_class != -1) |
1969 | { |
1970 | rtx_code_label *lab2 = gen_label_rtx (); |
1971 | char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1972 | emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1973 | VOIDmode, 0, lab2,
1974 | profile_probability::very_likely ());
1975 | shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1976 | set_mem_alias_set (shadow_mem, asan_shadow_set);
1977 | mem = gen_rtx_MEM (ptr_mode, base);
1978 | mem = adjust_address (mem, VOIDmode, base_align_bias);
1979 | emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1980 | unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1981 | if (use_after_return_class < 5
1982 | && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1983 | BITS_PER_UNIT, true))
1984 | { |
1985 | /* Emit: |
1986 | memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize); |
1987 | **SavedFlagPtr(FakeStack, class_id) = 0 |
1988 | */ |
1989 | store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1990 | BITS_PER_UNIT, true, RETURN_BEGIN);
1991 | 
1992 | unsigned HOST_WIDE_INT offset
1993 | = (1 << (use_after_return_class + 6));
1994 | offset -= GET_MODE_SIZE (ptr_mode);
1995 | mem = gen_rtx_MEM (ptr_mode, base);
1996 | mem = adjust_address (mem, ptr_mode, offset);
1997 | rtx addr = gen_reg_rtx (ptr_mode);
1998 | emit_move_insn (addr, mem);
1999 | addr = convert_memory_address (Pmode, addr);
2000 | mem = gen_rtx_MEM (QImode, addr);
2001 | emit_move_insn (mem, const0_rtx);
2002 | } |
2003 | else if (use_after_return_class >= 5 |
2004 | || !set_storage_via_setmem (shadow_mem, |
2005 | GEN_INT (sz),
2006 | gen_int_mode (c, QImode),
2007 | BITS_PER_UNIT, BITS_PER_UNIT,
2008 | -1, sz, sz, sz)) |
2009 | { |
2010 | snprintf (buf, sizeof buf, "__asan_stack_free_%d", |
2011 | use_after_return_class); |
2012 | ret = init_one_libfunc (buf); |
2013 | rtx addr = convert_memory_address (ptr_mode, base);
2014 | rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
2015 | emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
2016 | GEN_INT (asan_frame_size + base_align_bias),
2017 | TYPE_MODE (pointer_sized_int_node),
2018 | orig_addr, ptr_mode); |
2019 | } |
2020 | lab = gen_label_rtx (); |
2021 | emit_jump (lab); |
2022 | emit_label (lab2); |
2023 | } |
2024 | |
2025 | shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
2026 | set_mem_alias_set (shadow_mem, asan_shadow_set);
2027 | 
2028 | if (STRICT_ALIGNMENT)
2029 | set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
2030 | |
2031 | prev_offset = base_offset; |
2032 | last_offset = base_offset; |
2033 | last_size = 0; |
2034 | last_size_aligned = 0; |
2035 | for (l = length; l; l -= 2) |
2036 | { |
2037 | offset = base_offset + ((offsets[l - 1] - base_offset) |
2038 | & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
2039 | if (last_offset + last_size_aligned < offset)
2040 | {
2041 | shadow_mem = adjust_address (shadow_mem, VOIDmode,
2042 | (last_offset - prev_offset)
2043 | >> ASAN_SHADOW_SHIFT);
2044 | prev_offset = last_offset;
2045 | asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
2046 | last_offset = offset; |
2047 | last_size = 0; |
2048 | } |
2049 | else |
2050 | last_size = offset - last_offset; |
2051 | last_size += base_offset + ((offsets[l - 2] - base_offset) |
2052 | & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
2053 | - offset;
2054 | 
2055 | /* Unpoison shadow memory that corresponds to a variable that is
2056 | subject to use-after-return sanitization. */
2057 | if (l > 2)
2058 | {
2059 | decl = decls[l / 2 - 2];
2060 | if (asan_handled_variables != NULL
2061 | && asan_handled_variables->contains (decl))
2062 | {
2063 | HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
2064 | if (dump_file && (dump_flags & TDF_DETAILS)) |
2065 | { |
2066 | const char *n = (DECL_NAME (decl)
2067 | ? IDENTIFIER_POINTER (DECL_NAME (decl))
2068 | : "<unknown>");
2069 | fprintf (dump_file, "Unpoisoning shadow stack for variable: "
2070 | "%s (%" PRId64 " B)\n", n, size);
2071 | }
2072 | 
2073 | last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
2074 | }
2075 | }
2076 | last_size_aligned
2077 | = ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
2078 | & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
2079 | } |
2080 | if (last_size_aligned) |
2081 | { |
2082 | shadow_mem = adjust_address (shadow_mem, VOIDmode,
2083 | (last_offset - prev_offset)
2084 | >> ASAN_SHADOW_SHIFT);
2085 | asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
2086 | }
2087 | 
2088 | /* Clean-up set with instrumented stack variables. */
2089 | delete asan_handled_variables;
2090 | asan_handled_variables = NULL;
2091 | delete asan_used_labels;
2092 | asan_used_labels = NULL;
2093 | |
2094 | do_pending_stack_adjust (); |
2095 | if (lab) |
2096 | emit_label (lab); |
2097 | |
2098 | insns = get_insns (); |
2099 | end_sequence (); |
2100 | return insns; |
2101 | } |
2102 | |
2103 | /* Emit an __asan_allocas_unpoison (top, bot) call. TOP and BOT become the
2104 | call arguments. If BEFORE is non-null the new instructions are appended
2105 | to that existing sequence; otherwise a new sequence is started. */
2106 | |
2107 | rtx_insn * |
2108 | asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before) |
2109 | { |
2110 | if (before) |
2111 | push_to_sequence (before); |
2112 | else |
2113 | start_sequence (); |
2114 | rtx ret = init_one_libfunc ("__asan_allocas_unpoison"); |
2115 | top = convert_memory_address (ptr_mode, top);
2116 | bot = convert_memory_address (ptr_mode, bot);
2117 | emit_library_call (ret, LCT_NORMAL, ptr_mode, |
2118 | top, ptr_mode, bot, ptr_mode); |
2119 | |
2120 | do_pending_stack_adjust (); |
2121 | rtx_insn *insns = get_insns (); |
2122 | end_sequence (); |
2123 | return insns; |
2124 | } |
2125 | |
2126 | /* Return true if DECL, a global var, might be overridden and therefore
2127 | needs a local alias. */
2128 | 
2129 | static bool
2130 | asan_needs_local_alias (tree decl)
2131 | {
2132 | return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
2133 | }
2134 | 
2135 | /* Return true if DECL, a global var, is an artificial ODR indicator symbol
2136 | and therefore doesn't need protection. */
2137 | 
2138 | static bool
2139 | is_odr_indicator (tree decl)
2140 | {
2141 | return (DECL_ARTIFICIAL (decl)
2142 | && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
2143 | } |
2144 | |
2145 | /* Return true if DECL is a VAR_DECL that should be protected |
2146 | by Address Sanitizer, by appending a red zone with protected |
2147 | shadow memory after it and aligning it to at least |
2148 | ASAN_RED_ZONE_SIZE bytes. */ |
2149 | |
2150 | bool |
2151 | asan_protect_global (tree decl, bool ignore_decl_rtl_set_p) |
2152 | { |
2153 | if (!param_asan_globals)
2154 | return false;
2155 | 
2156 | rtx rtl, symbol;
2157 | 
2158 | if (TREE_CODE (decl) == STRING_CST)
2159 | {
2160 | /* Instrument all STRING_CSTs except those created
2161 | by asan_pp_string here. */
2162 | if (shadow_ptr_types[0] != NULL_TREE
2163 | && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2164 | && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
2165 | return false; |
2166 | return true; |
2167 | } |
2168 | if (!VAR_P (decl)
2169 | /* TLS vars aren't statically protectable. */
2170 | || DECL_THREAD_LOCAL_P (decl)
2171 | /* Externs will be protected elsewhere. */
2172 | || DECL_EXTERNAL (decl)
2173 | /* PR sanitizer/81697: For architectures that use section anchors first
2174 | call to asan_protect_global may occur before DECL_RTL (decl) is set.
2175 | We should ignore DECL_RTL_SET_P then, because otherwise the first call
2176 | to asan_protect_global will return FALSE and the following calls on the
2177 | same decl after setting DECL_RTL (decl) will return TRUE and we'll end
2178 | up with inconsistency at runtime. */
2179 | || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
2180 | /* Comdat vars pose an ABI problem, we can't know if
2181 | the var that is selected by the linker will have
2182 | padding or not. */
2183 | || DECL_ONE_ONLY (decl)
2184 | /* Similarly for common vars. People can use -fno-common.
2185 | Note: Linux kernel is built with -fno-common, so we do instrument
2186 | globals there even if it is C. */
2187 | || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
2188 | /* Don't protect if using user section, often vars placed
2189 | into user section from multiple TUs are then assumed
2190 | to be an array of such vars, putting padding in there
2191 | breaks this assumption. */
2192 | || (DECL_SECTION_NAME (decl) != NULL
2193 | && !symtab_node::get (decl)->implicit_section
2194 | && !section_sanitized_p (DECL_SECTION_NAME (decl)))
2195 | || DECL_SIZE (decl) == 0
2196 | || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
2197 | || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2198 | || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
2199 | || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
2200 | || TREE_TYPE (decl) == ubsan_get_source_location_type ()
2201 | || is_odr_indicator (decl))
2202 | return false; |
2203 | |
2204 | if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
2205 | {
2206 | 
2207 | rtl = DECL_RTL (decl);
2208 | if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
2209 | return false;
2210 | symbol = XEXP (rtl, 0);
2211 | 
2212 | if (CONSTANT_POOL_ADDRESS_P (symbol)
2213 | || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2214 | return false;
2215 | }
2216 | 
2217 | if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
2218 | return false;
2219 | 
2220 | if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
2221 | return false; |
2222 | |
2223 | return true; |
2224 | } |
2225 | |
2226 | /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}. |
2227 | IS_STORE is either 1 (for a store) or 0 (for a load). */ |
2228 | |
2229 | static tree |
2230 | report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
2231 | int *nargs)
2232 | {
2233 | gcc_assert (!hwasan_sanitize_p ());
2234 | |
2235 | static enum built_in_function report[2][2][6] |
2236 | = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2, |
2237 | BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8, |
2238 | BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N }, |
2239 | { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2, |
2240 | BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8, |
2241 | BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } }, |
2242 | { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT, |
2243 | BUILT_IN_ASAN_REPORT_LOAD2_NOABORT, |
2244 | BUILT_IN_ASAN_REPORT_LOAD4_NOABORT, |
2245 | BUILT_IN_ASAN_REPORT_LOAD8_NOABORT, |
2246 | BUILT_IN_ASAN_REPORT_LOAD16_NOABORT, |
2247 | BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT }, |
2248 | { BUILT_IN_ASAN_REPORT_STORE1_NOABORT, |
2249 | BUILT_IN_ASAN_REPORT_STORE2_NOABORT, |
2250 | BUILT_IN_ASAN_REPORT_STORE4_NOABORT, |
2251 | BUILT_IN_ASAN_REPORT_STORE8_NOABORT, |
2252 | BUILT_IN_ASAN_REPORT_STORE16_NOABORT, |
2253 | BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } }; |
2254 | if (size_in_bytes == -1) |
2255 | { |
2256 | *nargs = 2; |
2257 | return builtin_decl_implicit (report[recover_p][is_store][5]); |
2258 | } |
2259 | *nargs = 1; |
2260 | int size_log2 = exact_log2 (size_in_bytes); |
2261 | return builtin_decl_implicit (report[recover_p][is_store][size_log2]); |
2262 | } |
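
The indexing scheme above (shared by check_func below) maps an access size to a table slot via exact_log2, with -1 standing for "unknown size, use the *_N variant". A tiny stand-alone illustration of that mapping follows (editorial sketch; report_slot is a hypothetical helper using plain comparisons instead of GCC's exact_log2).

  #include <assert.h>

  /* Slot chosen for a power-of-two access size: 1,2,4,8,16 -> 0..4.
     The caller passes -1 for other sizes, which selects slot 5, i.e. the
     __asan_report_{load,store}_n builtins that take an explicit length.  */
  static int
  report_slot (long size_in_bytes)
  {
    if (size_in_bytes == 1) return 0;
    if (size_in_bytes == 2) return 1;
    if (size_in_bytes == 4) return 2;
    if (size_in_bytes == 8) return 3;
    if (size_in_bytes == 16) return 4;
    return 5;
  }

  int
  main (void)
  {
    assert (report_slot (8) == 3);   /* e.g. __asan_report_load8 / store8 */
    assert (report_slot (-1) == 5);  /* __asan_report_load_n / store_n    */
    return 0;
  }
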
2263 | |
2264 | /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}. |
2265 | IS_STORE is either 1 (for a store) or 0 (for a load). */ |
2266 | |
2267 | static tree |
2268 | check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
2269 | int *nargs) |
2270 | { |
2271 | static enum built_in_function check[2][2][6] |
2272 | = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2, |
2273 | BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8, |
2274 | BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN }, |
2275 | { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2, |
2276 | BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8, |
2277 | BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } }, |
2278 | { { BUILT_IN_ASAN_LOAD1_NOABORT, |
2279 | BUILT_IN_ASAN_LOAD2_NOABORT, |
2280 | BUILT_IN_ASAN_LOAD4_NOABORT, |
2281 | BUILT_IN_ASAN_LOAD8_NOABORT, |
2282 | BUILT_IN_ASAN_LOAD16_NOABORT, |
2283 | BUILT_IN_ASAN_LOADN_NOABORT }, |
2284 | { BUILT_IN_ASAN_STORE1_NOABORT, |
2285 | BUILT_IN_ASAN_STORE2_NOABORT, |
2286 | BUILT_IN_ASAN_STORE4_NOABORT, |
2287 | BUILT_IN_ASAN_STORE8_NOABORT, |
2288 | BUILT_IN_ASAN_STORE16_NOABORT, |
2289 | BUILT_IN_ASAN_STOREN_NOABORT } } }; |
2290 | if (size_in_bytes == -1) |
2291 | { |
2292 | *nargs = 2; |
2293 | return builtin_decl_implicit (check[recover_p][is_store][5]); |
2294 | } |
2295 | *nargs = 1; |
2296 | int size_log2 = exact_log2 (size_in_bytes); |
2297 | return builtin_decl_implicit (check[recover_p][is_store][size_log2]); |
2298 | } |
2299 | |
2300 | /* Split the current basic block and create a condition statement |
2301 | insertion point right before or after the statement pointed to by |
2302 | ITER. Return an iterator to the point at which the caller might |
2303 | safely insert the condition statement. |
2304 | |
2305 | THEN_BLOCK must be set to the address of an uninitialized instance |
2306 | of basic_block. The function will then set *THEN_BLOCK to the |
2307 | 'then block' of the condition statement to be inserted by the |
2308 | caller. |
2309 | |
2310 | If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from |
2311 | *THEN_BLOCK to *FALLTHROUGH_BLOCK. |
2312 | |
2313 | Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else |
2314 | block' of the condition statement to be inserted by the caller. |
2315 | |
2316 | Note that *FALLTHROUGH_BLOCK is a new block that contains the |
2317 | statements starting from *ITER, and *THEN_BLOCK is a new empty |
2318 | block. |
2319 | |
2320 | *ITER is adjusted to always point to the first statement |
2321 | of the basic block *FALLTHROUGH_BLOCK. That statement is the |
2322 | same as what ITER was pointing to prior to calling this function, |
2323 | if BEFORE_P is true; otherwise, it is its following statement. */ |
2324 | |
2325 | gimple_stmt_iterator |
2326 | create_cond_insert_point (gimple_stmt_iterator *iter, |
2327 | bool before_p, |
2328 | bool then_more_likely_p, |
2329 | bool create_then_fallthru_edge, |
2330 | basic_block *then_block, |
2331 | basic_block *fallthrough_block) |
2332 | { |
2333 | gimple_stmt_iterator gsi = *iter; |
2334 | |
2335 | if (!gsi_end_p (gsi) && before_p) |
2336 | gsi_prev (&gsi); |
2337 | |
2338 | basic_block cur_bb = gsi_bb (*iter); |
2339 | |
2340 | edge e = split_block (cur_bb, gsi_stmt (gsi)); |
2341 | |
2342 | /* Get a hold on the 'condition block', the 'then block' and the |
2343 | 'else block'. */ |
2344 | basic_block cond_bb = e->src; |
2345 | basic_block fallthru_bb = e->dest; |
2346 | basic_block then_bb = create_empty_bb (cond_bb); |
2347 | if (current_loops((cfun + 0)->x_current_loops)) |
2348 | { |
2349 | add_bb_to_loop (then_bb, cond_bb->loop_father); |
2350 | loops_state_set (LOOPS_NEED_FIXUP); |
2351 | } |
2352 | |
2353 | /* Set up the newly created 'then block'. */ |
2354 | e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE); |
2355 | profile_probability fallthrough_probability |
2356 | = then_more_likely_p |
2357 | ? profile_probability::very_unlikely () |
2358 | : profile_probability::very_likely (); |
2359 | e->probability = fallthrough_probability.invert (); |
2360 | then_bb->count = e->count (); |
2361 | if (create_then_fallthru_edge) |
2362 | make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU); |
2363 | |
2364 | /* Set up the fallthrough basic block. */ |
2365 | e = find_edge (cond_bb, fallthru_bb); |
2366 | e->flags = EDGE_FALSE_VALUE; |
2367 | e->probability = fallthrough_probability; |
2368 | |
2369 | /* Update dominance info for the newly created then_bb; note that |
2370 | fallthru_bb's dominance info has already been updated by |
2371 | split_block. */ |
2372 | if (dom_info_available_p (CDI_DOMINATORS)) |
2373 | set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb); |
2374 | |
2375 | *then_block = then_bb; |
2376 | *fallthrough_block = fallthru_bb; |
2377 | *iter = gsi_start_bb (fallthru_bb); |
2378 | |
2379 | return gsi_last_bb (cond_bb); |
2380 | } |
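For orientation, the control-flow shape built above can be sketched as follows (the block names are the function's local variables, not emitted labels):

   cond_bb --EDGE_TRUE_VALUE--> then_bb      (new, empty; the caller fills it in)
      |                            |
  EDGE_FALSE_VALUE           EDGE_FALLTHRU   (only if create_then_fallthru_edge)
      v                            v
   fallthru_bb   (begins with the statement *ITER now points to)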
2381 | |
2382 | /* Insert an if condition followed by a 'then block' right before the |
2383 | statement pointed to by ITER. The fallthrough block -- which is the |
2384 | else block of the condition as well as the destination of the |
2385 | outgoing edge of the 'then block' -- starts with the statement |
2386 | pointed to by ITER. |
2387 | |
2388 | COND is the condition of the if. |
2389 | |
2390 | If THEN_MORE_LIKELY_P is true, the probability of the edge to the |
2391 | 'then block' is higher than the probability of the edge to the |
2392 | fallthrough block. |
2393 | |
2394 | Upon completion of the function, *THEN_BB is set to the newly |
2395 | inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the |
2396 | fallthrough block. |
2397 | |
2398 | *ITER is adjusted to still point to the same statement it was |
2399 | pointing to initially. */ |
2400 | |
2401 | static void |
2402 | insert_if_then_before_iter (gcond *cond, |
2403 | gimple_stmt_iterator *iter, |
2404 | bool then_more_likely_p, |
2405 | basic_block *then_bb, |
2406 | basic_block *fallthrough_bb) |
2407 | { |
2408 | gimple_stmt_iterator cond_insert_point = |
2409 | create_cond_insert_point (iter, |
2410 | /*before_p=*/true, |
2411 | then_more_likely_p, |
2412 | /*create_then_fallthru_edge=*/true, |
2413 | then_bb, |
2414 | fallthrough_bb); |
2415 | gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT); |
2416 | } |
2417 | |
2418 | /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). |
2419 | If RETURN_ADDRESS is set to true, return the memory location instead |
2420 | of a value in the shadow memory. */ |
2421 | |
2422 | static tree |
2423 | build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location, |
2424 | tree base_addr, tree shadow_ptr_type, |
2425 | bool return_address = false) |
2426 | { |
2427 | tree t, uintptr_type = TREE_TYPE (base_addr)((contains_struct_check ((base_addr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2427, __FUNCTION__))->typed.type); |
2428 | tree shadow_type = TREE_TYPE (shadow_ptr_type)((contains_struct_check ((shadow_ptr_type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2428, __FUNCTION__))->typed.type); |
2429 | gimple *g; |
2430 | |
2431 | t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT3); |
2432 | g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR, |
2433 | base_addr, t); |
2434 | gimple_set_location (g, location); |
2435 | gsi_insert_after (gsi, g, GSI_NEW_STMT); |
2436 | |
2437 | t = build_int_cst (uintptr_type, asan_shadow_offset ()); |
2438 | g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR, |
2439 | gimple_assign_lhs (g), t); |
2440 | gimple_set_location (g, location); |
2441 | gsi_insert_after (gsi, g, GSI_NEW_STMT); |
2442 | |
2443 | g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR, |
2444 | gimple_assign_lhs (g)); |
2445 | gimple_set_location (g, location); |
2446 | gsi_insert_after (gsi, g, GSI_NEW_STMT); |
2447 | |
2448 | if (!return_address) |
2449 | { |
2450 | t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g), |
2451 | build_int_cst (shadow_ptr_type, 0)); |
2452 | g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t); |
2453 | gimple_set_location (g, location); |
2454 | gsi_insert_after (gsi, g, GSI_NEW_STMT); |
2455 | } |
2456 | |
2457 | return gimple_assign_lhs (g); |
2458 | } |
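Written as plain C, the value the emitted statements compute is roughly the following (a sketch, not code from this file; ASAN_SHADOW_SHIFT is 3 for the targets handled here):

  unsigned long shadow_addr = (base_addr >> 3) + asan_shadow_offset ();
  char shadow_val = *(char *) shadow_addr;   /* load skipped when return_address is true */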
2459 | |
2460 | /* BASE can already be an SSA_NAME; in that case, do not create a |
2461 | new SSA_NAME for it. */ |
2462 | |
2463 | static tree |
2464 | maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter, |
2465 | bool before_p) |
2466 | { |
2467 | STRIP_USELESS_TYPE_CONVERSION (base)(base) = tree_ssa_strip_useless_type_conversions (base); |
2468 | if (TREE_CODE (base)((enum tree_code) (base)->base.code) == SSA_NAME) |
2469 | return base; |
2470 | gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2470, __FUNCTION__))->typed.type)), base); |
2471 | gimple_set_location (g, loc); |
2472 | if (before_p) |
2473 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
2474 | else |
2475 | gsi_insert_after (iter, g, GSI_NEW_STMT); |
2476 | return gimple_assign_lhs (g); |
2477 | } |
2478 | |
2479 | /* LEN can already have the necessary size and precision; |
2480 | in that case, do not create a new variable. */ |
2481 | |
2482 | tree |
2483 | maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter, |
2484 | bool before_p) |
2485 | { |
2486 | if (ptrofftype_p (len)) |
2487 | return len; |
2488 | gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_nodeglobal_trees[TI_POINTER_SIZED_TYPE]), |
2489 | NOP_EXPR, len); |
2490 | gimple_set_location (g, loc); |
2491 | if (before_p) |
2492 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
2493 | else |
2494 | gsi_insert_after (iter, g, GSI_NEW_STMT); |
2495 | return gimple_assign_lhs (g); |
2496 | } |
2497 | |
2498 | /* Instrument the memory access instruction BASE. Insert new |
2499 | statements before or after ITER. |
2500 | |
2501 | Note that the memory access represented by BASE can be either an |
2502 | SSA_NAME, or a non-SSA expression. LOCATION is the source code |
2503 | location. IS_STORE is TRUE for a store, FALSE for a load. |
2504 | BEFORE_P is TRUE for inserting the instrumentation code before |
2505 | ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE |
2506 | for a scalar memory access and FALSE for memory region access. |
2507 | IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have |
2508 | non-zero length. ALIGN is the alignment of the accessed memory object. |
2509 | |
2510 | START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of |
2511 | memory region have already been instrumented. |
2512 | |
2513 | If BEFORE_P is TRUE, *ITER is arranged to still point to the |
2514 | statement it was pointing to prior to calling this function, |
2515 | otherwise, it points to the statement logically following it. */ |
2516 | |
2517 | static void |
2518 | build_check_stmt (location_t loc, tree base, tree len, |
2519 | HOST_WIDE_INTlong size_in_bytes, gimple_stmt_iterator *iter, |
2520 | bool is_non_zero_len, bool before_p, bool is_store, |
2521 | bool is_scalar_access, unsigned int align = 0) |
2522 | { |
2523 | gimple_stmt_iterator gsi = *iter; |
2524 | gimple *g; |
2525 | |
2526 | gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len))((void)(!(!(size_in_bytes > 0 && !is_non_zero_len) ) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2526, __FUNCTION__), 0 : 0)); |
2527 | gcc_assert (size_in_bytes == -1 || size_in_bytes >= 1)((void)(!(size_in_bytes == -1 || size_in_bytes >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2527, __FUNCTION__), 0 : 0)); |
2528 | |
2529 | gsi = *iter; |
2530 | |
2531 | base = unshare_expr (base); |
2532 | base = maybe_create_ssa_name (loc, base, &gsi, before_p); |
2533 | |
2534 | if (len) |
2535 | { |
2536 | len = unshare_expr (len); |
2537 | len = maybe_cast_to_ptrmode (loc, len, iter, before_p); |
2538 | } |
2539 | else |
2540 | { |
2541 | gcc_assert (size_in_bytes != -1)((void)(!(size_in_bytes != -1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2541, __FUNCTION__), 0 : 0)); |
2542 | len = build_int_cst (pointer_sized_int_nodeglobal_trees[TI_POINTER_SIZED_TYPE], size_in_bytes); |
2543 | } |
2544 | |
2545 | if (size_in_bytes > 1) |
2546 | { |
2547 | if ((size_in_bytes & (size_in_bytes - 1)) != 0 |
2548 | || size_in_bytes > 16) |
2549 | is_scalar_access = false; |
2550 | else if (align && align < size_in_bytes * BITS_PER_UNIT(8)) |
2551 | { |
2552 | /* On non-strict-alignment targets, a |
2553 | 16-byte access that is only 8-byte |
2554 | aligned results in a misaligned 2-byte |
2555 | shadow memory load, but it can still |
2556 | be handled using one read. */ |
2557 | if (size_in_bytes != 16 |
2558 | || STRICT_ALIGNMENT0 |
2559 | || align < 8 * BITS_PER_UNIT(8)) |
2560 | is_scalar_access = false; |
2561 | } |
2562 | } |
2563 | |
2564 | HOST_WIDE_INTlong flags = 0; |
2565 | if (is_store) |
2566 | flags |= ASAN_CHECK_STORE; |
2567 | if (is_non_zero_len) |
2568 | flags |= ASAN_CHECK_NON_ZERO_LEN; |
2569 | if (is_scalar_access) |
2570 | flags |= ASAN_CHECK_SCALAR_ACCESS; |
2571 | |
2572 | enum internal_fn fn = hwasan_sanitize_p () |
2573 | ? IFN_HWASAN_CHECK |
2574 | : IFN_ASAN_CHECK; |
2575 | |
2576 | g = gimple_build_call_internal (fn, 4, |
2577 | build_int_cst (integer_type_nodeinteger_types[itk_int], flags), |
2578 | base, len, |
2579 | build_int_cst (integer_type_nodeinteger_types[itk_int], |
2580 | align / BITS_PER_UNIT(8))); |
2581 | gimple_set_location (g, loc); |
2582 | if (before_p) |
2583 | gsi_insert_before (&gsi, g, GSI_SAME_STMT); |
2584 | else |
2585 | { |
2586 | gsi_insert_after (&gsi, g, GSI_NEW_STMT); |
2587 | gsi_next (&gsi); |
2588 | *iter = gsi; |
2589 | } |
2590 | } |
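The function above therefore emits one internal call that a later asan pass expands into the real shadow-memory test. For an aligned 8-byte scalar store it looks roughly like this in a GIMPLE dump (a sketch; the SSA name is invented):

  .ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
               | ASAN_CHECK_SCALAR_ACCESS, base_1, 8, 8);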
2591 | |
2592 | /* If T represents a memory access, add instrumentation code before ITER. |
2593 | LOCATION is source code location. |
2594 | IS_STORE is either TRUE (for a store) or FALSE (for a load). */ |
2595 | |
2596 | static void |
2597 | instrument_derefs (gimple_stmt_iterator *iter, tree t, |
2598 | location_t location, bool is_store) |
2599 | { |
2600 | if (is_store && !(asan_instrument_writes () || hwasan_instrument_writes ())) |
2601 | return; |
2602 | if (!is_store && !(asan_instrument_reads () || hwasan_instrument_reads ())) |
2603 | return; |
2604 | |
2605 | tree type, base; |
2606 | HOST_WIDE_INTlong size_in_bytes; |
2607 | if (location == UNKNOWN_LOCATION((location_t) 0)) |
2608 | location = EXPR_LOCATION (t)((((t)) && ((tree_code_type[(int) (((enum tree_code) ( (t))->base.code))]) >= tcc_reference && (tree_code_type [(int) (((enum tree_code) ((t))->base.code))]) <= tcc_expression )) ? (t)->exp.locus : ((location_t) 0)); |
2609 | |
2610 | type = TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2610, __FUNCTION__))->typed.type); |
2611 | switch (TREE_CODE (t)((enum tree_code) (t)->base.code)) |
2612 | { |
2613 | case ARRAY_REF: |
2614 | case COMPONENT_REF: |
2615 | case INDIRECT_REF: |
2616 | case MEM_REF: |
2617 | case VAR_DECL: |
2618 | case BIT_FIELD_REF: |
2619 | break; |
2620 | /* FALLTHRU */ |
2621 | default: |
2622 | return; |
2623 | } |
2624 | |
2625 | size_in_bytes = int_size_in_bytes (type); |
2626 | if (size_in_bytes <= 0) |
2627 | return; |
2628 | |
2629 | poly_int64 bitsize, bitpos; |
2630 | tree offset; |
2631 | machine_mode mode; |
2632 | int unsignedp, reversep, volatilep = 0; |
2633 | tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode, |
2634 | &unsignedp, &reversep, &volatilep); |
2635 | |
2636 | if (TREE_CODE (t)((enum tree_code) (t)->base.code) == COMPONENT_REF |
2637 | && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1))((tree_check (((*((const_cast<tree*> (tree_operand_check ((t), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2637, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2637, __FUNCTION__, (FIELD_DECL)))->field_decl.qualifier ) != NULL_TREE(tree) nullptr) |
2638 | { |
2639 | tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1))((tree_check (((*((const_cast<tree*> (tree_operand_check ((t), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2639, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2639, __FUNCTION__, (FIELD_DECL)))->field_decl.qualifier ); |
2640 | instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr)((contains_struct_check ((repr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2640, __FUNCTION__))->typed.type), |
2641 | TREE_OPERAND (t, 0)(*((const_cast<tree*> (tree_operand_check ((t), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2641, __FUNCTION__))))), repr, |
2642 | TREE_OPERAND (t, 2)(*((const_cast<tree*> (tree_operand_check ((t), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2642, __FUNCTION__)))))), |
2643 | location, is_store); |
2644 | return; |
2645 | } |
2646 | |
2647 | if (!multiple_p (bitpos, BITS_PER_UNIT(8)) |
2648 | || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT(8))) |
2649 | return; |
2650 | |
2651 | if (VAR_P (inner)(((enum tree_code) (inner)->base.code) == VAR_DECL) && DECL_HARD_REGISTER (inner)((tree_check ((inner), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2651, __FUNCTION__, (VAR_DECL)))->decl_with_vis.hard_register )) |
2652 | return; |
2653 | |
2654 | poly_int64 decl_size; |
2655 | if (VAR_P (inner)(((enum tree_code) (inner)->base.code) == VAR_DECL) |
2656 | && offset == NULL_TREE(tree) nullptr |
2657 | && DECL_SIZE (inner)((contains_struct_check ((inner), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2657, __FUNCTION__))->decl_common.size) |
2658 | && poly_int_tree_p (DECL_SIZE (inner)((contains_struct_check ((inner), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2658, __FUNCTION__))->decl_common.size), &decl_size) |
2659 | && known_subrange_p (bitpos, bitsize, 0, decl_size)) |
2660 | { |
2661 | if (DECL_THREAD_LOCAL_P (inner)((((inner)->base.static_flag) || ((contains_struct_check ( (inner), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2661, __FUNCTION__))->decl_common.decl_flag_1)) && decl_tls_model (inner) >= TLS_MODEL_REAL)) |
2662 | return; |
2663 | /* If we're not sanitizing globals and we can tell statically that this |
2664 | access is inside a global variable, then there's no point adding |
2665 | instrumentation to check the access. N.b. hwasan currently never |
2666 | sanitizes globals. */ |
2667 | if ((hwasan_sanitize_p () || !param_asan_globalsglobal_options.x_param_asan_globals) |
2668 | && is_global_var (inner)) |
2669 | return; |
2670 | if (!TREE_STATIC (inner)((inner)->base.static_flag)) |
2671 | { |
2672 | /* Automatic vars in the current function will always be |
2673 | accessible. */ |
2674 | if (decl_function_context (inner) == current_function_decl |
2675 | && (!asan_sanitize_use_after_scope () |
2676 | || !TREE_ADDRESSABLE (inner)((inner)->base.addressable_flag))) |
2677 | return; |
2678 | } |
2679 | /* Always instrument external vars, as they might be dynamically |
2680 | initialized. */ |
2681 | else if (!DECL_EXTERNAL (inner)((contains_struct_check ((inner), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2681, __FUNCTION__))->decl_common.decl_flag_1)) |
2682 | { |
2683 | /* For static vars, if they are known not to be dynamically |
2684 | initialized, they will always be accessible. */ |
2685 | varpool_node *vnode = varpool_node::get (inner); |
2686 | if (vnode && !vnode->dynamically_initialized) |
2687 | return; |
2688 | } |
2689 | } |
2690 | |
2691 | base = build_fold_addr_expr (t)build_fold_addr_expr_loc (((location_t) 0), (t)); |
2692 | if (!has_mem_ref_been_instrumented (base, size_in_bytes)) |
2693 | { |
2694 | unsigned int align = get_object_alignment (t); |
2695 | build_check_stmt (location, base, NULL_TREE(tree) nullptr, size_in_bytes, iter, |
2696 | /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true, |
2697 | is_store, /*is_scalar_access*/true, align); |
2698 | update_mem_ref_hash_table (base, size_in_bytes); |
2699 | update_mem_ref_hash_table (t, size_in_bytes); |
2700 | } |
2701 | |
2702 | } |
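A small example of how the filtering above plays out (an assumed C snippet under default -fsanitize=address options; "normally" because the exact outcome depends on the varpool and parameter settings):

  int g;                /* statically initialized global                       */
  void f (int *p, int i)
  {
    int local;
    local = g;          /* both sides lie fully within known decls and the
                           local is never addressable: normally skipped        */
    p[i] = local;       /* store through a pointer: gets an ASAN check         */
  }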
2703 | |
2704 | /* Insert a memory reference into the hash table if access length |
2705 | can be determined at compile time. */ |
2706 | |
2707 | static void |
2708 | maybe_update_mem_ref_hash_table (tree base, tree len) |
2709 | { |
2710 | if (!POINTER_TYPE_P (TREE_TYPE (base))(((enum tree_code) (((contains_struct_check ((base), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2710, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE || ((enum tree_code) (((contains_struct_check ((base), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2710, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE ) |
2711 | || !INTEGRAL_TYPE_P (TREE_TYPE (len))(((enum tree_code) (((contains_struct_check ((len), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2711, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE || ((enum tree_code) (((contains_struct_check ((len), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2711, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE || ((enum tree_code) (((contains_struct_check ((len), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2711, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE )) |
2712 | return; |
2713 | |
2714 | HOST_WIDE_INTlong size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1; |
2715 | |
2716 | if (size_in_bytes != -1) |
2717 | update_mem_ref_hash_table (base, size_in_bytes); |
2718 | } |
2719 | |
2720 | /* Instrument an access to a contiguous memory region that starts at |
2721 | the address pointed to by BASE, over a length of LEN (expressed in |
2722 | units of sizeof (*BASE)). ITER points to the instruction before |
2723 | which the instrumentation instructions must be inserted. LOCATION |
2724 | is the source location that the instrumentation instructions must |
2725 | have. If IS_STORE is true, then the memory access is a store; |
2726 | otherwise, it's a load. */ |
2727 | |
2728 | static void |
2729 | instrument_mem_region_access (tree base, tree len, |
2730 | gimple_stmt_iterator *iter, |
2731 | location_t location, bool is_store) |
2732 | { |
2733 | if (!POINTER_TYPE_P (TREE_TYPE (base))(((enum tree_code) (((contains_struct_check ((base), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2733, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE || ((enum tree_code) (((contains_struct_check ((base), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2733, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE ) |
2734 | || !INTEGRAL_TYPE_P (TREE_TYPE (len))(((enum tree_code) (((contains_struct_check ((len), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2734, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE || ((enum tree_code) (((contains_struct_check ((len), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2734, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE || ((enum tree_code) (((contains_struct_check ((len), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2734, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE ) |
2735 | || integer_zerop (len)) |
2736 | return; |
2737 | |
2738 | HOST_WIDE_INTlong size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1; |
2739 | |
2740 | if ((size_in_bytes == -1) |
2741 | || !has_mem_ref_been_instrumented (base, size_in_bytes)) |
2742 | { |
2743 | build_check_stmt (location, base, len, size_in_bytes, iter, |
2744 | /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true, |
2745 | is_store, /*is_scalar_access*/false, /*align*/0); |
2746 | } |
2747 | |
2748 | maybe_update_mem_ref_hash_table (base, len); |
2749 | *iter = gsi_for_stmt (gsi_stmt (*iter)); |
2750 | } |
2751 | |
2752 | /* Instrument the call to a built-in memory access function that is |
2753 | pointed to by the iterator ITER. |
2754 | |
2755 | Upon completion, return TRUE iff *ITER has been advanced to the |
2756 | statement following the one it was originally pointing to. */ |
2757 | |
2758 | static bool |
2759 | instrument_builtin_call (gimple_stmt_iterator *iter) |
2760 | { |
2761 | if (!(asan_memintrin () || hwasan_memintrin ())) |
2762 | return false; |
2763 | |
2764 | bool iter_advanced_p = false; |
2765 | gcall *call = as_a <gcall *> (gsi_stmt (*iter)); |
2766 | |
2767 | gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL))((void)(!(gimple_call_builtin_p (call, BUILT_IN_NORMAL)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2767, __FUNCTION__), 0 : 0)); |
2768 | |
2769 | location_t loc = gimple_location (call); |
2770 | |
2771 | asan_mem_ref src0, src1, dest; |
2772 | asan_mem_ref_init (&src0, NULLnullptr, 1); |
2773 | asan_mem_ref_init (&src1, NULLnullptr, 1); |
2774 | asan_mem_ref_init (&dest, NULLnullptr, 1); |
2775 | |
2776 | tree src0_len = NULL_TREE(tree) nullptr, src1_len = NULL_TREE(tree) nullptr, dest_len = NULL_TREE(tree) nullptr; |
2777 | bool src0_is_store = false, src1_is_store = false, dest_is_store = false, |
2778 | dest_is_deref = false, intercepted_p = true; |
2779 | |
2780 | if (get_mem_refs_of_builtin_call (call, |
2781 | &src0, &src0_len, &src0_is_store, |
2782 | &src1, &src1_len, &src1_is_store, |
2783 | &dest, &dest_len, &dest_is_store, |
2784 | &dest_is_deref, &intercepted_p, iter)) |
2785 | { |
2786 | if (dest_is_deref) |
2787 | { |
2788 | instrument_derefs (iter, dest.start, loc, dest_is_store); |
2789 | gsi_next (iter); |
2790 | iter_advanced_p = true; |
2791 | } |
2792 | else if (!intercepted_p |
2793 | && (src0_len || src1_len || dest_len)) |
2794 | { |
2795 | if (src0.start != NULL_TREE(tree) nullptr) |
2796 | instrument_mem_region_access (src0.start, src0_len, |
2797 | iter, loc, /*is_store=*/false); |
2798 | if (src1.start != NULL_TREE(tree) nullptr) |
2799 | instrument_mem_region_access (src1.start, src1_len, |
2800 | iter, loc, /*is_store=*/false); |
2801 | if (dest.start != NULL_TREE(tree) nullptr) |
2802 | instrument_mem_region_access (dest.start, dest_len, |
2803 | iter, loc, /*is_store=*/true); |
2804 | |
2805 | *iter = gsi_for_stmt (call); |
2806 | gsi_next (iter); |
2807 | iter_advanced_p = true; |
2808 | } |
2809 | else |
2810 | { |
2811 | if (src0.start != NULL_TREE(tree) nullptr) |
2812 | maybe_update_mem_ref_hash_table (src0.start, src0_len); |
2813 | if (src1.start != NULL_TREE(tree) nullptr) |
2814 | maybe_update_mem_ref_hash_table (src1.start, src1_len); |
2815 | if (dest.start != NULL_TREE(tree) nullptr) |
2816 | maybe_update_mem_ref_hash_table (dest.start, dest_len); |
2817 | } |
2818 | } |
2819 | return iter_advanced_p; |
2820 | } |
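When get_mem_refs_of_builtin_call reports region accesses for a builtin that the runtime does not already intercept, the result is one region check per source and destination; schematically (a sketch with a hypothetical builtin b and invented SSA names):

  .ASAN_CHECK (0, src_1, len_2, 0);                   /* read  of [src, src+len)   */
  .ASAN_CHECK (ASAN_CHECK_STORE, dest_3, len_2, 0);   /* write of [dest, dest+len) */
  b (dest_3, src_1, len_2);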
2821 | |
2822 | /* Instrument the assignment statement ITER if it is subject to |
2823 | instrumentation. Return TRUE iff instrumentation actually |
2824 | happened. In that case, the iterator ITER is advanced to the next |
2825 | logical expression following the one initially pointed to by ITER, |
2826 | and the relevant memory reference whose access has been |
2827 | instrumented is added to the memory references hash table. */ |
2828 | |
2829 | static bool |
2830 | maybe_instrument_assignment (gimple_stmt_iterator *iter) |
2831 | { |
2832 | gimple *s = gsi_stmt (*iter); |
2833 | |
2834 | gcc_assert (gimple_assign_single_p (s))((void)(!(gimple_assign_single_p (s)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 2834, __FUNCTION__), 0 : 0)); |
2835 | |
2836 | tree ref_expr = NULL_TREE(tree) nullptr; |
2837 | bool is_store, is_instrumented = false; |
2838 | |
2839 | if (gimple_store_p (s)) |
2840 | { |
2841 | ref_expr = gimple_assign_lhs (s); |
2842 | is_store = true; |
2843 | instrument_derefs (iter, ref_expr, |
2844 | gimple_location (s), |
2845 | is_store); |
2846 | is_instrumented = true; |
2847 | } |
2848 | |
2849 | if (gimple_assign_load_p (s)) |
2850 | { |
2851 | ref_expr = gimple_assign_rhs1 (s); |
2852 | is_store = false; |
2853 | instrument_derefs (iter, ref_expr, |
2854 | gimple_location (s), |
2855 | is_store); |
2856 | is_instrumented = true; |
2857 | } |
2858 | |
2859 | if (is_instrumented) |
2860 | gsi_next (iter); |
2861 | |
2862 | return is_instrumented; |
2863 | } |
2864 | |
2865 | /* Instrument the function call pointed to by the iterator ITER, if it |
2866 | is subject to instrumentation. At the moment, the only function |
2867 | calls that are instrumented are some built-in functions that access |
2868 | memory. Look at instrument_builtin_call to learn more. |
2869 | |
2870 | Upon completion return TRUE iff *ITER was advanced to the statement |
2871 | following the one it was originally pointing to. */ |
2872 | |
2873 | static bool |
2874 | maybe_instrument_call (gimple_stmt_iterator *iter) |
2875 | { |
2876 | gimple *stmt = gsi_stmt (*iter); |
2877 | bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL); |
2878 | |
2879 | if (is_builtin && instrument_builtin_call (iter)) |
2880 | return true; |
2881 | |
2882 | if (gimple_call_noreturn_p (stmt)) |
2883 | { |
2884 | if (is_builtin) |
2885 | { |
2886 | tree callee = gimple_call_fndecl (stmt); |
2887 | switch (DECL_FUNCTION_CODE (callee)) |
2888 | { |
2889 | case BUILT_IN_UNREACHABLE: |
2890 | case BUILT_IN_TRAP: |
2891 | /* Don't instrument these. */ |
2892 | return false; |
2893 | default: |
2894 | break; |
2895 | } |
2896 | } |
2897 | /* If a function does not return, then we must handle clearing up the |
2898 | shadow stack accordingly. For ASAN we can simply set the entire stack |
2899 | to "valid" for accesses by setting the shadow space to 0 and all |
2900 | accesses will pass checks. That means that some bad accesses may be |
2901 | missed, but we will not report any false positives. |
2902 | |
2903 | This is not possible for HWASAN. Since there is no "always valid" tag |
2904 | we cannot set any space to "always valid". If we were to clear the |
2905 | entire shadow stack then code resuming from `longjmp` or a caught |
2906 | exception would trigger false positives when correctly accessing |
2907 | variables on the stack. Hence we need to handle things like |
2908 | `longjmp`, thread exit, and exceptions in a different way. These |
2909 | problems must be handled externally to the compiler, e.g. in the |
2910 | language runtime. */ |
2911 | if (! hwasan_sanitize_p ()) |
2912 | { |
2913 | tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN); |
2914 | gimple *g = gimple_build_call (decl, 0); |
2915 | gimple_set_location (g, gimple_location (stmt)); |
2916 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
2917 | } |
2918 | } |
2919 | |
2920 | bool instrumented = false; |
2921 | if (gimple_store_p (stmt)) |
2922 | { |
2923 | tree ref_expr = gimple_call_lhs (stmt); |
2924 | instrument_derefs (iter, ref_expr, |
2925 | gimple_location (stmt), |
2926 | /*is_store=*/true); |
2927 | |
2928 | instrumented = true; |
2929 | } |
2930 | |
2931 | /* Walk through gimple_call arguments and check them if needed. */ |
2932 | unsigned args_num = gimple_call_num_args (stmt); |
2933 | for (unsigned i = 0; i < args_num; ++i) |
2934 | { |
2935 | tree arg = gimple_call_arg (stmt, i); |
2936 | /* If ARG is not a non-aggregate register variable, the compiler in general |
2937 | creates a temporary for it and passes it as an argument to the gimple call. |
2938 | But in some cases, e.g. when we pass by value a small structure that |
2939 | fits in a register, the compiler can avoid that extra overhead by not |
2940 | creating these temporaries. In this case, we should check the argument. */ |
2941 | if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg)) |
2942 | { |
2943 | instrument_derefs (iter, arg, |
2944 | gimple_location (stmt), |
2945 | /*is_store=*/false); |
2946 | instrumented = true; |
2947 | } |
2948 | } |
2949 | if (instrumented) |
2950 | gsi_next (iter); |
2951 | return instrumented; |
2952 | } |
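Concretely, the ASAN branch of the noreturn handling above inserts a single runtime call right before the original statement; schematically (assumed noreturn function, i.e. anything other than the excluded trap/unreachable builtins):

  __asan_handle_no_return ();   /* marks the whole stack shadow as valid */
  some_noreturn_function ();    /* the original noreturn call            */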
2953 | |
2954 | /* Walk each instruction of all basic blocks and instrument those that |
2955 | represent memory references: loads, stores, or function calls. |
2956 | In a given basic block, this function avoids instrumenting memory |
2957 | references that have already been instrumented. */ |
2958 | |
2959 | static void |
2960 | transform_statements (void) |
2961 | { |
2962 | basic_block bb, last_bb = NULLnullptr; |
2963 | gimple_stmt_iterator i; |
2964 | int saved_last_basic_block = last_basic_block_for_fn (cfun)(((cfun + 0))->cfg->x_last_basic_block); |
2965 | |
2966 | FOR_EACH_BB_FN (bb, cfun)for (bb = ((cfun + 0))->cfg->x_entry_block_ptr->next_bb ; bb != ((cfun + 0))->cfg->x_exit_block_ptr; bb = bb-> next_bb) |
2967 | { |
2968 | basic_block prev_bb = bb; |
2969 | |
2970 | if (bb->index >= saved_last_basic_block) continue; |
2971 | |
2972 | /* Flush the mem ref hash table, if current bb doesn't have |
2973 | exactly one predecessor, or if that predecessor (skipping |
2974 | over asan created basic blocks) isn't the last processed |
2975 | basic block. Thus we effectively flush on extended basic |
2976 | block boundaries. */ |
2977 | while (single_pred_p (prev_bb)) |
2978 | { |
2979 | prev_bb = single_pred (prev_bb); |
2980 | if (prev_bb->index < saved_last_basic_block) |
2981 | break; |
2982 | } |
2983 | if (prev_bb != last_bb) |
2984 | empty_mem_ref_hash_table (); |
2985 | last_bb = bb; |
2986 | |
2987 | for (i = gsi_start_bb (bb); !gsi_end_p (i);) |
2988 | { |
2989 | gimple *s = gsi_stmt (i); |
2990 | |
2991 | if (has_stmt_been_instrumented_p (s)) |
2992 | gsi_next (&i); |
2993 | else if (gimple_assign_single_p (s) |
2994 | && !gimple_clobber_p (s) |
2995 | && maybe_instrument_assignment (&i)) |
2996 | /* Nothing to do as maybe_instrument_assignment advanced |
2997 | the iterator I. */; |
2998 | else if (is_gimple_call (s) && maybe_instrument_call (&i)) |
2999 | /* Nothing to do as maybe_instrument_call |
3000 | advanced the iterator I. */; |
3001 | else |
3002 | { |
3003 | /* No instrumentation happened. |
3004 | |
3005 | If the current instruction is a function call that |
3006 | might free something, let's forget about the memory |
3007 | references that got instrumented. Otherwise we might |
3008 | miss some instrumentation opportunities. Do the same |
3009 | for an ASAN_MARK poisoning internal function. */ |
3010 | if (is_gimple_call (s) |
3011 | && (!nonfreeing_call_p (s) |
3012 | || asan_mark_p (s, ASAN_MARK_POISON))) |
3013 | empty_mem_ref_hash_table (); |
3014 | |
3015 | gsi_next (&i); |
3016 | } |
3017 | } |
3018 | } |
3019 | free_mem_ref_resources (); |
3020 | } |
3021 | |
3022 | /* Build |
3023 | __asan_before_dynamic_init (module_name) |
3024 | or |
3025 | __asan_after_dynamic_init () |
3026 | call. */ |
3027 | |
3028 | tree |
3029 | asan_dynamic_init_call (bool after_p) |
3030 | { |
3031 | if (shadow_ptr_types[0] == NULL_TREE(tree) nullptr) |
3032 | asan_init_shadow_ptr_types (); |
3033 | |
3034 | tree fn = builtin_decl_implicit (after_p |
3035 | ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT |
3036 | : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT); |
3037 | tree module_name_cst = NULL_TREE(tree) nullptr; |
3038 | if (!after_p) |
3039 | { |
3040 | pretty_printer module_name_pp; |
3041 | pp_string (&module_name_pp, main_input_filenameglobal_options.x_main_input_filename); |
3042 | |
3043 | module_name_cst = asan_pp_string (&module_name_pp); |
3044 | module_name_cst = fold_convert (const_ptr_type_node,fold_convert_loc (((location_t) 0), global_trees[TI_CONST_PTR_TYPE ], module_name_cst) |
3045 | module_name_cst)fold_convert_loc (((location_t) 0), global_trees[TI_CONST_PTR_TYPE ], module_name_cst); |
3046 | } |
3047 | |
3048 | return build_call_expr (fn, after_p ? 0 : 1, module_name_cst); |
3049 | } |
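These calls are meant to bracket a translation unit's dynamic initializers so the runtime can catch initialization-order bugs; the registered constructor therefore ends up doing, schematically (a sketch, file name assumed):

  __asan_before_dynamic_init ("foo.c");   /* module_name_cst built above   */
  /* ... run this TU's dynamic initializers ... */
  __asan_after_dynamic_init ();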
3050 | |
3051 | /* Build |
3052 | struct __asan_global |
3053 | { |
3054 | const void *__beg; |
3055 | uptr __size; |
3056 | uptr __size_with_redzone; |
3057 | const void *__name; |
3058 | const void *__module_name; |
3059 | uptr __has_dynamic_init; |
3060 | __asan_global_source_location *__location; |
3061 | char *__odr_indicator; |
3062 | } type. */ |
3063 | |
3064 | static tree |
3065 | asan_global_struct (void) |
3066 | { |
3067 | static const char *field_names[] |
3068 | = { "__beg", "__size", "__size_with_redzone", |
3069 | "__name", "__module_name", "__has_dynamic_init", "__location", |
3070 | "__odr_indicator" }; |
3071 | tree fields[ARRAY_SIZE (field_names)(sizeof (field_names) / sizeof ((field_names)[0]))], ret; |
3072 | unsigned i; |
3073 | |
3074 | ret = make_node (RECORD_TYPE); |
3075 | for (i = 0; i < ARRAY_SIZE (field_names)(sizeof (field_names) / sizeof ((field_names)[0])); i++) |
3076 | { |
3077 | fields[i] |
3078 | = build_decl (UNKNOWN_LOCATION((location_t) 0), FIELD_DECL, |
3079 | get_identifier (field_names[i])(__builtin_constant_p (field_names[i]) ? get_identifier_with_length ((field_names[i]), strlen (field_names[i])) : get_identifier (field_names[i])), |
3080 | (i == 0 || i == 3) ? const_ptr_type_nodeglobal_trees[TI_CONST_PTR_TYPE] |
3081 | : pointer_sized_int_nodeglobal_trees[TI_POINTER_SIZED_TYPE]); |
3082 | DECL_CONTEXT (fields[i])((contains_struct_check ((fields[i]), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3082, __FUNCTION__))->decl_minimal.context) = ret; |
3083 | if (i) |
3084 | DECL_CHAIN (fields[i - 1])(((contains_struct_check (((contains_struct_check ((fields[i - 1]), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3084, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3084, __FUNCTION__))->common.chain)) = fields[i]; |
3085 | } |
3086 | tree type_decl = build_decl (input_location, TYPE_DECL, |
3087 | get_identifier ("__asan_global")(__builtin_constant_p ("__asan_global") ? get_identifier_with_length (("__asan_global"), strlen ("__asan_global")) : get_identifier ("__asan_global")), ret); |
3088 | DECL_IGNORED_P (type_decl)((contains_struct_check ((type_decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3088, __FUNCTION__))->decl_common.ignored_flag) = 1; |
3089 | DECL_ARTIFICIAL (type_decl)((contains_struct_check ((type_decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3089, __FUNCTION__))->decl_common.artificial_flag) = 1; |
3090 | TYPE_FIELDS (ret)((tree_check3 ((ret), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3090, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values) = fields[0]; |
3091 | TYPE_NAME (ret)((tree_class_check ((ret), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3091, __FUNCTION__))->type_common.name) = type_decl; |
3092 | TYPE_STUB_DECL (ret)(((contains_struct_check (((tree_class_check ((ret), (tcc_type ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3092, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3092, __FUNCTION__))->common.chain)) = type_decl; |
3093 | TYPE_ARTIFICIAL (ret)((tree_class_check ((ret), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3093, __FUNCTION__))->base.nowarning_flag) = 1; |
3094 | layout_type (ret); |
3095 | return ret; |
3096 | } |
3097 | |
3098 | /* Create and return odr indicator symbol for DECL. |
3099 | TYPE is __asan_global struct type as returned by asan_global_struct. */ |
3100 | |
3101 | static tree |
3102 | create_odr_indicator (tree decl, tree type) |
3103 | { |
3104 | char *name; |
3105 | tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)))((contains_struct_check (((((contains_struct_check (((contains_struct_check ((((tree_check3 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3105, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values)), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3105, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3105, __FUNCTION__))->common.chain))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3105, __FUNCTION__))->typed.type); |
3106 | tree decl_name |
3107 | = (HAS_DECL_ASSEMBLER_NAME_P (decl)((tree_contains_struct[(((enum tree_code) (decl)->base.code ))][(TS_DECL_WITH_VIS)])) ? DECL_ASSEMBLER_NAME (decl)decl_assembler_name (decl) |
3108 | : DECL_NAME (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3108, __FUNCTION__))->decl_minimal.name)); |
3109 | /* DECL_NAME theoretically might be NULL. Bail out with 0 in this case. */ |
3110 | if (decl_name == NULL_TREE(tree) nullptr) |
3111 | return build_int_cst (uptr, 0); |
3112 | const char *dname = IDENTIFIER_POINTER (decl_name)((const char *) (tree_check ((decl_name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3112, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str ); |
3113 | if (HAS_DECL_ASSEMBLER_NAME_P (decl)((tree_contains_struct[(((enum tree_code) (decl)->base.code ))][(TS_DECL_WITH_VIS)]))) |
3114 | dname = targetm.strip_name_encoding (dname); |
3115 | size_t len = strlen (dname) + sizeof ("__odr_asan_"); |
3116 | name = XALLOCAVEC (char, len)((char *) __builtin_alloca(sizeof (char) * (len))); |
3117 | snprintf (name, len, "__odr_asan_%s", dname); |
3118 | #ifndef NO_DOT_IN_LABEL |
3119 | name[sizeof ("__odr_asan") - 1] = '.'; |
3120 | #elif !defined(NO_DOLLAR_IN_LABEL) |
3121 | name[sizeof ("__odr_asan") - 1] = '$'; |
3122 | #endif |
3123 | tree var = build_decl (UNKNOWN_LOCATION((location_t) 0), VAR_DECL, get_identifier (name)(__builtin_constant_p (name) ? get_identifier_with_length ((name ), strlen (name)) : get_identifier (name)), |
3124 | char_type_nodeinteger_types[itk_char]); |
3125 | TREE_ADDRESSABLE (var)((var)->base.addressable_flag) = 1; |
3126 | TREE_READONLY (var)((non_type_check ((var), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3126, __FUNCTION__))->base.readonly_flag) = 0; |
3127 | TREE_THIS_VOLATILE (var)((var)->base.volatile_flag) = 1; |
3128 | DECL_ARTIFICIAL (var)((contains_struct_check ((var), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3128, __FUNCTION__))->decl_common.artificial_flag) = 1; |
3129 | DECL_IGNORED_P (var)((contains_struct_check ((var), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3129, __FUNCTION__))->decl_common.ignored_flag) = 1; |
3130 | TREE_STATIC (var)((var)->base.static_flag) = 1; |
3131 | TREE_PUBLIC (var)((var)->base.public_flag) = 1; |
3132 | DECL_VISIBILITY (var)((contains_struct_check ((var), (TS_DECL_WITH_VIS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3132, __FUNCTION__))->decl_with_vis.visibility) = DECL_VISIBILITY (decl)((contains_struct_check ((decl), (TS_DECL_WITH_VIS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3132, __FUNCTION__))->decl_with_vis.visibility); |
3133 | DECL_VISIBILITY_SPECIFIED (var)((contains_struct_check ((var), (TS_DECL_WITH_VIS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3133, __FUNCTION__))->decl_with_vis.visibility_specified ) = DECL_VISIBILITY_SPECIFIED (decl)((contains_struct_check ((decl), (TS_DECL_WITH_VIS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3133, __FUNCTION__))->decl_with_vis.visibility_specified ); |
3134 | |
3135 | TREE_USED (var)((var)->base.used_flag) = 1; |
3136 | tree ctor = build_constructor_va (TREE_TYPE (var)((contains_struct_check ((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3136, __FUNCTION__))->typed.type), 1, NULL_TREE(tree) nullptr, |
3137 | build_int_cst (unsigned_type_nodeinteger_types[itk_unsigned_int], 0)); |
3138 | TREE_CONSTANT (ctor)((non_type_check ((ctor), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3138, __FUNCTION__))->base.constant_flag) = 1; |
3139 | TREE_STATIC (ctor)((ctor)->base.static_flag) = 1; |
3140 | DECL_INITIAL (var)((contains_struct_check ((var), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3140, __FUNCTION__))->decl_common.initial) = ctor; |
3141 | DECL_ATTRIBUTES (var)((contains_struct_check ((var), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3141, __FUNCTION__))->decl_common.attributes) = tree_cons (get_identifier ("asan odr indicator")(__builtin_constant_p ("asan odr indicator") ? get_identifier_with_length (("asan odr indicator"), strlen ("asan odr indicator")) : get_identifier ("asan odr indicator")), |
3142 | NULLnullptr, DECL_ATTRIBUTES (var)((contains_struct_check ((var), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3142, __FUNCTION__))->decl_common.attributes)); |
3143 | make_decl_rtl (var); |
3144 | varpool_node::finalize_decl (var); |
3145 | return fold_convert (uptr, build_fold_addr_expr (var))fold_convert_loc (((location_t) 0), uptr, build_fold_addr_expr_loc (((location_t) 0), (var))); |
3146 | } |
3147 | |
3148 | /* Return true if DECL, a global var, might be overridden and needs |
3149 | an additional odr indicator symbol. */ |
3150 | |
3151 | static bool |
3152 | asan_needs_odr_indicator_p (tree decl) |
3153 | { |
3154 | /* Don't emit ODR indicators for kernel because: |
3155 | a) The kernel is written in C and thus doesn't need ODR indicators. |
3156 | b) Some kernel code may have assumptions about symbols containing specific |
3157 | patterns in their names. Since ODR indicators contain original names |
3158 | of symbols they are emitted for, these assumptions would be broken for |
3159 | ODR indicator symbols. */ |
3160 | return (!(flag_sanitizeglobal_options.x_flag_sanitize & SANITIZE_KERNEL_ADDRESS) |
3161 | && !DECL_ARTIFICIAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3161, __FUNCTION__))->decl_common.artificial_flag) |
3162 | && !DECL_WEAK (decl)((contains_struct_check ((decl), (TS_DECL_WITH_VIS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3162, __FUNCTION__))->decl_with_vis.weak_flag) |
3163 | && TREE_PUBLIC (decl)((decl)->base.public_flag)); |
3164 | } |
3165 | |
3166 | /* Append description of a single global DECL into vector V. |
3167 | TYPE is __asan_global struct type as returned by asan_global_struct. */ |
3168 | |
3169 | static void |
3170 | asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v) |
3171 | { |
3172 | tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)))((contains_struct_check (((((contains_struct_check (((contains_struct_check ((((tree_check3 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3172, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE )))->type_non_common.values)), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3172, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3172, __FUNCTION__))->common.chain))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3172, __FUNCTION__))->typed.type); |
3173 | unsigned HOST_WIDE_INTlong size; |
3174 | tree str_cst, module_name_cst, refdecl = decl; |
3175 | vec<constructor_elt, va_gc> *vinner = NULLnullptr; |
3176 | |
3177 | pretty_printer asan_pp, module_name_pp; |
3178 | |
3179 | if (DECL_NAME (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3179, __FUNCTION__))->decl_minimal.name)) |
3180 | pp_tree_identifier (&asan_pp, DECL_NAME (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3180, __FUNCTION__))->decl_minimal.name)); |
3181 | else |
3182 | pp_string (&asan_pp, "<unknown>"); |
3183 | str_cst = asan_pp_string (&asan_pp); |
3184 | |
3185 | pp_string (&module_name_pp, main_input_filenameglobal_options.x_main_input_filename); |
3186 | module_name_cst = asan_pp_string (&module_name_pp); |
3187 | |
3188 | if (asan_needs_local_alias (decl)) |
3189 | { |
3190 | char buf[20]; |
3191 | ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1)do { char *__p; (buf)[0] = '*'; (buf)[1] = '.'; __p = stpcpy ( &(buf)[2], "LASAN"); sprint_ul (__p, (unsigned long) (vec_safe_length (v) + 1)); } while (0); |
3192 | refdecl = build_decl (DECL_SOURCE_LOCATION (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3192, __FUNCTION__))->decl_minimal.locus), |
3193 | VAR_DECL, get_identifier (buf)(__builtin_constant_p (buf) ? get_identifier_with_length ((buf ), strlen (buf)) : get_identifier (buf)), TREE_TYPE (decl)((contains_struct_check ((decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3193, __FUNCTION__))->typed.type)); |
3194 | TREE_ADDRESSABLE (refdecl)((refdecl)->base.addressable_flag) = TREE_ADDRESSABLE (decl)((decl)->base.addressable_flag); |
3195 | TREE_READONLY (refdecl)((non_type_check ((refdecl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3195, __FUNCTION__))->base.readonly_flag) = TREE_READONLY (decl)((non_type_check ((decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3195, __FUNCTION__))->base.readonly_flag); |
3196 | TREE_THIS_VOLATILE (refdecl)((refdecl)->base.volatile_flag) = TREE_THIS_VOLATILE (decl)((decl)->base.volatile_flag); |
3197 | DECL_NOT_GIMPLE_REG_P (refdecl)(contains_struct_check ((refdecl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3197, __FUNCTION__))->decl_common.not_gimple_reg_flag = DECL_NOT_GIMPLE_REG_P (decl)(contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3197, __FUNCTION__))->decl_common.not_gimple_reg_flag; |
3198 | DECL_ARTIFICIAL (refdecl)((contains_struct_check ((refdecl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3198, __FUNCTION__))->decl_common.artificial_flag) = DECL_ARTIFICIAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3198, __FUNCTION__))->decl_common.artificial_flag); |
3199 | DECL_IGNORED_P (refdecl)((contains_struct_check ((refdecl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3199, __FUNCTION__))->decl_common.ignored_flag) = DECL_IGNORED_P (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3199, __FUNCTION__))->decl_common.ignored_flag); |
3200 | TREE_STATIC (refdecl)((refdecl)->base.static_flag) = 1; |
3201 | TREE_PUBLIC (refdecl)((refdecl)->base.public_flag) = 0; |
3202 | TREE_USED (refdecl)((refdecl)->base.used_flag) = 1; |
3203 | assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl)decl_assembler_name (decl)); |
3204 | } |
3205 | |
3206 | tree odr_indicator_ptr |
3207 | = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type) |
3208 | : build_int_cst (uptr, 0)); |
3209 | CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,do { constructor_elt _ce___ = {(tree) nullptr, fold_convert_loc (((location_t) 0), global_trees[TI_CONST_PTR_TYPE], build_fold_addr_expr_loc (((location_t) 0), (refdecl)))}; vec_safe_push ((vinner), _ce___ ); } while (0) |
3210 | fold_convert (const_ptr_type_node,do { constructor_elt _ce___ = {(tree) nullptr, fold_convert_loc (((location_t) 0), global_trees[TI_CONST_PTR_TYPE], build_fold_addr_expr_loc (((location_t) 0), (refdecl)))}; vec_safe_push ((vinner), _ce___ ); } while (0) |
3211 | build_fold_addr_expr (refdecl)))do { constructor_elt _ce___ = {(tree) nullptr, fold_convert_loc (((location_t) 0), global_trees[TI_CONST_PTR_TYPE], build_fold_addr_expr_loc (((location_t) 0), (refdecl)))}; vec_safe_push ((vinner), _ce___ ); } while (0); |
3212 | size = tree_to_uhwi (DECL_SIZE_UNIT (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3212, __FUNCTION__))->decl_common.size_unit)); |
3213 | CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size))do { constructor_elt _ce___ = {(tree) nullptr, build_int_cst ( uptr, size)}; vec_safe_push ((vinner), _ce___); } while (0); |
3214 | size += asan_red_zone_size (size); |
3215 | CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size))do { constructor_elt _ce___ = {(tree) nullptr, build_int_cst ( uptr, size)}; vec_safe_push ((vinner), _ce___); } while (0); |
3216 | CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,do { constructor_elt _ce___ = {(tree) nullptr, fold_convert_loc (((location_t) 0), global_trees[TI_CONST_PTR_TYPE], str_cst) }; vec_safe_push ((vinner), _ce___); } while (0) |
3217 | fold_convert (const_ptr_type_node, str_cst))do { constructor_elt _ce___ = {(tree) nullptr, fold_convert_loc (((location_t) 0), global_trees[TI_CONST_PTR_TYPE], str_cst) }; vec_safe_push ((vinner), _ce___); } while (0); |
3218 | CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,do { constructor_elt _ce___ = {(tree) nullptr, fold_convert_loc (((location_t) 0), global_trees[TI_CONST_PTR_TYPE], module_name_cst )}; vec_safe_push ((vinner), _ce___); } while (0) |
3219 | fold_convert (const_ptr_type_node, module_name_cst))do { constructor_elt _ce___ = {(tree) nullptr, fold_convert_loc (((location_t) 0), global_trees[TI_CONST_PTR_TYPE], module_name_cst )}; vec_safe_push ((vinner), _ce___); } while (0); |
3220 | varpool_node *vnode = varpool_node::get (decl); |
3221 | int has_dynamic_init = 0; |
3222 | /* FIXME: Enable initialization order fiasco detection in LTO mode once |
3223 | proper fix for PR 79061 will be applied. */ |
3224 | if (!in_lto_pglobal_options.x_in_lto_p) |
3225 | has_dynamic_init = vnode ? vnode->dynamically_initialized : 0; |
3226 | CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,do { constructor_elt _ce___ = {(tree) nullptr, build_int_cst ( uptr, has_dynamic_init)}; vec_safe_push ((vinner), _ce___); } while (0) |
3227 | build_int_cst (uptr, has_dynamic_init))do { constructor_elt _ce___ = {(tree) nullptr, build_int_cst ( uptr, has_dynamic_init)}; vec_safe_push ((vinner), _ce___); } while (0); |
3228 | tree locptr = NULL_TREE(tree) nullptr; |
3229 | location_t loc = DECL_SOURCE_LOCATION (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/asan.c" , 3229, __FUNCTION__))->decl_minimal.locus); |
3230 | expanded_location xloc = expand_location (loc); |
3231 | if (xloc.file != NULL) |
3232 | { |
3233 | static int lasanloccnt = 0; |
3234 | char buf[25]; |
3235 | ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt); |
3236 | tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf), |
3237 | ubsan_get_source_location_type ()); |
3238 | TREE_STATIC (var) = 1; |
3239 | TREE_PUBLIC (var) = 0; |
3240 | DECL_ARTIFICIAL (var) = 1; |
3241 | DECL_IGNORED_P (var) = 1; |
3242 | pretty_printer filename_pp; |
3243 | pp_string (&filename_pp, xloc.file); |
3244 | tree str = asan_pp_string (&filename_pp); |
3245 | tree ctor = build_constructor_va (TREE_TYPE (var), 3, |
3246 | NULL_TREE, str, NULL_TREE, |
3247 | build_int_cst (unsigned_type_node, |
3248 | xloc.line), NULL_TREE, |
3249 | build_int_cst (unsigned_type_node, |
3250 | xloc.column)); |
3251 | TREE_CONSTANT (ctor) = 1; |
3252 | TREE_STATIC (ctor) = 1; |
3253 | DECL_INITIAL (var) = ctor; |
3254 | varpool_node::finalize_decl (var); |
3255 | locptr = fold_convert (uptr, build_fold_addr_expr (var)); |
3256 | } |
3257 | else |
3258 | locptr = build_int_cst (uptr, 0); |
3259 | CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr); |
3260 | CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr); |
3261 | init = build_constructor (type, vinner); |
3262 | CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init); |
3263 | } |
3264 | |
3265 | /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */ |
3266 | void |
3267 | initialize_sanitizer_builtins (void) |
3268 | { |
3269 | tree decl; |
3270 | |
3271 | if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT)) |
3272 | return; |
3273 | |
3274 | tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE); |
3275 | tree BT_FN_VOID_PTR |
3276 | = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE); |
3277 | tree BT_FN_VOID_CONST_PTR |
3278 | = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE); |
3279 | tree BT_FN_VOID_PTR_PTR |
3280 | = build_function_type_list (void_type_node, ptr_type_node, |
3281 | ptr_type_node, NULL_TREE); |
3282 | tree BT_FN_VOID_PTR_PTR_PTR |
3283 | = build_function_type_list (void_type_node, ptr_type_node, |
3284 | ptr_type_node, ptr_type_node, NULL_TREE); |
3285 | tree BT_FN_VOID_PTR_PTRMODE |
3286 | = build_function_type_list (void_type_node, ptr_type_node, |
3287 | pointer_sized_int_node, NULL_TREE); |
3288 | tree BT_FN_VOID_INT |
3289 | = build_function_type_list (void_type_node, integer_type_node, NULL_TREE); |
3290 | tree BT_FN_SIZE_CONST_PTR_INT |
3291 | = build_function_type_list (size_type_node, const_ptr_type_node, |
3292 | integer_type_node, NULL_TREE); |
3293 | |
3294 | tree BT_FN_VOID_UINT8_UINT8 |
3295 | = build_function_type_list (void_type_node, unsigned_char_type_node, |
3296 | unsigned_char_type_node, NULL_TREE); |
3297 | tree BT_FN_VOID_UINT16_UINT16 |
3298 | = build_function_type_list (void_type_node, uint16_type_node, |
3299 | uint16_type_node, NULL_TREE); |
3300 | tree BT_FN_VOID_UINT32_UINT32 |
3301 | = build_function_type_list (void_type_node, uint32_type_node, |
3302 | uint32_type_node, NULL_TREE); |
3303 | tree BT_FN_VOID_UINT64_UINT64 |
3304 | = build_function_type_list (void_type_node, uint64_type_node, |
3305 | uint64_type_node, NULL_TREE); |
3306 | tree BT_FN_VOID_FLOAT_FLOAT |
3307 | = build_function_type_list (void_type_node, float_type_node, |
3308 | float_type_node, NULL_TREE); |
3309 | tree BT_FN_VOID_DOUBLE_DOUBLE |
3310 | = build_function_type_list (void_type_node, double_type_node, |
3311 | double_type_node, NULL_TREE); |
3312 | tree BT_FN_VOID_UINT64_PTR |
3313 | = build_function_type_list (void_type_node, uint64_type_node, |
3314 | ptr_type_node, NULL_TREE); |
3315 | |
3316 | tree BT_FN_PTR_CONST_PTR_UINT8 |
3317 | = build_function_type_list (ptr_type_node, const_ptr_type_node, |
3318 | unsigned_char_type_node, NULL_TREE); |
3319 | tree BT_FN_VOID_PTR_UINT8_PTRMODE |
3320 | = build_function_type_list (void_type_node, ptr_type_node, |
3321 | unsigned_char_type_node, |
3322 | pointer_sized_int_node, NULL_TREE); |
3323 | |
3324 | tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5]; |
3325 | tree BT_FN_IX_CONST_VPTR_INT[5]; |
3326 | tree BT_FN_IX_VPTR_IX_INT[5]; |
3327 | tree BT_FN_VOID_VPTR_IX_INT[5]; |
3328 | tree vptr |
3329 | = build_pointer_type (build_qualified_type (void_type_node, |
3330 | TYPE_QUAL_VOLATILE)); |
3331 | tree cvptr |
3332 | = build_pointer_type (build_qualified_type (void_type_node, |
3333 | TYPE_QUAL_VOLATILE |
3334 | |TYPE_QUAL_CONST)); |
3335 | tree boolt |
3336 | = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1); |
3337 | int i; |
3338 | for (i = 0; i < 5; i++) |
3339 | { |
3340 | tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1); |
3341 | BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i] |
3342 | = build_function_type_list (boolt, vptr, ptr_type_node, ix, |
3343 | integer_type_node, integer_type_node, |
3344 | NULL_TREE); |
3345 | BT_FN_IX_CONST_VPTR_INT[i] |
3346 | = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE); |
3347 | BT_FN_IX_VPTR_IX_INT[i] |
3348 | = build_function_type_list (ix, vptr, ix, integer_type_node, |
3349 | NULL_TREE); |
3350 | BT_FN_VOID_VPTR_IX_INT[i] |
3351 | = build_function_type_list (void_type_node, vptr, ix, |
3352 | integer_type_node, NULL_TREE); |
3353 | } |
3354 | #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0] |
3355 | #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0] |
3356 | #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0] |
3357 | #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0] |
3358 | #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1] |
3359 | #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1] |
3360 | #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1] |
3361 | #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1] |
3362 | #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2] |
3363 | #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2] |
3364 | #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2] |
3365 | #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2] |
3366 | #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3] |
3367 | #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3] |
3368 | #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3] |
3369 | #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3] |
3370 | #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4] |
3371 | #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4] |
3372 | #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4] |
3373 | #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4] |
3374 | #undef ATTR_NOTHROW_LIST |
3375 | #define ATTR_NOTHROW_LIST ECF_NOTHROW |
3376 | #undef ATTR_NOTHROW_LEAF_LIST |
3377 | #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF |
3378 | #undef ATTR_TMPURE_NOTHROW_LEAF_LIST |
3379 | #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST |
3380 | #undef ATTR_NORETURN_NOTHROW_LEAF_LIST |
3381 | #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST |
3382 | #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST |
3383 | #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \ |
3384 | ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST |
3385 | #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST |
3386 | #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \ |
3387 | ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST |
3388 | #undef ATTR_COLD_NOTHROW_LEAF_LIST |
3389 | #define ATTR_COLD_NOTHROW_LEAF_LIST \ |
3390 | /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST |
3391 | #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST |
3392 | #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \ |
3393 | /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST |
3394 | #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST |
3395 | #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \ |
3396 | /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST |
3397 | #undef ATTR_PURE_NOTHROW_LEAF_LIST |
3398 | #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST |
3399 | #undef DEF_BUILTIN_STUB |
3400 | #define DEF_BUILTIN_STUB(ENUM, NAME) |
3401 | #undef DEF_SANITIZER_BUILTIN_1 |
3402 | #define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS) \ |
3403 | do { \ |
3404 | decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \ |
3405 | BUILT_IN_NORMAL, NAME, NULL_TREE); \ |
3406 | set_call_expr_flags (decl, ATTRS); \ |
3407 | set_builtin_decl (ENUM, decl, true); \ |
3408 | } while (0) |
3409 | #undef DEF_SANITIZER_BUILTIN |
3410 | #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \ |
3411 | DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS); |
3412 | |
3413 | #include "sanitizer.def" |
3414 | |
3415 | /* -fsanitize=object-size uses __builtin_object_size, but that might |
3416 | not be available for e.g. Fortran at this point. We use |
3417 | DEF_SANITIZER_BUILTIN here only as a convenience macro. */ |
3418 | if ((flag_sanitize & SANITIZE_OBJECT_SIZE) |
3419 | && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE)) |
3420 | DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size", |
3421 | BT_FN_SIZE_CONST_PTR_INT, |
3422 | ATTR_PURE_NOTHROW_LEAF_LIST); |
3423 | |
3424 | #undef DEF_SANITIZER_BUILTIN_1 |
3425 | #undef DEF_SANITIZER_BUILTIN |
3426 | #undef DEF_BUILTIN_STUB |
3427 | } |
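For illustration, assuming gcc's sanitizer.def carries an entry such as DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init", BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST) (the exact entry is an assumption here), the DEF_SANITIZER_BUILTIN_1 macro defined above would expand to roughly:

    /* Sketch of one expansion of DEF_SANITIZER_BUILTIN_1.  */
    decl = add_builtin_function ("__builtin___asan_init", BT_FN_VOID,
                                 BUILT_IN_ASAN_INIT, BUILT_IN_NORMAL,
                                 "__asan_init", NULL_TREE);
    set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
    set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);

so each sanitizer entry point becomes available as an implicit builtin even when the front end did not declare it itself.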
3428 | |
3429 | /* Called via htab_traverse. Count number of emitted |
3430 | STRING_CSTs in the constant hash table. */ |
3431 | |
3432 | int |
3433 | count_string_csts (constant_descriptor_tree **slot, |
3434 | unsigned HOST_WIDE_INT *data) |
3435 | { |
3436 | struct constant_descriptor_tree *desc = *slot; |
3437 | if (TREE_CODE (desc->value) == STRING_CST |
3438 | && TREE_ASM_WRITTEN (desc->value) |
3439 | && asan_protect_global (desc->value)) |
3440 | ++*data; |
3441 | return 1; |
3442 | } |
3443 | |
3444 | /* Helper structure to pass two parameters to |
3445 | add_string_csts. */ |
3446 | |
3447 | struct asan_add_string_csts_data |
3448 | { |
3449 | tree type; |
3450 | vec<constructor_elt, va_gc> *v; |
3451 | }; |
3452 | |
3453 | /* Called via hash_table::traverse. Call asan_add_global |
3454 | on emitted STRING_CSTs from the constant hash table. */ |
3455 | |
3456 | int |
3457 | add_string_csts (constant_descriptor_tree **slot, |
3458 | asan_add_string_csts_data *aascd) |
3459 | { |
3460 | struct constant_descriptor_tree *desc = *slot; |
3461 | if (TREE_CODE (desc->value) == STRING_CST |
3462 | && TREE_ASM_WRITTEN (desc->value) |
3463 | && asan_protect_global (desc->value)) |
3464 | { |
3465 | asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)), |
3466 | aascd->type, aascd->v); |
3467 | } |
3468 | return 1; |
3469 | } |
3470 | |
3471 | /* Needs to be GTY(()), because cgraph_build_static_cdtor may |
3472 | invoke ggc_collect. */ |
3473 | static GTY(()) tree asan_ctor_statements; |
3474 | |
3475 | /* Module-level instrumentation. |
3476 | - Insert __asan_init_vN() into the list of CTORs. |
3477 | - TODO: insert redzones around globals. |
3478 | */ |
3479 | |
3480 | void |
3481 | asan_finish_file (void) |
3482 | { |
3483 | varpool_node *vnode; |
3484 | unsigned HOST_WIDE_INT gcount = 0; |
3485 | |
3486 | if (shadow_ptr_types[0] == NULL_TREE) |
3487 | asan_init_shadow_ptr_types (); |
3488 | /* Avoid instrumenting code in the asan ctors/dtors. |
3489 | We don't need to insert padding after the description strings, |
3490 | nor after .LASAN* array. */ |
3491 | flag_sanitize &= ~SANITIZE_ADDRESS; |
3492 | |
3493 | /* For user-space we want asan constructors to run first. |
3494 | Linux kernel does not support priorities other than default, and the only |
3495 | other user of constructors is coverage. So we run with the default |
3496 | priority. */ |
3497 | int priority = flag_sanitize & SANITIZE_USER_ADDRESS |
3498 | ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY; |
3499 | |
3500 | if (flag_sanitize & SANITIZE_USER_ADDRESS) |
3501 | { |
3502 | tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT); |
3503 | append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements); |
3504 | fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK); |
3505 | append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements); |
3506 | } |
3507 | FOR_EACH_DEFINED_VARIABLE (vnode) |
3508 | if (TREE_ASM_WRITTEN (vnode->decl) |
3509 | && asan_protect_global (vnode->decl)) |
3510 | ++gcount; |
3511 | hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab (); |
3512 | const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts> |
3513 | (&gcount); |
3514 | if (gcount) |
3515 | { |
3516 | tree type = asan_global_struct (), var, ctor; |
3517 | tree dtor_statements = NULL_TREE; |
3518 | vec<constructor_elt, va_gc> *v; |
3519 | char buf[20]; |
3520 | |
3521 | type = build_array_type_nelts (type, gcount); |
3522 | ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0); |
3523 | var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf), |
3524 | type); |
3525 | TREE_STATIC (var) = 1; |
3526 | TREE_PUBLIC (var) = 0; |
3527 | DECL_ARTIFICIAL (var) = 1; |
3528 | DECL_IGNORED_P (var) = 1; |
3529 | vec_alloc (v, gcount); |
3530 | FOR_EACH_DEFINED_VARIABLE (vnode) |
3531 | if (TREE_ASM_WRITTEN (vnode->decl) |
3532 | && asan_protect_global (vnode->decl)) |
3533 | asan_add_global (vnode->decl, TREE_TYPE (type), v); |
3534 | struct asan_add_string_csts_data aascd; |
3535 | aascd.type = TREE_TYPE (type); |
3536 | aascd.v = v; |
3537 | const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts> |
3538 | (&aascd); |
3539 | ctor = build_constructor (type, v); |
3540 | TREE_CONSTANT (ctor) = 1; |
3541 | TREE_STATIC (ctor) = 1; |
3542 | DECL_INITIAL (var) = ctor; |
3543 | SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var), |
3544 | ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT)); |
3545 | |
3546 | varpool_node::finalize_decl (var); |
3547 | |
3548 | tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS); |
3549 | tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount); |
3550 | append_to_statement_list (build_call_expr (fn, 2, |
3551 | build_fold_addr_expr (var), |
3552 | gcount_tree), |
3553 | &asan_ctor_statements); |
3554 | |
3555 | fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS); |
3556 | append_to_statement_list (build_call_expr (fn, 2, |
3557 | build_fold_addr_expr (var), |
3558 | gcount_tree), |
3559 | &dtor_statements); |
3560 | cgraph_build_static_cdtor ('D', dtor_statements, priority); |
3561 | } |
3562 | if (asan_ctor_statements) |
3563 | cgraph_build_static_cdtor ('I', asan_ctor_statements, priority); |
3564 | flag_sanitize |= SANITIZE_ADDRESS; |
3565 | } |
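At a high level, for user-space ASan the constructor/destructor pair built here behaves like the sketch below; lasan_globals stands in for the local .LASAN0 array finalized above and the version suffix N depends on the libasan ABI, so both names are illustrative only:

    /* Module constructor, priority MAX_RESERVED_INIT_PRIORITY - 1.  */
    __asan_init ();
    __asan_version_mismatch_check_vN ();
    __asan_register_globals (&lasan_globals[0], gcount);  /* one descriptor per protected global */

    /* Matching module destructor.  */
    __asan_unregister_globals (&lasan_globals[0], gcount);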
3566 | |
3567 | /* Poison or unpoison (depending on IS_CLOBBER variable) shadow memory based |
3568 | on SHADOW address. Newly added statements will be added to ITER with |
3569 | given location LOC. We mark SIZE bytes in shadow memory, where |
3570 | LAST_CHUNK_SIZE is greater than zero when we are at the |
3571 | end of a variable. */ |
3572 | |
3573 | static void |
3574 | asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc, |
3575 | tree shadow, |
3576 | unsigned HOST_WIDE_INT base_addr_offset, |
3577 | bool is_clobber, unsigned size, |
3578 | unsigned last_chunk_size) |
3579 | { |
3580 | tree shadow_ptr_type; |
3581 | |
3582 | switch (size) |
3583 | { |
3584 | case 1: |
3585 | shadow_ptr_type = shadow_ptr_types[0]; |
3586 | break; |
3587 | case 2: |
3588 | shadow_ptr_type = shadow_ptr_types[1]; |
3589 | break; |
3590 | case 4: |
3591 | shadow_ptr_type = shadow_ptr_types[2]; |
3592 | break; |
3593 | default: |
3594 | gcc_unreachable (); |
3595 | } |
3596 | |
3597 | unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0; |
3598 | unsigned HOST_WIDE_INT val = 0; |
3599 | unsigned last_pos = size; |
3600 | if (last_chunk_size && !is_clobber) |
3601 | last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1; |
3602 | for (unsigned i = 0; i < size; ++i) |
3603 | { |
3604 | unsigned char shadow_c = c; |
3605 | if (i == last_pos) |
3606 | shadow_c = last_chunk_size; |
3607 | val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i); |
3608 | } |
3609 | |
3610 | /* Handle last chunk in unpoisoning. */ |
3611 | tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val); |
3612 | |
3613 | tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow, |
3614 | build_int_cst (shadow_ptr_type, base_addr_offset)); |
3615 | |
3616 | gimple *g = gimple_build_assign (dest, magic); |
3617 | gimple_set_location (g, loc); |
3618 | gsi_insert_after (iter, g, GSI_NEW_STMT); |
3619 | } |
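A worked example of the value packing above (little-endian, shadow granularity of 8, and a hypothetical 29-byte variable): unpoisoning with a single 4-byte shadow store uses c == 0 and last_chunk_size == 5, so only the last shadow byte is non-zero:

    /* Unpoison: shadow bytes 0, 0, 0, 5 written in one 4-byte store.  */
    unsigned HOST_WIDE_INT val = 0;
    val |= (unsigned HOST_WIDE_INT) 5 << (BITS_PER_UNIT * 3);   /* val == 0x05000000 */

Poisoning the same region instead writes ASAN_STACK_MAGIC_USE_AFTER_SCOPE (0xf8) into every byte, i.e. val == 0xf8f8f8f8.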
3620 | |
3621 | /* Expand the ASAN_MARK builtins. */ |
3622 | |
3623 | bool |
3624 | asan_expand_mark_ifn (gimple_stmt_iterator *iter) |
3625 | { |
3626 | gimple *g = gsi_stmt (*iter); |
3627 | location_t loc = gimple_location (g); |
3628 | HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0)); |
3629 | bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON; |
3630 | |
3631 | tree base = gimple_call_arg (g, 1); |
3632 | gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR); |
3633 | tree decl = TREE_OPERAND (base, 0); |
3634 | |
3635 | /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */ |
3636 | if (TREE_CODE (decl) == COMPONENT_REF |
3637 | && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0))) |
3638 | decl = TREE_OPERAND (decl, 0); |
3639 | |
3640 | gcc_checking_assert (TREE_CODE (decl) == VAR_DECL); |
3641 | |
3642 | if (hwasan_sanitize_p ()) |
3643 | { |
3644 | gcc_assert (param_hwasan_instrument_stack); |
3645 | gimple_seq stmts = NULL; |
3646 | /* Here we swap ASAN_MARK calls for HWASAN_MARK. |
3647 | This is because we are using the approach of using ASAN_MARK as a |
3648 | synonym until here. |
3649 | That approach means we don't yet have to duplicate all the special |
3650 | cases for ASAN_MARK and ASAN_POISON with the exact same handling but |
3651 | called HWASAN_MARK etc. |
3652 | |
3653 | N.b. __asan_poison_stack_memory (which implements ASAN_MARK for ASAN) |
3654 | rounds the size up to its shadow memory granularity, while |
3655 | __hwasan_tag_memory (which implements the same for HWASAN) does not. |
3656 | Hence we emit HWASAN_MARK with an aligned size unlike ASAN_MARK. */ |
3657 | tree len = gimple_call_arg (g, 2); |
3658 | tree new_len = gimple_build_round_up (&stmts, loc, size_type_node, len, |
3659 | HWASAN_TAG_GRANULE_SIZE); |
3660 | gimple_build (&stmts, loc, CFN_HWASAN_MARK, |
3661 | void_type_node, gimple_call_arg (g, 0), |
3662 | base, new_len); |
3663 | gsi_replace_with_seq (iter, stmts, true); |
3664 | return false; |
3665 | } |
3666 | |
3667 | if (is_poison) |
3668 | { |
3669 | if (asan_handled_variables == NULL) |
3670 | asan_handled_variables = new hash_set<tree> (16); |
3671 | asan_handled_variables->add (decl); |
3672 | } |
3673 | tree len = gimple_call_arg (g, 2); |
3674 | |
3675 | gcc_assert (tree_fits_shwi_p (len)); |
3676 | unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len); |
3677 | gcc_assert (size_in_bytes); |
3678 | |
3679 | g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), |
3680 | NOP_EXPR, base); |
3681 | gimple_set_location (g, loc); |
3682 | gsi_replace (iter, g, false); |
3683 | tree base_addr = gimple_assign_lhs (g); |
3684 | |
3685 | /* Generate direct emission if size_in_bytes is small. */ |
3686 | if (size_in_bytes |
3687 | <= (unsigned)param_use_after_scope_direct_emission_threshold) |
3688 | { |
3689 | const unsigned HOST_WIDE_INT shadow_size |
3690 | = shadow_mem_size (size_in_bytes); |
3691 | const unsigned int shadow_align |
Value stored to 'shadow_align' during its initialization is never read | |
3692 | = (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT; |
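/* Analyzer note: shadow_align is only consulted in the
   "!STRICT_ALIGNMENT || shadow_align >= N" tests below; STRICT_ALIGNMENT
   expands to 0 for this x86 configuration, so those reads fold away and the
   initializer's value is indeed never used on this target.  */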
3693 | |
3694 | tree shadow = build_shadow_mem_access (iter, loc, base_addr, |
3695 | shadow_ptr_types[0], true); |
3696 | |
3697 | for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;) |
3698 | { |
3699 | unsigned size = 1; |
3700 | if (shadow_size - offset >= 4 |
3701 | && (!STRICT_ALIGNMENT || shadow_align >= 4)) |
3702 | size = 4; |
3703 | else if (shadow_size - offset >= 2 |
3704 | && (!STRICT_ALIGNMENT || shadow_align >= 2)) |
3705 | size = 2; |
3706 | |
3707 | unsigned HOST_WIDE_INT last_chunk_size = 0; |
3708 | unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY; |
3709 | if (s > size_in_bytes) |
3710 | last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes); |
3711 | |
3712 | asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison, |
3713 | size, last_chunk_size); |
3714 | offset += size; |
3715 | } |
3716 | } |
3717 | else |
3718 | { |
3719 | g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), |
3720 | NOP_EXPR, len); |
3721 | gimple_set_location (g, loc); |
3722 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
3723 | tree sz_arg = gimple_assign_lhs (g); |
3724 | |
3725 | tree fun |
3726 | = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY |
3727 | : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY); |
3728 | g = gimple_build_call (fun, 2, base_addr, sz_arg); |
3729 | gimple_set_location (g, loc); |
3730 | gsi_insert_after (iter, g, GSI_NEW_STMT); |
3731 | } |
3732 | |
3733 | return false; |
3734 | } |
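When size_in_bytes exceeds the direct-emission threshold, marking is delegated to the runtime instead; the call emitted in the else branch above is roughly equivalent to this sketch:

    /* Fallback for large regions: let libasan walk the shadow itself.  */
    if (is_poison)
      __asan_poison_stack_memory (base_addr, size_in_bytes);
    else
      __asan_unpoison_stack_memory (base_addr, size_in_bytes);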
3735 | |
3736 | /* Expand the ASAN_{LOAD,STORE} builtins. */ |
3737 | |
3738 | bool |
3739 | asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls) |
3740 | { |
3741 | gcc_assert (!hwasan_sanitize_p ()); |
3742 | gimple *g = gsi_stmt (*iter); |
3743 | location_t loc = gimple_location (g); |
3744 | bool recover_p; |
3745 | if (flag_sanitize & SANITIZE_USER_ADDRESS) |
3746 | recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0; |
3747 | else |
3748 | recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0; |
3749 | |
3750 | HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0)); |
3751 | gcc_assert (flags < ASAN_CHECK_LAST); |
3752 | bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0; |
3753 | bool is_store = (flags & ASAN_CHECK_STORE) != 0; |
3754 | bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0; |
3755 | |
3756 | tree base = gimple_call_arg (g, 1); |
3757 | tree len = gimple_call_arg (g, 2); |
3758 | HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3)); |
3759 | |
3760 | HOST_WIDE_INT size_in_bytes |
3761 | = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1; |
3762 | |
3763 | if (use_calls) |
3764 | { |
3765 | /* Instrument using callbacks. */ |
3766 | gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), |
3767 | NOP_EXPR, base); |
3768 | gimple_set_location (g, loc); |
3769 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
3770 | tree base_addr = gimple_assign_lhs (g); |
3771 | |
3772 | int nargs; |
3773 | tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs); |
3774 | if (nargs == 1) |
3775 | g = gimple_build_call (fun, 1, base_addr); |
3776 | else |
3777 | { |
3778 | gcc_assert (nargs == 2); |
3779 | g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), |
3780 | NOP_EXPR, len); |
3781 | gimple_set_location (g, loc); |
3782 | gsi_insert_before (iter, g, GSI_SAME_STMT); |
3783 | tree sz_arg = gimple_assign_lhs (g); |
3784 | g = gimple_build_call (fun, nargs, base_addr, sz_arg); |
3785 | } |
3786 | gimple_set_location (g, loc); |
3787 | gsi_replace (iter, g, false); |
3788 | return false; |
3789 | } |
3790 | |
3791 | HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes; |
3792 | |
3793 | tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0]; |
3794 | tree shadow_type = TREE_TYPE (shadow_ptr_type); |
3795 | |
3796 | gimple_stmt_iterator gsi = *iter; |
3797 | |
3798 | if (!is_non_zero_len) |
3799 | { |
3800 | /* So, the length of the memory area to asan-protect is |
3801 | non-constant. Let's guard the generated instrumentation code |
3802 | like: |
3803 | |
3804 | if (len != 0) |
3805 | { |
3806 | //asan instrumentation code goes here. |
3807 | } |
3808 | // fallthrough instructions, starting with *ITER. */ |
3809 | |
3810 | g = gimple_build_cond (NE_EXPR, |
3811 | len, |
3812 | build_int_cst (TREE_TYPE (len), 0), |
3813 | NULL_TREE, NULL_TREE); |
3814 | gimple_set_location (g, loc); |
3815 | |
3816 | basic_block then_bb, fallthrough_bb; |
3817 | insert_if_then_before_iter (as_a <gcond *> (g), iter, |
3818 | /*then_more_likely_p=*/true, |
3819 | &then_bb, &fallthrough_bb); |
3820 | /* Note that fallthrough_bb starts with the statement that was |
3821 | pointed to by ITER. */ |
3822 | |
3823 | /* The 'then block' of the 'if (len != 0)' condition is where |
3824 | we'll generate the asan instrumentation code now. */ |
3825 | gsi = gsi_last_bb (then_bb); |
3826 | } |
3827 | |
3828 | /* Get an iterator on the point where we can add the condition |
3829 | statement for the instrumentation. */ |
3830 | basic_block then_bb, else_bb; |
3831 | gsi = create_cond_insert_point (&gsi, /*before_p*/false, |
3832 | /*then_more_likely_p=*/false, |
3833 | /*create_then_fallthru_edge*/recover_p, |
3834 | &then_bb, |
3835 | &else_bb); |
3836 | |
3837 | g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), |
3838 | NOP_EXPR, base); |
3839 | gimple_set_location (g, loc); |
3840 | gsi_insert_before (&gsi, g, GSI_NEW_STMT); |
3841 | tree base_addr = gimple_assign_lhs (g); |
3842 | |
3843 | tree t = NULL_TREE; |
3844 | if (real_size_in_bytes >= 8) |
3845 | { |
3846 | tree shadow = build_shadow_mem_access (&gsi, loc, base_addr, |
3847 | shadow_ptr_type); |
3848 | t = shadow; |
3849 | } |
3850 | else |
3851 | { |
3852 | /* Slow path for 1, 2 and 4 byte accesses. */ |
3853 | /* Test (shadow != 0) |
3854 | & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */ |
3855 | tree shadow = build_shadow_mem_access (&gsi, loc, base_addr, |
3856 | shadow_ptr_type); |
3857 | gimple *shadow_test = build_assign (NE_EXPR, shadow, 0); |
3858 | gimple_seq seq = NULL; |
3859 | gimple_seq_add_stmt (&seq, shadow_test); |
3860 | /* Aligned (>= 8 bytes) can test just |
3861 | (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known |
3862 | to be 0. */ |
3863 | if (align < 8) |
3864 | { |
3865 | gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, |
3866 | base_addr, 7)); |
3867 | gimple_seq_add_stmt (&seq, |
3868 | build_type_cast (shadow_type, |
3869 | gimple_seq_last (seq))); |
3870 | if (real_size_in_bytes > 1) |
3871 | gimple_seq_add_stmt (&seq, |
3872 | build_assign (PLUS_EXPR, |
3873 | gimple_seq_last (seq), |
3874 | real_size_in_bytes - 1)); |
3875 | t = gimple_assign_lhs (gimple_seq_last_stmt (seq)); |
3876 | } |
3877 | else |
3878 | t = build_int_cst (shadow_type, real_size_in_bytes - 1); |
3879 | gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow)); |
3880 | gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test, |
3881 | gimple_seq_last (seq))); |
3882 | t = gimple_assign_lhs (gimple_seq_last (seq)); |
3883 | gimple_seq_set_location (seq, loc); |
3884 | gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING); |
3885 | |
3886 | /* For non-constant, misaligned or otherwise weird access sizes, |
3887 | check first and last byte. */ |
3888 | if (size_in_bytes == -1) |
3889 | { |
3890 | g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), |
3891 | MINUS_EXPR, len, |
3892 | build_int_cst (pointer_sized_int_node, 1)); |
3893 | gimple_set_location (g, loc); |
3894 | gsi_insert_after (&gsi, g, GSI_NEW_STMT); |
3895 | tree last = gimple_assign_lhs (g); |
3896 | g = gimple_build_assign (make_ssa_name (pointer_sized_int_node), |
3897 | PLUS_EXPR, base_addr, last); |
3898 | gimple_set_location (g, loc); |
3899 | gsi_insert_after (&gsi, g, GSI_NEW_STMT); |
3900 | tree base_end_addr = gimple_assign_lhs (g); |
3901 | |
3902 | tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr, |
3903 | shadow_ptr_type); |
3904 | gimple *shadow_test = build_assign (NE_EXPR, shadow, 0); |
3905 | gimple_seq seq = NULL; |
3906 | gimple_seq_add_stmt (&seq, shadow_test); |
3907 | gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, |
3908 | base_end_addr, 7)); |
3909 | gimple_seq_add_stmt (&seq, build_type_cast (shadow_type, |
3910 | gimple_seq_last (seq))); |
3911 | gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, |
3912 | gimple_seq_last (seq), |
3913 | shadow)); |
3914 | gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test, |
3915 | gimple_seq_last (seq))); |
3916 | gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t, |
3917 | gimple_seq_last (seq))); |
3918 | t = gimple_assign_lhs (gimple_seq_last (seq)); |
3919 | gimple_seq_set_location (seq, loc); |
3920 | gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING); |
3921 | } |
3922 | } |
3923 | |
3924 | g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0), |
3925 | NULL_TREE, NULL_TREE); |
3926 | gimple_set_location (g, loc); |
3927 | gsi_insert_after (&gsi, g, GSI_NEW_STMT); |
3928 | |
3929 | /* Generate call to the run-time library (e.g. __asan_report_load8). */ |
3930 | gsi = gsi_start_bb (then_bb); |
3931 | int nargs; |
3932 | tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs); |
3933 | g = gimple_build_call (fun, nargs, base_addr, len); |
3934 | gimple_set_location (g, loc); |
3935 | gsi_insert_after (&gsi, g, GSI_NEW_STMT); |
3936 | |
3937 | gsi_remove (iter, true); |
3938 | *iter = gsi_start_bb (else_bb); |
3939 | |
3940 | return true; |
3941 | } |
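For a non-constant LEN (size_in_bytes == -1) the GIMPLE emitted above therefore amounts to checking the shadow of both the first and the last byte of the access; a sketch of the equivalent check, with asan_shadow_offset standing in for the target's constant shadow offset and the load/store choice depending on is_store:

    if (len != 0)
      {
        char s_first = *(char *) ((base_addr >> 3) + asan_shadow_offset);
        char s_last  = *(char *) (((base_addr + len - 1) >> 3) + asan_shadow_offset);
        if ((s_first != 0 && (char) (base_addr & 7) >= s_first)
            || (s_last != 0 && (char) ((base_addr + len - 1) & 7) >= s_last))
          __asan_report_load_n (base_addr, len);   /* or __asan_report_store_n */
      }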
3942 | |
3943 | /* Create ASAN shadow variable for a VAR_DECL which has been rewritten |
3944 | into SSA. Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING. */ |
3945 | |
3946 | static tree |
3947 | create_asan_shadow_var (tree var_decl, |
3948 | hash_map<tree, tree> &shadow_vars_mapping) |
3949 | { |
3950 | tree *slot = shadow_vars_mapping.get (var_decl); |
3951 | if (slot == NULL) |
3952 | { |
3953 | tree shadow_var = copy_node (var_decl); |
3954 | |
3955 | copy_body_data id; |
3956 | memset (&id, 0, sizeof (copy_body_data)); |
3957 | id.src_fn = id.dst_fn = current_function_decl; |
3958 | copy_decl_for_dup_finish (&id, var_decl, shadow_var); |
3959 | |
3960 | DECL_ARTIFICIAL (shadow_var) = 1; |
3961 | DECL_IGNORED_P (shadow_var) = 1; |
3962 | DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0; |
3963 | gimple_add_tmp_var (shadow_var); |
3964 | |
3965 | shadow_vars_mapping.put (var_decl, shadow_var); |
3966 | return shadow_var; |
3967 | } |
3968 | else |
3969 | return *slot; |
3970 | } |
3971 | |
3972 | /* Expand ASAN_POISON ifn. */ |
3973 | |
3974 | bool |
3975 | asan_expand_poison_ifn (gimple_stmt_iterator *iter, |
3976 | bool *need_commit_edge_insert, |
3977 | hash_map<tree, tree> &shadow_vars_mapping) |
3978 | { |
3979 | gimple *g = gsi_stmt (*iter); |
3980 | tree poisoned_var = gimple_call_lhs (g); |
3981 | if (!poisoned_var || has_zero_uses (poisoned_var)) |
3982 | { |
3983 | gsi_remove (iter, true); |
3984 | return true; |
3985 | } |
3986 | |
3987 | if (SSA_NAME_VAR (poisoned_var) == NULL_TREE) |
3988 | SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var, |
3989 | create_tmp_var (TREE_TYPE (poisoned_var))); |
3990 | |
3991 | tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var), |
3992 | shadow_vars_mapping); |
3993 | |
3994 | bool recover_p; |
3995 | if (flag_sanitize & SANITIZE_USER_ADDRESS) |
3996 | recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0; |
3997 | else |
3998 | recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0; |
3999 | tree size = DECL_SIZE_UNIT (shadow_var); |
4000 | gimple *poison_call |
4001 | = gimple_build_call_internal (IFN_ASAN_MARK, 3, |
4002 | build_int_cst (integer_type_node, |
4003 | ASAN_MARK_POISON), |
4004 | build_fold_addr_expr (shadow_var), size); |
4005 | |
4006 | gimple *use; |
4007 | imm_use_iterator imm_iter; |
4008 | FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var) |
4009 | { |
4010 | if (is_gimple_debug (use)) |
4011 | continue; |
4012 | |
4013 | int nargs; |
4014 | bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE); |
4015 | gcall *call; |
4016 | if (hwasan_sanitize_p ()) |
4017 | { |
4018 | tree fun = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_MISMATCH4); |
4019 | /* NOTE: hwasan has no __hwasan_report_* functions like asan does. |
4020 | We use __hwasan_tag_mismatch4 with arguments that tell it the |
4021 | size of access and load to report all tag mismatches. |
4022 | |
4023 | The arguments to this function are: |
4024 | Address of invalid access. |
4025 | Bitfield containing information about the access |
4026 | (access_info) |
4027 | Pointer to a frame of registers |
4028 | (for use in printing the contents of registers in a dump) |
4029 | Not used yet -- to be used by inline instrumentation. |
4030 | Size of access. |
4031 | |
4032 | The access_info bitfield encodes the following pieces of |
4033 | information: |
4034 | - Is this a store or load? |
4035 | access_info & 0x10 => store |
4036 | - Should the program continue after reporting the error? |
4037 | access_info & 0x20 => recover |
4038 | - What size access is this (not used here since we can always |
4039 | pass the size in the last argument) |
4040 | |
4041 | if (access_info & 0xf == 0xf) |
4042 | size is taken from last argument. |
4043 | else |
4044 | size == 1 << (access_info & 0xf) |
4045 | |
4046 | The last argument contains the size of the access iff the |
4047 | access_info size indicator is 0xf (we always use this argument |
4048 | rather than storing the size in the access_info bitfield). |
4049 | |
4050 | See the function definition `__hwasan_tag_mismatch4` in |
4051 | libsanitizer/hwasan for the full definition. |
4052 | */ |
4053 | unsigned access_info = (0x20 * recover_p) |
4054 | + (0x10 * store_p) |
4055 | + (0xf); |
4056 | call = gimple_build_call (fun, 4, |
4057 | build_fold_addr_expr (shadow_var), |
4058 | build_int_cst (pointer_sized_int_node, |
4059 | access_info), |
4060 | build_int_cst (pointer_sized_int_node, 0), |
4061 | size); |
4062 | } |
4063 | else |
4064 | { |
4065 | tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size), |
4066 | &nargs); |
4067 | call = gimple_build_call (fun, 1, |
4068 | build_fold_addr_expr (shadow_var)); |
4069 | } |
4070 | gimple_set_location (call, gimple_location (use)); |
4071 | gimple *call_to_insert = call; |
4072 | |
4073 | /* The USE can be a gimple PHI node. If so, insert the call on |
4074 | all edges leading to the PHI node. */ |
4075 | if (is_a <gphi *> (use)) |
4076 | { |
4077 | gphi *phi = dyn_cast<gphi *> (use); |
4078 | for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i) |
4079 | if (gimple_phi_arg_def (phi, i) == poisoned_var) |
4080 | { |
4081 | edge e = gimple_phi_arg_edge (phi, i); |
4082 | |
4083 | /* Do not insert on an edge we can't split. */ |
4084 | if (e->flags & EDGE_ABNORMAL) |
4085 | continue; |
4086 | |
4087 | if (call_to_insert == NULL) |
4088 | call_to_insert = gimple_copy (call); |
4089 | |
4090 | gsi_insert_seq_on_edge (e, call_to_insert); |
4091 | *need_commit_edge_insert = true; |
4092 | call_to_insert = NULL; |
4093 | } |
4094 | } |
4095 | else |
4096 | { |
4097 | gimple_stmt_iterator gsi = gsi_for_stmt (use); |
4098 | if (store_p) |
4099 | gsi_replace (&gsi, call, true); |
4100 | else |
4101 | gsi_insert_before (&gsi, call, GSI_NEW_STMT); |
4102 | } |
4103 | } |
4104 | |
4105 | SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true; |
4106 | SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop (); |
4107 | gsi_replace (iter, poison_call, false); |
4108 | |
4109 | return true; |
4110 | } |
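A concrete reading of the access_info encoding used above, with hypothetical flag settings: for a load from the poisoned variable with recovery enabled,

    unsigned access_info = (0x20 * 1) + (0x10 * 0) + 0xf;   /* == 0x2f */

so the size-indicator nibble is 0xf and __hwasan_tag_mismatch4 takes the real access size from its fourth argument rather than from the low bits.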
4111 | |
4112 | /* Instrument the current function. */ |
4113 | |
4114 | static unsigned int |
4115 | asan_instrument (void) |
4116 | { |
4117 | if (hwasan_sanitize_p ()) |
4118 | { |
4119 | transform_statements (); |
4120 | return 0; |
4121 | } |
4122 | |
4123 | if (shadow_ptr_types[0] == NULL_TREE) |
4124 | asan_init_shadow_ptr_types (); |
4125 | transform_statements (); |
4126 | last_alloca_addr = NULL_TREE; |
4127 | return 0; |
4128 | } |
4129 | |
4130 | static bool |
4131 | gate_asan (void) |
4132 | { |
4133 | return sanitize_flags_p (SANITIZE_ADDRESS); |
4134 | } |
4135 | |
4136 | namespace { |
4137 | |
4138 | const pass_data pass_data_asan = |
4139 | { |
4140 | GIMPLE_PASS, /* type */ |
4141 | "asan", /* name */ |
4142 | OPTGROUP_NONE, /* optinfo_flags */ |
4143 | TV_NONE, /* tv_id */ |
4144 | ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */ |
4145 | 0, /* properties_provided */ |
4146 | 0, /* properties_destroyed */ |
4147 | 0, /* todo_flags_start */ |
4148 | TODO_update_ssa, /* todo_flags_finish */ |
4149 | }; |
4150 | |
4151 | class pass_asan : public gimple_opt_pass |
4152 | { |
4153 | public: |
4154 | pass_asan (gcc::context *ctxt) |
4155 | : gimple_opt_pass (pass_data_asan, ctxt) |
4156 | {} |
4157 | |
4158 | /* opt_pass methods: */ |
4159 | opt_pass * clone () { return new pass_asan (m_ctxt); } |
4160 | virtual bool gate (function *) { return gate_asan () || gate_hwasan (); } |
4161 | virtual unsigned int execute (function *) { return asan_instrument (); } |
4162 | |
4163 | }; // class pass_asan |
4164 | |
4165 | } // anon namespace |
4166 | |
4167 | gimple_opt_pass * |
4168 | make_pass_asan (gcc::context *ctxt) |
4169 | { |
4170 | return new pass_asan (ctxt); |
4171 | } |
4172 | |
4173 | namespace { |
4174 | |
4175 | const pass_data pass_data_asan_O0 = |
4176 | { |
4177 | GIMPLE_PASS, /* type */ |
4178 | "asan0", /* name */ |
4179 | OPTGROUP_NONE, /* optinfo_flags */ |
4180 | TV_NONE, /* tv_id */ |
4181 | ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */ |
4182 | 0, /* properties_provided */ |
4183 | 0, /* properties_destroyed */ |
4184 | 0, /* todo_flags_start */ |
4185 | TODO_update_ssa, /* todo_flags_finish */ |
4186 | }; |
4187 | |
4188 | class pass_asan_O0 : public gimple_opt_pass |
4189 | { |
4190 | public: |
4191 | pass_asan_O0 (gcc::context *ctxt) |
4192 | : gimple_opt_pass (pass_data_asan_O0, ctxt) |
4193 | {} |
4194 | |
4195 | /* opt_pass methods: */ |
4196 | virtual bool gate (function *) |
4197 | { |
4198 | return !optimize && (gate_asan () || gate_hwasan ()); |
4199 | } |
4200 | virtual unsigned int execute (function *) { return asan_instrument (); } |
4201 | |
4202 | }; // class pass_asan_O0 |
4203 | |
4204 | } // anon namespace |
4205 | |
4206 | gimple_opt_pass * |
4207 | make_pass_asan_O0 (gcc::context *ctxt) |
4208 | { |
4209 | return new pass_asan_O0 (ctxt); |
4210 | } |
4211 | |
4212 | /* HWASAN */ |
4213 | |
4214 | /* For stack tagging: |
4215 | |
4216 | Return the offset from the frame base tag that the "next" expanded object |
4217 | should have. */ |
4218 | uint8_t |
4219 | hwasan_current_frame_tag () |
4220 | { |
4221 | return hwasan_frame_tag_offset; |
4222 | } |
4223 | |
4224 | /* For stack tagging: |
4225 | |
4226 | Return the 'base pointer' for this function. If that base pointer has not |
4227 | yet been created then we create a register to hold it and record the insns |
4228 | to initialize the register in `hwasan_frame_base_init_seq` for later |
4229 | emission. */ |
4230 | rtx |
4231 | hwasan_frame_base () |
4232 | { |
4233 | if (! hwasan_frame_base_ptr) |
4234 | { |
4235 | start_sequence (); |
4236 | hwasan_frame_base_ptr |
4237 | = force_reg (Pmode, |
4238 | targetm.memtag.insert_random_tag (virtual_stack_vars_rtx, |
4239 | NULL_RTX)); |
4240 | hwasan_frame_base_init_seq = get_insns (); |
4241 | end_sequence (); |
4242 | } |
4243 | |
4244 | return hwasan_frame_base_ptr; |
4245 | } |
4246 | |
4247 | /* For stack tagging: |
4248 | |
4249 | Check whether this RTX is a standard pointer addressing the base of the |
4250 | stack variables for this frame. Returns true if the RTX is either |
4251 | virtual_stack_vars_rtx or hwasan_frame_base_ptr. */ |
4252 | bool |
4253 | stack_vars_base_reg_p (rtx base) |
4254 | { |
4255 | return base == virtual_stack_vars_rtx || base == hwasan_frame_base_ptr; |
4256 | } |
4257 | |
4258 | /* For stack tagging: |
4259 | |
4260 | Emit frame base initialisation. |
4261 | If hwasan_frame_base has been used before here then |
4262 | hwasan_frame_base_init_seq contains the sequence of instructions to |
4263 | initialize it. This must be put just before the hwasan prologue, so we emit |
4264 | the insns before parm_birth_insn (which will point to the first instruction |
4265 | of the hwasan prologue if it exists). |
4266 | |
4267 | We update `parm_birth_insn` to point to the start of this initialisation |
4268 | since that represents the end of the initialisation done by |
4269 | expand_function_{start,end} functions and we want to maintain that. */ |
4270 | void |
4271 | hwasan_maybe_emit_frame_base_init () |
4272 | { |
4273 | if (! hwasan_frame_base_init_seq) |
4274 | return; |
4275 | emit_insn_before (hwasan_frame_base_init_seq, parm_birth_insn); |
4276 | parm_birth_insn = hwasan_frame_base_init_seq; |
4277 | } |
4278 | |
4279 | /* Record a compile-time constant size stack variable that HWASAN will need to |
4280 | tag. This record of the range of a stack variable will be used by |
4281 | `hwasan_emit_prologue` to emit the RTL at the start of each frame which will |
4282 | set tags in the shadow memory according to the assigned tag for each object. |
4283 | |
4284 | The range that the object spans in stack space should be described by the |
4285 | bounds `untagged_base + nearest_offset` and |
4286 | `untagged_base + farthest_offset`. |
4287 | `tagged_base` is the base address which contains the "base frame tag" for |
4288 | this frame, and from which the value to address this object with will be |
4289 | calculated. |
4290 | |
4291 | We record the `untagged_base` since the functions in the hwasan library we |
4292 | use to tag memory take pointers without a tag. */ |
4293 | void |
4294 | hwasan_record_stack_var (rtx untagged_base, rtx tagged_base, |
4295 | poly_int64 nearest_offset, poly_int64 farthest_offset) |
4296 | { |
4297 | hwasan_stack_var cur_var; |
4298 | cur_var.untagged_base = untagged_base; |
4299 | cur_var.tagged_base = tagged_base; |
4300 | cur_var.nearest_offset = nearest_offset; |
4301 | cur_var.farthest_offset = farthest_offset; |
4302 | cur_var.tag_offset = hwasan_current_frame_tag (); |
4303 | |
4304 | hwasan_tagged_stack_vars.safe_push (cur_var); |
4305 | } |
4306 | |
4307 | /* Return the RTX representing the farthest extent of the statically allocated |
4308 | stack objects for this frame. If hwasan_frame_base_ptr has not been |
4309 | initialized then we are not storing any static variables on the stack in |
4310 | this frame. In this case we return NULL_RTX to represent that. |
4311 | |
4312 | Otherwise simply return virtual_stack_vars_rtx + frame_offset. */ |
4313 | rtx |
4314 | hwasan_get_frame_extent () |
4315 | { |
4316 | return (hwasan_frame_base_ptr |
4317 | ? plus_constant (Pmode, virtual_stack_vars_rtx, frame_offset) |
4318 | : NULL_RTX); |
4319 | } |
4320 | |
4321 | /* For stack tagging: |
4322 | |
4323 | Increment the frame tag offset modulo the size a tag can represent. */ |
4324 | void |
4325 | hwasan_increment_frame_tag () |
4326 | { |
4327 | uint8_t tag_bits = HWASAN_TAG_SIZE; |
4328 | gcc_assert (HWASAN_TAG_SIZE |
4329 | <= sizeof (hwasan_frame_tag_offset) * CHAR_BIT); |
4330 | hwasan_frame_tag_offset = (hwasan_frame_tag_offset + 1) % (1 << tag_bits); |
4331 | /* The "background tag" of the stack is zero by definition. |
4332 | This is the tag that objects like parameters passed on the stack and |
4333 | spilled registers are given. It is handy to avoid this tag for objects |
4334 | whose tags we decide ourselves, partly to ensure that buffer overruns |
4335 | can't affect these important variables (e.g. saved link register, saved |
4336 | stack pointer etc) and partly to make debugging easier (everything with a |
4337 | tag of zero is space allocated automatically by the compiler). |
4338 | |
4339 | This is not feasible when using random frame tags (the default |
4340 | configuration for hwasan) since the tag for the given frame is randomly |
4341 | chosen at runtime. In order to avoid any tags matching the stack |
4342 | background we would need to decide tag offsets at runtime instead of |
4343 | compile time (and pay the resulting performance cost). |
4344 | |
4345 | When not using random base tags for each frame (i.e. when compiled with |
4346 | `--param hwasan-random-frame-tag=0`) the base tag for each frame is zero. |
4347 | This means the tag that each object gets is equal to the |
4348 | hwasan_frame_tag_offset used in determining it. |
4349 | When this is the case we *can* ensure no object gets the tag of zero by |
4350 | simply ensuring no object has the hwasan_frame_tag_offset of zero. |
4351 | |
4352 | There is the extra complication that we only record the |
4353 | hwasan_frame_tag_offset here (which is the offset from the tag stored in |
4354 | the stack pointer). In the kernel, the tag in the stack pointer is 0xff |
4355 | rather than zero. This does not cause problems since tags of 0xff are |
4356 | never checked in the kernel. As mentioned at the beginning of this |
4357 | comment the background tag of the stack is zero by definition, which means |
4358 | that for the kernel we should skip offsets of both 0 and 1 from the stack |
4359 | pointer. Avoiding the offset of 0 ensures we use a tag which will be |
4360 | checked, avoiding the offset of 1 ensures we use a tag that is not the |
4361 | same as the background. */ |
4362 | if (hwasan_frame_tag_offset == 0 && ! param_hwasan_random_frame_tag) |
4363 | hwasan_frame_tag_offset += 1; |
4364 | if (hwasan_frame_tag_offset == 1 && ! param_hwasan_random_frame_tag |
4365 | && sanitize_flags_p (SANITIZE_KERNEL_HWADDRESS)) |
4366 | hwasan_frame_tag_offset += 1; |
4367 | } |
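/* Worked example of the offset scheme above, assuming HWASAN_TAG_SIZE is 8
   (so offsets wrap modulo 256):

     --param hwasan-random-frame-tag=0, user hwasan: the base tag is 0, so
       offset 0 would reproduce the background tag and is skipped; objects
       get offsets 1, 2, ..., 255, then wrap back to 1.
     --param hwasan-random-frame-tag=0, kernel hwasan: the stack pointer tag
       is 0xff, so offset 0 would give the unchecked tag 0xff and offset 1
       the background tag 0; both are skipped, giving 2, 3, ..., 255, then 2.
     random frame tags (the default): no offsets are skipped, so objects get
       0, 1, 2, ..., 255, 0, ...  */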
4368 | |
4369 | /* Clear internal state for the next function. |
4370 | This function is called before variables on the stack get expanded, in |
4371 | `init_vars_expansion`. */ |
4372 | void |
4373 | hwasan_record_frame_init () |
4374 | { |
4375 | delete asan_used_labels; |
4376 | asan_used_labels = NULL; |
4377 | |
4378 | /* If this isn't the case then some stack variable was recorded *before* |
4379 | hwasan_record_frame_init is called, yet *after* the hwasan prologue for |
4380 | the previous frame was emitted. Such stack variables would not have |
4381 | their shadow stack filled in. */ |
4382 | gcc_assert (hwasan_tagged_stack_vars.is_empty ()); |
4383 | hwasan_frame_base_ptr = NULL_RTX; |
4384 | hwasan_frame_base_init_seq = NULL; |
4385 | |
4386 | /* When not using a random frame tag we can avoid the background stack |
4387 | color which gives the user a little better debug output upon a crash. |
4388 | Meanwhile, when using a random frame tag it will be nice to avoid adding |
4389 | tags for the first object since that is unnecessary extra work. |
4390 | Hence set the initial hwasan_frame_tag_offset to be 0 if using a random |
4391 | frame tag and 1 otherwise. |
4392 | |
4393 | As described in hwasan_increment_frame_tag, in the kernel the stack |
4394 | pointer has the tag 0xff. That means that to avoid 0xff and 0 (the tag |
4395 | which the kernel does not check and the background tag respectively) we |
4396 | start with a tag offset of 2. */ |
4397 | hwasan_frame_tag_offset = param_hwasan_random_frame_tag |
4398 | ? 0 |
4399 | : sanitize_flags_p (SANITIZE_KERNEL_HWADDRESS) ? 2 : 1; |
4400 | } |
4401 | |
4402 | /* For stack tagging: |
4403 | (Emits HWASAN equivalent of what is emitted by |
4404 | `asan_emit_stack_protection`). |
4405 | |
4406 | Emits the extra prologue code to set the shadow stack as required for HWASAN |
4407 | stack instrumentation. |
4408 | |
4409 | Uses the vector of recorded stack variables hwasan_tagged_stack_vars. When |
4410 | this function has completed hwasan_tagged_stack_vars is empty and all |
4411 | objects it had pointed to are deallocated. */ |
4412 | void |
4413 | hwasan_emit_prologue () |
4414 | { |
4415 | /* We need untagged base pointers since libhwasan only accepts untagged |
4416 | pointers in __hwasan_tag_memory. We need the tagged base pointer to obtain |
4417 | the base tag for an offset. */ |
4418 | |
4419 | if (hwasan_tagged_stack_vars.is_empty ()) |
4420 | return; |
4421 | |
4422 | poly_int64 bot = 0, top = 0; |
4423 | for (hwasan_stack_var &cur : hwasan_tagged_stack_vars) |
4424 | { |
4425 | poly_int64 nearest = cur.nearest_offset; |
4426 | poly_int64 farthest = cur.farthest_offset; |
4427 | |
4428 | if (known_ge (nearest, farthest)) |
4429 | { |
4430 | top = nearest; |
4431 | bot = farthest; |
4432 | } |
4433 | else |
4434 | { |
4435 | /* Given how these values are calculated, one must be known greater |
4436 | than the other. */ |
4437 | gcc_assert (known_le (nearest, farthest)); |
4438 | top = farthest; |
4439 | bot = nearest; |
4440 | } |
4441 | poly_int64 size = (top - bot); |
4442 | |
4443 | /* Assert the edge of each variable is aligned to the HWASAN tag granule |
4444 | size. */ |
4445 | gcc_assert (multiple_p (top, HWASAN_TAG_GRANULE_SIZE)); |
4446 | gcc_assert (multiple_p (bot, HWASAN_TAG_GRANULE_SIZE)); |
4447 | gcc_assert (multiple_p (size, HWASAN_TAG_GRANULE_SIZE)); |
4448 | |
4449 | rtx fn = init_one_libfunc ("__hwasan_tag_memory"); |
4450 | rtx base_tag = targetm.memtag.extract_tag (cur.tagged_base, NULL_RTX); |
4451 | rtx tag = plus_constant (QImode, base_tag, cur.tag_offset); |
4452 | tag = hwasan_truncate_to_tag_size (tag, NULL_RTX); |
4453 | |
4454 | rtx bottom = convert_memory_address (ptr_mode, |
4455 | plus_constant (Pmode, |
4456 | cur.untagged_base, |
4457 | bot)); |
4458 | emit_library_call (fn, LCT_NORMAL, VOIDmode, |
4459 | bottom, ptr_mode, |
4460 | tag, QImode, |
4461 | gen_int_mode (size, ptr_mode), ptr_mode); |
4462 | } |
4463 | /* Clear the stack vars, we've emitted the prologue for them all now. */ |
4464 | hwasan_tagged_stack_vars.truncate (0); |
4465 | } |
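/* Per recorded variable, the loop above amounts to emitting a runtime call of
   the following shape (a sketch; base_tag is the tag extracted from
   cur.tagged_base and bot/top are the granule-aligned bounds computed above):

     __hwasan_tag_memory (cur.untagged_base + bot,
			  (base_tag + cur.tag_offset) & ((1 << HWASAN_TAG_SIZE) - 1),
			  top - bot);  */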
4466 | |
4467 | /* For stack tagging: |
4468 | |
4469 | Return RTL insns to clear the tags between DYNAMIC and VARS pointers |
4470 | into the stack. These instructions should be emitted at the end of |
4471 | every function. |
4472 | |
4473 | If `dynamic` is NULL_RTX then no insns are returned. */ |
4474 | rtx_insn * |
4475 | hwasan_emit_untag_frame (rtx dynamic, rtx vars) |
4476 | { |
4477 | if (! dynamic) |
4478 | return NULL; |
4479 | |
4480 | start_sequence (); |
4481 | |
4482 | dynamic = convert_memory_address (ptr_mode, dynamic); |
4483 | vars = convert_memory_address (ptr_mode, vars); |
4484 | |
4485 | rtx top_rtx; |
4486 | rtx bot_rtx; |
4487 | if (FRAME_GROWS_DOWNWARD) |
4488 | { |
4489 | top_rtx = vars; |
4490 | bot_rtx = dynamic; |
4491 | } |
4492 | else |
4493 | { |
4494 | top_rtx = dynamic; |
4495 | bot_rtx = vars; |
4496 | } |
4497 | |
4498 | rtx size_rtx = expand_simple_binop (ptr_mode, MINUS, top_rtx, bot_rtx, |
4499 | NULL_RTX, /* unsignedp = */0, |
4500 | OPTAB_DIRECT); |
4501 | |
4502 | rtx fn = init_one_libfunc ("__hwasan_tag_memory"); |
4503 | emit_library_call (fn, LCT_NORMAL, VOIDmode, |
4504 | bot_rtx, ptr_mode, |
4505 | HWASAN_STACK_BACKGROUND, QImode, |
4506 | size_rtx, ptr_mode); |
4507 | |
4508 | do_pending_stack_adjust (); |
4509 | rtx_insn *insns = get_insns (); |
4510 | end_sequence (); |
4511 | return insns; |
4512 | } |
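/* The returned sequence boils down to a single call that paints the frame
   with the background tag again; e.g. with FRAME_GROWS_DOWNWARD (a sketch):

     __hwasan_tag_memory (dynamic, HWASAN_STACK_BACKGROUND, vars - dynamic);  */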
4513 | |
4514 | /* Needs to be GTY(()), because cgraph_build_static_cdtor may |
4515 | invoke ggc_collect. */ |
4516 | static GTY(()) tree hwasan_ctor_statements; |
4517 | |
4518 | /* Insert module initialization into this TU. This initialization calls the |
4519 | initialization code for libhwasan. */ |
4520 | void |
4521 | hwasan_finish_file (void) |
4522 | { |
4523 | /* Do not emit constructor initialization for the kernel. |
4524 | (the kernel has its own initialization already). */ |
4525 | if (flag_sanitize & SANITIZE_KERNEL_HWADDRESS) |
4526 | return; |
4527 | |
4528 | /* Avoid instrumenting code in the hwasan constructors/destructors. */ |
4529 | flag_sanitize &= ~SANITIZE_HWADDRESS; |
4530 | int priority = MAX_RESERVED_INIT_PRIORITY - 1; |
4531 | tree fn = builtin_decl_implicit (BUILT_IN_HWASAN_INIT); |
4532 | append_to_statement_list (build_call_expr (fn, 0), &hwasan_ctor_statements); |
4533 | cgraph_build_static_cdtor ('I', hwasan_ctor_statements, priority); |
4534 | flag_sanitize |= SANITIZE_HWADDRESS; |
4535 | } |
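/* In effect this adds one static constructor per instrumented translation
   unit, registered just below MAX_RESERVED_INIT_PRIORITY, whose body is a
   single call to __hwasan_init (); the constructor function itself is
   synthesized by cgraph_build_static_cdtor rather than written out here.  */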
4536 | |
4537 | /* For stack tagging: |
4538 | |
4539 | Truncate `tag` to the number of bits that a tag uses (i.e. to |
4540 | HWASAN_TAG_SIZE). Store the result in `target` if it's convenient. */ |
4541 | rtx |
4542 | hwasan_truncate_to_tag_size (rtx tag, rtx target) |
4543 | { |
4544 | gcc_assert (GET_MODE (tag) == QImode); |
4545 | if (HWASAN_TAG_SIZE != GET_MODE_PRECISION (QImode)) |
4546 | { |
4547 | gcc_assert (GET_MODE_PRECISION (QImode) > HWASAN_TAG_SIZE); |
4548 | rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << HWASAN_TAG_SIZE) - 1, |
4549 | QImode); |
4550 | tag = expand_simple_binop (QImode, AND, tag, mask, target, |
4551 | /* unsignedp = */1, OPTAB_WIDEN); |
4552 | gcc_assert (tag); |
4553 | } |
4554 | return tag; |
4555 | } |
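/* Equivalent scalar computation (a sketch; the masking is only reached when
   the tag is narrower than QImode):

     tag &= (1 << HWASAN_TAG_SIZE) - 1;

   e.g. with a hypothetical 4-bit tag, an input value of 0x1b becomes 0x0b.  */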
4556 | |
4557 | /* Construct a function tree for __hwasan_{load,store}{1,2,4,8,16,_n}. |
4558 | IS_STORE is either 1 (for a store) or 0 (for a load). */ |
4559 | static combined_fn |
4560 | hwasan_check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes, |
4561 | int *nargs) |
4562 | { |
4563 | static enum built_in_function check[2][2][6] |
4564 | = { { { BUILT_IN_HWASAN_LOAD1, BUILT_IN_HWASAN_LOAD2, |
4565 | BUILT_IN_HWASAN_LOAD4, BUILT_IN_HWASAN_LOAD8, |
4566 | BUILT_IN_HWASAN_LOAD16, BUILT_IN_HWASAN_LOADN }, |
4567 | { BUILT_IN_HWASAN_STORE1, BUILT_IN_HWASAN_STORE2, |
4568 | BUILT_IN_HWASAN_STORE4, BUILT_IN_HWASAN_STORE8, |
4569 | BUILT_IN_HWASAN_STORE16, BUILT_IN_HWASAN_STOREN } }, |
4570 | { { BUILT_IN_HWASAN_LOAD1_NOABORT, |
4571 | BUILT_IN_HWASAN_LOAD2_NOABORT, |
4572 | BUILT_IN_HWASAN_LOAD4_NOABORT, |
4573 | BUILT_IN_HWASAN_LOAD8_NOABORT, |
4574 | BUILT_IN_HWASAN_LOAD16_NOABORT, |
4575 | BUILT_IN_HWASAN_LOADN_NOABORT }, |
4576 | { BUILT_IN_HWASAN_STORE1_NOABORT, |
4577 | BUILT_IN_HWASAN_STORE2_NOABORT, |
4578 | BUILT_IN_HWASAN_STORE4_NOABORT, |
4579 | BUILT_IN_HWASAN_STORE8_NOABORT, |
4580 | BUILT_IN_HWASAN_STORE16_NOABORT, |
4581 | BUILT_IN_HWASAN_STOREN_NOABORT } } }; |
4582 | if (size_in_bytes == -1) |
4583 | { |
4584 | *nargs = 2; |
4585 | return as_combined_fn (check[recover_p][is_store][5]); |
4586 | } |
4587 | *nargs = 1; |
4588 | int size_log2 = exact_log2 (size_in_bytes); |
4589 | gcc_assert (size_log2 >= 0 && size_log2 <= 5); |
4590 | return as_combined_fn (check[recover_p][is_store][size_log2]); |
4591 | } |
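/* Example of the selection above: a constant 8-byte store with recovery
   disabled has exact_log2 (8) == 3, so check[0][1][3] picks
   BUILT_IN_HWASAN_STORE8 and *nargs is 1; a non-constant length
   (size_in_bytes == -1) picks the _N variant and sets *nargs to 2 so the
   length is passed as a second argument.  */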
4592 | |
4593 | /* Expand the HWASAN_{LOAD,STORE} builtins. */ |
4594 | bool |
4595 | hwasan_expand_check_ifn (gimple_stmt_iterator *iter, bool) |
4596 | { |
4597 | gimple *g = gsi_stmt (*iter); |
4598 | location_t loc = gimple_location (g); |
4599 | bool recover_p; |
4600 | if (flag_sanitize & SANITIZE_USER_HWADDRESS) |
4601 | recover_p = (flag_sanitize_recover & SANITIZE_USER_HWADDRESS) != 0; |
4602 | else |
4603 | recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_HWADDRESS) != 0; |
4604 | |
4605 | HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0)); |
4606 | gcc_assert (flags < ASAN_CHECK_LAST); |
4607 | bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0; |
4608 | bool is_store = (flags & ASAN_CHECK_STORE) != 0; |
4609 | bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0; |
4610 | |
4611 | tree base = gimple_call_arg (g, 1); |
4612 | tree len = gimple_call_arg (g, 2); |
4613 | |
4614 | /* `align` is unused for HWASAN_CHECK, but we pass the argument anyway |
4615 | since that way the arguments match ASAN_CHECK. */ |
4616 | /* HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3)); */ |
4617 | |
4618 | unsigned HOST_WIDE_INT size_in_bytes |
4619 | = is_scalar_access ? tree_to_shwi (len) : -1; |
4620 | |
4621 | gimple_stmt_iterator gsi = *iter; |
4622 | |
4623 | if (!is_non_zero_len) |
4624 | { |
4625 | /* So, the length of the memory area to hwasan-protect is |
4626 | non-constant. Let's guard the generated instrumentation code |
4627 | like: |
4628 | |
4629 | if (len != 0) |
4630 | { |
4631 | // hwasan instrumentation code goes here. |
4632 | } |
4633 | // fallthrough instructions, starting with *ITER. */ |
4634 | |
4635 | g = gimple_build_cond (NE_EXPR, |
4636 | len, |
4637 | build_int_cst (TREE_TYPE (len), 0), |
4638 | NULL_TREE, NULL_TREE); |
4639 | gimple_set_location (g, loc); |
4640 | |
4641 | basic_block then_bb, fallthrough_bb; |
4642 | insert_if_then_before_iter (as_a <gcond *> (g), iter, |
4643 | /*then_more_likely_p=*/true, |
4644 | &then_bb, &fallthrough_bb); |
4645 | /* Note that fallthrough_bb starts with the statement that was |
4646 | pointed to by ITER. */ |
4647 | |
4648 | /* The 'then block' of the 'if (len != 0)' condition is where |
4649 | we'll generate the hwasan instrumentation code now. */ |
4650 | gsi = gsi_last_bb (then_bb); |
4651 | } |
4652 | |
4653 | gimple_seq stmts = NULL; |
4654 | tree base_addr = gimple_build (&stmts, loc, NOP_EXPR, |
4655 | pointer_sized_int_node, base); |
4656 | |
4657 | int nargs = 0; |
4658 | combined_fn fn |
4659 | = hwasan_check_func (is_store, recover_p, size_in_bytes, &nargs); |
4660 | if (nargs == 1) |
4661 | gimple_build (&stmts, loc, fn, void_type_node, base_addr); |
4662 | else |
4663 | { |
4664 | gcc_assert (nargs == 2); |
4665 | tree sz_arg = gimple_build (&stmts, loc, NOP_EXPR, |
4666 | pointer_sized_int_node, len); |
4667 | gimple_build (&stmts, loc, fn, void_type_node, base_addr, sz_arg); |
4668 | } |
4669 | |
4670 | gsi_insert_seq_after (&gsi, stmts, GSI_NEW_STMT); |
4671 | gsi_remove (iter, true); |
4672 | *iter = gsi; |
4673 | return false; |
4674 | } |
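/* For a constant-size scalar load the expansion above rewrites the internal
   call roughly as follows (a sketch of the generated GIMPLE; p_2 and _1 are
   hypothetical SSA names):

     .HWASAN_CHECK (flags, p_2, 8, align);
   becomes
     _1 = (uintptr_t) p_2;     /* cast to pointer_sized_int_node */
     __hwasan_load8 (_1);

   and the whole sequence is wrapped in `if (len != 0)` only when the length
   is not known to be non-zero.  */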
4675 | |
4676 | /* For stack tagging: |
4677 | |
4678 | Dummy: the HWASAN_MARK internal function should only ever be in the code |
4679 | after the sanopt pass. */ |
4680 | bool |
4681 | hwasan_expand_mark_ifn (gimple_stmt_iterator *) |
4682 | { |
4683 | gcc_unreachable (); |
4684 | } |
4685 | |
4686 | bool |
4687 | gate_hwasan () |
4688 | { |
4689 | return hwasan_sanitize_p (); |
4690 | } |
4691 | |
4692 | #include "gt-asan.h" |