From 40dbe847f3767b3c2e70620a4324838b85d0224a Mon Sep 17 00:00:00 2001
From: marxin <mliska@suse.cz>
Date: Tue, 3 May 2016 15:35:22 +0200
Subject: [PATCH 1/2] Introduce -fsanitize-address-use-after-scope
gcc/c-family/ChangeLog:
2016-10-27 Martin Liska <mliska@suse.cz>
* c-warn.c (warn_for_unused_label): Save all labels used
in goto or in &label.
gcc/ChangeLog:
2016-10-27 Martin Liska <mliska@suse.cz>
* asan.c (enum asan_check_flags): Move the enum to header file.
(asan_init_shadow_ptr_types): Make type creation more generic.
(shadow_mem_size): New function.
(asan_emit_stack_protection): Use newly added ASAN_SHADOW_GRANULARITY.
Rewritten stack unpoisoning code.
(build_shadow_mem_access): Add new argument return_address.
(instrument_derefs): Instrument local variables if use after scope
sanitization is enabled.
(asan_store_shadow_bytes): New function.
(asan_expand_mark_ifn): Likewise.
(asan_sanitize_stack_p): Moved here from cfgexpand.c.
* asan.h (enum asan_mark_flags): Moved here from asan.c
(asan_protect_stack_decl): Protect all declaration that need
to live in memory.
(asan_sanitize_use_after_scope): New function.
(asan_no_sanitize_address_p): Likewise.
* cfgexpand.c (partition_stack_vars): Consider
asan_sanitize_use_after_scope in condition.
(expand_stack_vars): Likewise.
* common.opt (-fsanitize-address-use-after-scope): New option.
* doc/invoke.texi (use-after-scope-direct-emission-threshold):
Explain the parameter.
* flag-types.h (enum sanitize_code): Define SANITIZE_USE_AFTER_SCOPE.
* gimplify.c (build_asan_poison_call_expr): New function.
(asan_poison_variable): Likewise.
(gimplify_bind_expr): Generate poisoning/unpoisoning for local
variables that have address taken.
(gimplify_decl_expr): Likewise.
(gimplify_target_expr): Likewise for C++ temporaries.
(sort_by_decl_uid): New function.
(gimplify_expr): Unpoison all variables for a label we can jump
from outside of a scope.
(gimplify_function_tree): Clear asan_poisoned_variables.
* internal-fn.c (expand_ASAN_MARK): New function.
* internal-fn.def (ASAN_MARK): Declare.
* opts.c (finish_options): Handle -fstack-reuse if
-fsanitize-address-use-after-scope is enabled.
(common_handle_option): Enable address sanitization if
-fsanitize-address-use-after-scope is enabled.
* params.def (PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD):
New parameter.
* params.h: Likewise.
* sancov.c (pass_sanopt::execute): Handle IFN_ASAN_MARK.
* sanitizer.def: Define __asan_poison_stack_memory and
__asan_unpoison_stack_memory functions.
---
gcc/asan.c | 287 +++++++++++++++++++++++++++++++++++++++++---------
gcc/asan.h | 66 ++++++++++--
gcc/c-family/c-warn.c | 9 +-
gcc/cfgexpand.c | 18 +---
gcc/common.opt | 3 +
gcc/doc/invoke.texi | 15 ++-
gcc/flag-types.h | 2 +-
gcc/gimplify.c | 198 +++++++++++++++++++++++++++++++---
gcc/internal-fn.c | 9 ++
gcc/internal-fn.def | 1 +
gcc/opts.c | 27 ++++-
gcc/params.def | 6 ++
gcc/params.h | 2 +
gcc/sanitizer.def | 4 +
gcc/sanopt.c | 3 +
15 files changed, 552 insertions(+), 98 deletions(-)
@@ -245,6 +245,13 @@ static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;
+/* Set of variable declarations that are going to be guarded by
+ use-after-scope sanitizer. */
+
+static hash_set<tree> *asan_handled_variables = NULL;
+
+hash_set <tree> *asan_used_labels = NULL;
+
/* Sets shadow offset to value in string VAL. */
bool
@@ -287,6 +294,14 @@ set_sanitized_sections (const char *sections)
}
}
+bool
+asan_sanitize_stack_p (void)
+{
+ return ((flag_sanitize & SANITIZE_ADDRESS)
+ && ASAN_STACK
+ && !asan_no_sanitize_address_p ());
+}
+
/* Checks whether section SEC should be sanitized. */
static bool
@@ -315,22 +330,13 @@ asan_shadow_offset ()
alias_set_type asan_shadow_set = -1;
-/* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
+/* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate
alias set is used for all shadow memory accesses. */
-static GTY(()) tree shadow_ptr_types[2];
+static GTY(()) tree shadow_ptr_types[3];
/* Decl for __asan_option_detect_stack_use_after_return. */
static GTY(()) tree asan_detect_stack_use_after_return;
-/* Various flags for Asan builtins. */
-enum asan_check_flags
-{
- ASAN_CHECK_STORE = 1 << 0,
- ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
- ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
- ASAN_CHECK_LAST = 1 << 3
-};
-
/* Hashtable support for memory references used by gimple
statements. */
@@ -933,12 +939,16 @@ static void
asan_init_shadow_ptr_types (void)
{
asan_shadow_set = new_alias_set ();
- shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
- TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
- shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
- shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
- TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
- shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
+ tree types[3] = { signed_char_type_node, short_integer_type_node,
+ integer_type_node };
+
+ for (unsigned i = 0; i < 3; i++)
+ {
+ shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
+ TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
+ shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
+ }
+
initialize_sanitizer_builtins ();
}
@@ -1022,6 +1032,15 @@ asan_function_start (void)
current_function_funcdef_no);
}
+/* Return number of shadow bytes that are occupied by a local variable
+ of SIZE bytes. */
+
+static unsigned HOST_WIDE_INT
+shadow_mem_size (unsigned HOST_WIDE_INT size)
+{
+ return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
+}
+
/* Insert code to protect stack vars. The prologue sequence should be emitted
directly, epilogue sequence returned. BASE is the register holding the
stack base, against which OFFSETS array offsets are relative to, OFFSETS
@@ -1047,7 +1066,7 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
HOST_WIDE_INT base_offset = offsets[length - 1];
HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
- HOST_WIDE_INT last_offset, last_size;
+ HOST_WIDE_INT last_offset;
int l;
unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
tree str_cst, decl, id;
@@ -1205,10 +1224,10 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
(aoff - prev_offset)
>> ASAN_SHADOW_SHIFT);
prev_offset = aoff;
- for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
+ for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
if (aoff < offset)
{
- if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
+ if (aoff < offset - (HOST_WIDE_INT)ASAN_SHADOW_GRANULARITY + 1)
shadow_bytes[i] = 0;
else
shadow_bytes[i] = offset - aoff;
@@ -1282,35 +1301,66 @@ asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
if (STRICT_ALIGNMENT)
set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
- prev_offset = base_offset;
+ /* Unpoison shadow memory of a stack at the very end of a function.
+ As we're poisoning stack variables at the end of their scope,
+ shadow memory must be properly unpoisoned here. The easiest approach
+ would be to collect all variables that should not be unpoisoned and
+ we unpoison shadow memory of the whole stack except ranges
+ occupied by these variables. */
last_offset = base_offset;
- last_size = 0;
- for (l = length; l; l -= 2)
+ HOST_WIDE_INT current_offset = last_offset;
+ if (length)
{
- offset = base_offset + ((offsets[l - 1] - base_offset)
- & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
- if (last_offset + last_size != offset)
+ HOST_WIDE_INT var_end_offset = 0;
+ HOST_WIDE_INT stack_start = offsets[length - 1];
+ gcc_assert (last_offset == stack_start);
+
+ for (int l = length - 2; l > 0; l -= 2)
{
- shadow_mem = adjust_address (shadow_mem, VOIDmode,
- (last_offset - prev_offset)
- >> ASAN_SHADOW_SHIFT);
- prev_offset = last_offset;
- asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
- last_offset = offset;
- last_size = 0;
+ HOST_WIDE_INT var_offset = offsets[l];
+ current_offset = var_offset;
+ var_end_offset = offsets[l - 1];
+ HOST_WIDE_INT rounded_size = ROUND_UP (var_end_offset - var_offset,
+ BITS_PER_UNIT);
+
+ /* Should we unpoison the variable? */
+ if (asan_handled_variables != NULL
+ && asan_handled_variables->contains (decl))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ const char *n = (DECL_NAME (decl)
+ ? IDENTIFIER_POINTER (DECL_NAME (decl))
+ : "<unknown>");
+ fprintf (dump_file, "Unpoisoning shadow stack for variable: "
+ "%s (%" PRId64 "B)\n", n,
+ var_end_offset - var_offset);
+ }
+
+ unsigned HOST_WIDE_INT s
+ = shadow_mem_size (current_offset - last_offset);
+ asan_clear_shadow (shadow_mem, s);
+ HOST_WIDE_INT shift
+ = shadow_mem_size (current_offset - last_offset + rounded_size);
+ shadow_mem = adjust_address (shadow_mem, VOIDmode, shift);
+ last_offset = var_offset + rounded_size;
+ current_offset = last_offset;
+ }
+
}
- last_size += base_offset + ((offsets[l - 2] - base_offset)
- & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
- - offset;
- }
- if (last_size)
- {
- shadow_mem = adjust_address (shadow_mem, VOIDmode,
- (last_offset - prev_offset)
- >> ASAN_SHADOW_SHIFT);
- asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
+
+ /* Handle last redzone. */
+ current_offset = offsets[0];
+ asan_clear_shadow (shadow_mem,
+ shadow_mem_size (current_offset - last_offset));
}
+ /* Clean up the set of instrumented stack variables. */
+ delete asan_handled_variables;
+ asan_handled_variables = NULL;
+ delete asan_used_labels;
+ asan_used_labels = NULL;
+
do_pending_stack_adjust ();
if (lab)
emit_label (lab);
@@ -1590,12 +1640,14 @@ insert_if_then_before_iter (gcond *cond,
gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
-/* Build
- (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
+/* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
+ If RETURN_ADDRESS is set to true, return memory location instead
+ of a value in the shadow memory. */
static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
- tree base_addr, tree shadow_ptr_type)
+ tree base_addr, tree shadow_ptr_type,
+ bool return_address = false)
{
tree t, uintptr_type = TREE_TYPE (base_addr);
tree shadow_type = TREE_TYPE (shadow_ptr_type);
@@ -1618,11 +1670,15 @@ build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
gimple_set_location (g, location);
gsi_insert_after (gsi, g, GSI_NEW_STMT);
- t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
- build_int_cst (shadow_ptr_type, 0));
- g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
- gimple_set_location (g, location);
- gsi_insert_after (gsi, g, GSI_NEW_STMT);
+ if (!return_address)
+ {
+ t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
+ build_int_cst (shadow_ptr_type, 0));
+ g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
+ gimple_set_location (g, location);
+ gsi_insert_after (gsi, g, GSI_NEW_STMT);
+ }
+
return gimple_assign_lhs (g);
}
@@ -1826,7 +1882,9 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
{
/* Automatic vars in the current function will be always
accessible. */
- if (decl_function_context (inner) == current_function_decl)
+ if (decl_function_context (inner) == current_function_decl
+ && (!asan_sanitize_use_after_scope ()
+ || !TREE_ADDRESSABLE (inner)))
return;
}
/* Always instrument external vars, they might be dynamically
@@ -2576,6 +2634,131 @@ asan_finish_file (void)
flag_sanitize |= SANITIZE_ADDRESS;
}
+/* Poison or unpoison (depending on IS_CLOBBER variable) shadow memory based
+ on SHADOW address. Newly added statements will be added to ITER with
+ given location LOC. We mark SIZE bytes in shadow memory, where
+ LAST_CHUNK_SIZE is greater than zero in situation where we are at the
+ end of a variable. */
+
+static void
+asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
+ tree shadow,
+ unsigned HOST_WIDE_INT base_addr_offset,
+ bool is_clobber, unsigned size,
+ unsigned last_chunk_size)
+{
+ tree shadow_ptr_type;
+
+ switch (size)
+ {
+ case 1:
+ shadow_ptr_type = shadow_ptr_types[0];
+ break;
+ case 2:
+ shadow_ptr_type = shadow_ptr_types[1];
+ break;
+ case 4:
+ shadow_ptr_type = shadow_ptr_types[2];
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
+ unsigned HOST_WIDE_INT val = 0;
+ for (unsigned i = 0; i < size; ++i)
+ {
+ unsigned char shadow_c = c;
+ if (i == size - 1 && last_chunk_size && !is_clobber)
+ shadow_c = last_chunk_size;
+ val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
+ }
+
+ /* Handle last chunk in unpoisoning. */
+ tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
+
+ tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
+ build_int_cst (shadow_ptr_type, base_addr_offset));
+
+ gimple *g = gimple_build_assign (dest, magic);
+ gimple_set_location (g, loc);
+ gsi_insert_after (iter, g, GSI_NEW_STMT);
+}
+
+/* Expand the ASAN_MARK builtins. */
+
+bool
+asan_expand_mark_ifn (gimple_stmt_iterator *iter)
+{
+ gimple *g = gsi_stmt (*iter);
+ location_t loc = gimple_location (g);
+ HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
+ gcc_assert (flags < ASAN_MARK_LAST);
+ bool is_clobber = (flags & ASAN_MARK_CLOBBER) != 0;
+
+ tree base = gimple_call_arg (g, 1);
+ gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
+ tree decl = TREE_OPERAND (base, 0);
+ gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
+ if (asan_handled_variables == NULL)
+ asan_handled_variables = new hash_set<tree> (16);
+ asan_handled_variables->add (decl);
+ tree len = gimple_call_arg (g, 2);
+
+ gcc_assert (tree_fits_shwi_p (len));
+ unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
+ gcc_assert (size_in_bytes);
+
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ NOP_EXPR, base);
+ gimple_set_location (g, loc);
+ gsi_replace (iter, g, false);
+ tree base_addr = gimple_assign_lhs (g);
+
+ /* Generate direct emission if size_in_bytes is small. */
+ if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
+ {
+ unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);
+
+ tree shadow = build_shadow_mem_access (iter, loc, base_addr,
+ shadow_ptr_types[0], true);
+
+ for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
+ {
+ unsigned size = 1;
+ if (shadow_size - offset >= 4)
+ size = 4;
+ else if (shadow_size - offset >= 2)
+ size = 2;
+
+ unsigned HOST_WIDE_INT last_chunk_size = 0;
+ unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
+ if (s > size_in_bytes)
+ last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
+
+ asan_store_shadow_bytes (iter, loc, shadow, offset, is_clobber,
+ size, last_chunk_size);
+ offset += size;
+ }
+ }
+ else
+ {
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ NOP_EXPR, len);
+ gimple_set_location (g, loc);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ tree sz_arg = gimple_assign_lhs (g);
+
+ tree fun = builtin_decl_implicit (is_clobber ? BUILT_IN_ASAN_CLOBBER_N
+ : BUILT_IN_ASAN_UNCLOBBER_N);
+ g = gimple_build_call (fun, 2, base_addr, sz_arg);
+ gimple_set_location (g, loc);
+ gsi_insert_after (iter, g, GSI_NEW_STMT);
+ }
+
+ return false;
+}
+
/* Expand the ASAN_{LOAD,STORE} builtins. */
bool
@@ -29,6 +29,7 @@ extern bool asan_protect_global (tree);
extern void initialize_sanitizer_builtins (void);
extern tree asan_dynamic_init_call (bool);
extern bool asan_expand_check_ifn (gimple_stmt_iterator *, bool);
+extern bool asan_expand_mark_ifn (gimple_stmt_iterator *);
extern gimple_stmt_iterator create_cond_insert_point
(gimple_stmt_iterator *, bool, bool, bool, basic_block *, basic_block *);
@@ -36,9 +37,14 @@ extern gimple_stmt_iterator create_cond_insert_point
/* Alias set for accessing the shadow memory. */
extern alias_set_type asan_shadow_set;
+/* Hash set of labels that are either used in a goto, or their address
+ has been taken. */
+extern hash_set <tree> *asan_used_labels;
+
/* Shadow memory is found at
(address >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
#define ASAN_SHADOW_SHIFT 3
+#define ASAN_SHADOW_GRANULARITY (1UL << ASAN_SHADOW_SHIFT)
/* Red zone size, stack and global variables are padded by ASAN_RED_ZONE_SIZE
up to 2 * ASAN_RED_ZONE_SIZE - 1 bytes. */
@@ -50,22 +56,32 @@ extern alias_set_type asan_shadow_set;
the frame. Middle is for padding in between variables, right is
above the last protected variable and partial immediately after variables
up to ASAN_RED_ZONE_SIZE alignment. */
-#define ASAN_STACK_MAGIC_LEFT 0xf1
-#define ASAN_STACK_MAGIC_MIDDLE 0xf2
-#define ASAN_STACK_MAGIC_RIGHT 0xf3
-#define ASAN_STACK_MAGIC_PARTIAL 0xf4
-#define ASAN_STACK_MAGIC_USE_AFTER_RET 0xf5
+#define ASAN_STACK_MAGIC_LEFT 0xf1
+#define ASAN_STACK_MAGIC_MIDDLE 0xf2
+#define ASAN_STACK_MAGIC_RIGHT 0xf3
+#define ASAN_STACK_MAGIC_PARTIAL 0xf4
+#define ASAN_STACK_MAGIC_USE_AFTER_RET 0xf5
+#define ASAN_STACK_MAGIC_USE_AFTER_SCOPE 0xf8
#define ASAN_STACK_FRAME_MAGIC 0x41b58ab3
#define ASAN_STACK_RETIRED_MAGIC 0x45e0360e
-/* Return true if DECL should be guarded on the stack. */
-
-static inline bool
-asan_protect_stack_decl (tree decl)
+/* Various flags for Asan builtins. */
+enum asan_check_flags
{
- return DECL_P (decl) && !DECL_ARTIFICIAL (decl);
-}
+ ASAN_CHECK_STORE = 1 << 0,
+ ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
+ ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
+ ASAN_CHECK_LAST = 1 << 3
+};
+
+/* Flags for Asan mark builtins. */
+enum asan_mark_flags
+{
+ ASAN_MARK_CLOBBER = 1 << 0,
+ ASAN_MARK_UNCLOBBER = 1 << 1,
+ ASAN_MARK_LAST = 1 << 2
+};
/* Return the size of padding needed to insert after a protected
decl of SIZE. */
@@ -81,6 +97,8 @@ extern bool set_asan_shadow_offset (const char *);
extern void set_sanitized_sections (const char *);
+extern bool asan_sanitize_stack_p (void);
+
/* Return TRUE if builtin with given FCODE will be intercepted by
libasan. */
@@ -105,4 +123,30 @@ asan_intercepted_p (enum built_in_function fcode)
|| fcode == BUILT_IN_STRNCMP
|| fcode == BUILT_IN_STRNCPY;
}
+
+/* Return TRUE if we should instrument for use-after-scope sanity checking. */
+
+static inline bool
+asan_sanitize_use_after_scope (void)
+{
+ return (flag_sanitize_address_use_after_scope && asan_sanitize_stack_p ());
+}
+
+static inline bool
+asan_no_sanitize_address_p (void)
+{
+ return lookup_attribute ("no_sanitize_address",
+ DECL_ATTRIBUTES (current_function_decl));
+}
+
+/* Return true if DECL should be guarded on the stack. */
+
+static inline bool
+asan_protect_stack_decl (tree decl)
+{
+ return DECL_P (decl)
+ && (!DECL_ARTIFICIAL (decl)
+ || (asan_sanitize_use_after_scope () && TREE_ADDRESSABLE (decl)));
+}
+
#endif /* TREE_ASAN */
@@ -28,7 +28,7 @@ along with GCC; see the file COPYING3. If not see
#include "tm_p.h"
#include "diagnostic.h"
#include "intl.h"
-
+#include "asan.h"
/* Print a warning if a constant expression had overflow in folding.
Invoke this function on every expression that the language
@@ -1627,6 +1627,13 @@ warn_for_unused_label (tree label)
else
warning (OPT_Wunused_label, "label %q+D declared but not defined", label);
}
+ else if (asan_sanitize_use_after_scope ())
+ {
+ if (asan_used_labels == NULL)
+ asan_used_labels = new hash_set<tree> (16);
+
+ asan_used_labels->add (label);
+ }
}
/* Warn for division by zero according to the value of DIVISOR. LOC
@@ -868,18 +868,6 @@ union_stack_vars (size_t a, size_t b)
}
}
-/* Return true if the current function should have its stack frame
- protected by address sanitizer. */
-
-static inline bool
-asan_sanitize_stack_p (void)
-{
- return ((flag_sanitize & SANITIZE_ADDRESS)
- && ASAN_STACK
- && !lookup_attribute ("no_sanitize_address",
- DECL_ATTRIBUTES (current_function_decl)));
-}
-
/* A subroutine of expand_used_vars. Binpack the variables into
partitions constrained by the interference graph. The overall
algorithm used is as follows:
@@ -941,7 +929,8 @@ partition_stack_vars (void)
sizes, as the shorter vars wouldn't be adequately protected.
Don't do that for "large" (unsupported) alignment objects,
those aren't protected anyway. */
- if (asan_sanitize_stack_p () && isize != jsize
+ if ((asan_sanitize_stack_p ())
+ && isize != jsize
&& ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
break;
@@ -1128,7 +1117,8 @@ expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
{
base = virtual_stack_vars_rtx;
- if (asan_sanitize_stack_p () && pred)
+ if ((asan_sanitize_stack_p ())
+ && pred)
{
HOST_WIDE_INT prev_offset
= align_base (frame_offset,
@@ -967,6 +967,9 @@ fsanitize-recover
Common Report
This switch is deprecated; use -fsanitize-recover= instead.
+fsanitize-address-use-after-scope
+Common Driver Report Var(flag_sanitize_address_use_after_scope) Init(0)
+
fsanitize-undefined-trap-on-error
Common Driver Report Var(flag_sanitize_undefined_trap_on_error) Init(0)
Use trap instead of a library function for undefined behavior sanitization.
@@ -10216,6 +10216,10 @@ is greater or equal to this number, use callbacks instead of inline checks.
E.g. to disable inline code use
@option{--param asan-instrumentation-with-call-threshold=0}.
+@item use-after-scope-direct-emission-threshold
+If the size of a local variable in bytes is smaller or equal to this number,
+direct instruction emission is utilized to poison and unpoison local variables.
+
@item chkp-max-ctor-size
Static constructors generated by Pointer Bounds Checker may become very
large and significantly increase compile time at optimization level
@@ -10426,6 +10430,7 @@ thread-safe code.
Enable AddressSanitizer, a fast memory error detector.
Memory access instructions are instrumented to detect
out-of-bounds and use-after-free bugs.
+The option enables @option{-fsanitize-address-use-after-scope}.
See @uref{https://github.com/google/sanitizers/wiki/AddressSanitizer} for
more details. The run-time behavior can be influenced using the
@env{ASAN_OPTIONS} environment variable. When set to @code{help=1},
@@ -10437,6 +10442,7 @@ The option can't be combined with @option{-fsanitize=thread}.
@item -fsanitize=kernel-address
@opindex fsanitize=kernel-address
Enable AddressSanitizer for Linux kernel.
+The option enables @option{-fsanitize-address-use-after-scope}.
See @uref{https://github.com/google/kasan/wiki} for more details.
@item -fsanitize=thread
@@ -10636,8 +10642,8 @@ except for @option{-fsanitize=unreachable} and @option{-fsanitize=return}),
@option{-fsanitize=float-cast-overflow}, @option{-fsanitize=float-divide-by-zero},
@option{-fsanitize=bounds-strict},
@option{-fsanitize=kernel-address} and @option{-fsanitize=address}.
-For these sanitizers error recovery is turned on by default, except @option{-fsanitize=address},
-for which this feature is experimental.
+For these sanitizers error recovery is turned on by default,
+except @option{-fsanitize=address}, for which this feature is experimental.
@option{-fsanitize-recover=all} and @option{-fno-sanitize-recover=all} is also
accepted, the former enables recovery for all sanitizers that support it,
the latter disables recovery for all sanitizers that support it.
@@ -10659,6 +10665,11 @@ Similarly @option{-fno-sanitize-recover} is equivalent to
-fno-sanitize-recover=undefined,float-cast-overflow,float-divide-by-zero,bounds-strict
@end smallexample
+@item -fsanitize-address-use-after-scope
+@opindex fsanitize-address-use-after-scope
+Enable sanitization of local variables to detect use-after-scope bugs.
+The option sets @option{-fstack-reuse} to @samp{none}.
+
@item -fsanitize-undefined-trap-on-error
@opindex fsanitize-undefined-trap-on-error
The @option{-fsanitize-undefined-trap-on-error} option instructs the compiler to
@@ -237,7 +237,7 @@ enum sanitize_code {
| SANITIZE_RETURNS_NONNULL_ATTRIBUTE
| SANITIZE_OBJECT_SIZE | SANITIZE_VPTR,
SANITIZE_NONDEFAULT = SANITIZE_FLOAT_DIVIDE | SANITIZE_FLOAT_CAST
- | SANITIZE_BOUNDS_STRICT
+ | SANITIZE_BOUNDS_STRICT,
};
/* flag_vtable_verify initialization levels. */
@@ -59,6 +59,10 @@ along with GCC; see the file COPYING3. If not see
#include "gimple-walk.h"
#include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
+#include "asan.h"
+
+/* Hash set of poisoned variables in a bind expr. */
+static hash_set<tree> *asan_poisoned_variables = NULL;
enum gimplify_omp_var_data
{
@@ -1088,6 +1092,79 @@ build_stack_save_restore (gcall **save, gcall **restore)
1, tmp_var);
}
+/* Generate IFN_ASAN_MARK call that poisons shadow memory of a DECL variable. */
+
+static tree
+build_asan_poison_call_expr (tree decl)
+{
+ /* Do not poison variables that have size equal to zero. */
+ tree unit_size = DECL_SIZE_UNIT (decl);
+ if (zerop (unit_size))
+ return NULL_TREE;
+
+ tree base = build_fold_addr_expr (decl);
+
+ return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
+ void_type_node, 3,
+ build_int_cst (integer_type_node,
+ ASAN_MARK_CLOBBER),
+ base, unit_size);
+}
+
+/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
+ on POISON flag, shadow memory of a DECL variable. The call will be
+ put on location identified by IT iterator, where BEFORE flag drives
+ position where the stmt will be put. */
+
+static void
+asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
+ bool before)
+{
+ /* When within an OMP context, do not emit ASAN_MARK internal fns. */
+ if (gimplify_omp_ctxp)
+ return;
+
+ tree unit_size = DECL_SIZE_UNIT (decl);
+ tree base = build_fold_addr_expr (decl);
+
+ /* Do not poison variables that have size equal to zero. */
+ if (zerop (unit_size))
+ return;
+
+ /* It's necessary to have all stack variables aligned to ASAN granularity
+ bytes. */
+ if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
+ SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
+
+ HOST_WIDE_INT flags = poison ? ASAN_MARK_CLOBBER : ASAN_MARK_UNCLOBBER;
+
+ gimple *g
+ = gimple_build_call_internal (IFN_ASAN_MARK, 3,
+ build_int_cst (integer_type_node, flags),
+ base, unit_size);
+
+ if (before)
+ gsi_insert_before (it, g, GSI_NEW_STMT);
+ else
+ gsi_insert_after (it, g, GSI_NEW_STMT);
+}
+
+/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
+ either poisons or unpoisons a DECL. Created statement is appended
+ to SEQ_P gimple sequence. */
+
+static void
+asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
+{
+ gimple_stmt_iterator it = gsi_last (*seq_p);
+ bool before = false;
+
+ if (gsi_end_p (it))
+ before = true;
+
+ asan_poison_variable (decl, poison, &it, before);
+}
+
/* Gimplify a BIND_EXPR. Just voidify and recurse. */
static enum gimplify_status
@@ -1231,6 +1308,13 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
}
}
}
+
+ if (asan_poisoned_variables != NULL
+ && asan_poisoned_variables->contains (t))
+ {
+ asan_poisoned_variables->remove (t);
+ asan_poison_variable (t, true, &cleanup);
+ }
}
if (ret_clauses)
@@ -1475,13 +1559,27 @@ gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
if (VAR_P (decl) && !DECL_EXTERNAL (decl))
{
tree init = DECL_INITIAL (decl);
+ bool is_vla = false;
if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
|| (!TREE_STATIC (decl)
&& flag_stack_check == GENERIC_STACK_CHECK
&& compare_tree_int (DECL_SIZE_UNIT (decl),
STACK_CHECK_MAX_VAR_SIZE) > 0))
- gimplify_vla_decl (decl, seq_p);
+ {
+ gimplify_vla_decl (decl, seq_p);
+ is_vla = true;
+ }
+
+ if (asan_sanitize_use_after_scope ()
+ && !asan_no_sanitize_address_p ()
+ && !is_vla
+ && TREE_ADDRESSABLE (decl)
+ && !TREE_STATIC (decl))
+ {
+ asan_poisoned_variables->add (decl);
+ asan_poison_variable (decl, false, seq_p);
+ }
/* Some front ends do not explicitly declare all anonymous
artificial variables. We compensate here by declaring the
@@ -6165,6 +6263,9 @@ gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
tree init = TARGET_EXPR_INITIAL (targ);
enum gimplify_status ret;
+ bool unpoison_empty_seq = false;
+ gimple_stmt_iterator unpoison_it;
+
if (init)
{
tree cleanup = NULL_TREE;
@@ -6178,7 +6279,14 @@ gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
gimplify_vla_decl (temp, pre_p);
}
else
- gimple_add_tmp_var (temp);
+ {
+ /* Save location where we need to place unpoisoning. It's possible
+ that the variable will later turn out to need to live in memory. */
+ unpoison_it = gsi_last (*pre_p);
+ unpoison_empty_seq = gsi_end_p (unpoison_it);
+
+ gimple_add_tmp_var (temp);
+ }
/* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
expression is supposed to initialize the slot. */
@@ -6214,20 +6322,34 @@ gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
/* Add a clobber for the temporary going out of scope, like
gimplify_bind_expr. */
if (gimplify_ctxp->in_cleanup_point_expr
- && needs_to_live_in_memory (temp)
- && flag_stack_reuse == SR_ALL)
- {
- tree clobber = build_constructor (TREE_TYPE (temp),
- NULL);
- TREE_THIS_VOLATILE (clobber) = true;
- clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
- if (cleanup)
- cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
- clobber);
- else
- cleanup = clobber;
- }
+ && needs_to_live_in_memory (temp))
+ {
+ if (flag_stack_reuse == SR_ALL)
+ {
+ tree clobber = build_constructor (TREE_TYPE (temp),
+ NULL);
+ TREE_THIS_VOLATILE (clobber) = true;
+ clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
+ if (cleanup)
+ cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
+ clobber);
+ else
+ cleanup = clobber;
+ }
+ if (asan_sanitize_use_after_scope ())
+ {
+ tree asan_cleanup = build_asan_poison_call_expr (temp);
+ if (asan_cleanup)
+ {
+ if (unpoison_empty_seq)
+ unpoison_it = gsi_start (*pre_p);
+ asan_poison_variable (temp, false, &unpoison_it,
+ unpoison_empty_seq);
+ gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
+ }
+ }
+ }
if (cleanup)
gimple_push_cleanup (temp, cleanup, false, pre_p);
@@ -10734,6 +10856,25 @@ gimplify_omp_ordered (tree expr, gimple_seq body)
return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
+/* Sort pair of VAR_DECLs A and B by DECL_UID. */
+
+static int
+sort_by_decl_uid (const void *a, const void *b)
+{
+ const tree *t1 = (const tree *)a;
+ const tree *t2 = (const tree *)b;
+
+ int uid1 = DECL_UID (*t1);
+ int uid2 = DECL_UID (*t2);
+
+ if (uid1 < uid2)
+ return -1;
+ else if (uid1 > uid2)
+ return 1;
+ else
+ return 0;
+}
+
/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
expression produces a value to be used as an operand inside a GIMPLE
statement, the value will be stored back in *EXPR_P. This value will
@@ -10825,6 +10966,7 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
location_t saved_location;
enum gimplify_status ret;
gimple_stmt_iterator pre_last_gsi, post_last_gsi;
+ tree label;
save_expr = *expr_p;
if (save_expr == NULL_TREE)
@@ -11240,6 +11382,29 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
case LABEL_EXPR:
ret = gimplify_label_expr (expr_p, pre_p);
+ label = LABEL_EXPR_LABEL (*expr_p);
+ gcc_assert (decl_function_context (label) == current_function_decl);
+
+ /* If the label is used in a goto statement, or address of the label
+ is taken, we need to unpoison all variables that were seen so far.
+ Doing so would prevent us from reporting false positives. */
+ if (asan_sanitize_use_after_scope ()
+ && asan_used_labels != NULL
+ && asan_used_labels->contains (label))
+ {
+ unsigned c = asan_poisoned_variables->elements ();
+ auto_vec<tree> sorted_variables (c);
+
+ for (hash_set<tree>::iterator it
+ = asan_poisoned_variables->begin ();
+ it != asan_poisoned_variables->end (); ++it)
+ sorted_variables.safe_push (*it);
+
+ sorted_variables.qsort (sort_by_decl_uid);
+
+ for (unsigned i = 0; i < sorted_variables.length (); ++i)
+ asan_poison_variable (sorted_variables[i], false, pre_p);
+ }
break;
case CASE_LABEL_EXPR:
@@ -12337,7 +12502,10 @@ gimplify_function_tree (tree fndecl)
&& !needs_to_live_in_memory (ret))
DECL_GIMPLE_REG_P (ret) = 1;
+ asan_poisoned_variables = new hash_set<tree> ();
bind = gimplify_body (fndecl, true);
+ delete asan_poisoned_variables;
+ asan_poisoned_variables = NULL;
/* The tree body of the function is no longer needed, replace it
with the new GIMPLE body. */
@@ -237,6 +237,15 @@ expand_ASAN_CHECK (internal_fn, gcall *)
gcc_unreachable ();
}
+/* This should get expanded in the sanopt pass. */
+
+static void
+expand_ASAN_MARK (internal_fn, gcall *)
+{
+ gcc_unreachable ();
+}
+
+
/* This should get expanded in the tsan pass. */
static void
@@ -158,6 +158,7 @@ DEF_INTERNAL_FN (UBSAN_OBJECT_SIZE, ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (ABNORMAL_DISPATCHER, ECF_NORETURN, NULL)
DEF_INTERNAL_FN (BUILTIN_EXPECT, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (ASAN_CHECK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW, ".R...")
+DEF_INTERNAL_FN (ASAN_MARK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW, ".R..")
DEF_INTERNAL_FN (ADD_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (SUB_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (MUL_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
@@ -978,6 +978,25 @@ finish_options (struct gcc_options *opts, struct gcc_options *opts_set,
opts->x_flag_aggressive_loop_optimizations = 0;
opts->x_flag_strict_overflow = 0;
}
+
+ /* Enable -fsanitize-address-use-after-scope if address sanitizer is
+ enabled. */
+ if (opts->x_flag_sanitize
+ && !opts_set->x_flag_sanitize_address_use_after_scope)
+ opts->x_flag_sanitize_address_use_after_scope = true;
+
+ /* Force -fstack-reuse=none in case -fsanitize-address-use-after-scope
+ is enabled. */
+ if (opts->x_flag_sanitize_address_use_after_scope)
+ {
+ if (opts->x_flag_stack_reuse != SR_NONE
+ && opts_set->x_flag_stack_reuse != SR_NONE)
+ error_at (loc,
+ "-fsanitize-address-use-after-scope requires "
+ "-fstack-reuse=none option");
+
+ opts->x_flag_stack_reuse = SR_NONE;
+ }
}
#define LEFT_COLUMN 27
@@ -1451,8 +1470,8 @@ const struct sanitizer_opts_s sanitizer_opts[] =
{
#define SANITIZER_OPT(name, flags, recover) \
{ #name, flags, sizeof #name - 1, recover }
- SANITIZER_OPT (address, SANITIZE_ADDRESS | SANITIZE_USER_ADDRESS, true),
- SANITIZER_OPT (kernel-address, SANITIZE_ADDRESS | SANITIZE_KERNEL_ADDRESS,
+ SANITIZER_OPT (address, (SANITIZE_ADDRESS | SANITIZE_USER_ADDRESS), true),
+ SANITIZER_OPT (kernel-address, (SANITIZE_ADDRESS | SANITIZE_KERNEL_ADDRESS),
true),
SANITIZER_OPT (thread, SANITIZE_THREAD, false),
SANITIZER_OPT (leak, SANITIZE_LEAK, false),
@@ -1780,6 +1799,10 @@ common_handle_option (struct gcc_options *opts,
/* Deferred. */
break;
+ case OPT_fsanitize_address_use_after_scope:
+ opts->x_flag_sanitize_address_use_after_scope = value;
+ break;
+
case OPT_fsanitize_recover:
if (value)
opts->x_flag_sanitize_recover
@@ -1156,6 +1156,12 @@ DEFPARAM (PARAM_ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD,
"in function becomes greater or equal to this number.",
7000, 0, INT_MAX)
+DEFPARAM (PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD,
+ "use-after-scope-direct-emission-threshold",
+	  "Use direct poisoning/unpoisoning instructions for variables "
+ "smaller or equal to this number.",
+ 256, 0, INT_MAX)
+
DEFPARAM (PARAM_UNINIT_CONTROL_DEP_ATTEMPTS,
"uninit-control-dep-attempts",
"Maximum number of nested calls to search for control dependencies "
@@ -244,5 +244,7 @@ extern void init_param_values (int *params);
PARAM_VALUE (PARAM_ASAN_USE_AFTER_RETURN)
#define ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD \
PARAM_VALUE (PARAM_ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD)
+#define ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD \
+ ((unsigned) PARAM_VALUE (PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD))
#endif /* ! GCC_PARAMS_H */
@@ -165,6 +165,10 @@ DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT,
DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_AFTER_DYNAMIC_INIT,
"__asan_after_dynamic_init",
BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_CLOBBER_N, "__asan_poison_stack_memory",
+ BT_FN_VOID_PTR_PTRMODE, 0)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_UNCLOBBER_N, "__asan_unpoison_stack_memory",
+ BT_FN_VOID_PTR_PTRMODE, 0)
/* Thread Sanitizer */
DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_INIT, "__tsan_init",
@@ -732,6 +732,9 @@ pass_sanopt::execute (function *fun)
case IFN_ASAN_CHECK:
no_next = asan_expand_check_ifn (&gsi, use_calls);
break;
+ case IFN_ASAN_MARK:
+ no_next = asan_expand_mark_ifn (&gsi);
+ break;
default:
break;
}
--
2.10.1