summary refs log tree commit diff stats
path: root/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99424.patch
diff options
context:
space:
mode:
Diffstat (limited to 'toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99424.patch')
-rw-r--r--  toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99424.patch  697
1 files changed, 697 insertions, 0 deletions
diff --git a/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99424.patch b/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99424.patch
new file mode 100644
index 0000000000..ac3a1e224d
--- /dev/null
+++ b/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99424.patch
@@ -0,0 +1,697 @@
1 Issue #1259
2
3 Backport from mainline:
4
5 gcc/
6 2010-10-22 Jie Zhang <jie@codesourcery.com>
7
8 * expr.c (emit_group_load_1): Update calls to extract_bit_field.
9 (copy_blkmode_from_reg): Likewise.
10 (read_complex_part): Likewise.
11 (expand_expr_real_1): Calculate packedp and pass it to
12 extract_bit_field.
13 * expr.h (extract_bit_field): Update declaration.
14 * calls.c (store_unaligned_arguments_into_pseudos): Update call
15 to extract_bit_field.
16 * expmed.c (extract_fixed_bit_field): Update calls to
17 extract_fixed_bit_field.
18 (store_split_bit_field): Likewise.
19 (extract_bit_field_1): Add new argument packedp.
20 (extract_bit_field): Add new argument packedp.
21 (extract_fixed_bit_field): Add new argument packedp and let
22 packed attribute override volatile.
23 * stmt.c (expand_return): Update call to extract_bit_field.
24
25 2010-10-15 Jie Zhang <jie@codesourcery.com>
26
27 * doc/invoke.texi: Add -fstrict-volatile-bitfields to
28 Option Summary and Index.
29
30 2010-07-13 DJ Delorie <dj@redhat.com>
31
32 * config/h8300/h8300.c (h8300_init_once): Default to
33 -fstrict_volatile_bitfields.
34
35 * config/sh/sh.c (sh_override_options): Default to
36 -fstrict_volatile_bitfields.
37
38 * config/rx/rx.c (rx_option_override): New.
39
40 * config/m32c/m32c.c (m32c_override_options): Default to
41 -fstrict_volatile_bitfields.
42
43 2010-06-16 DJ Delorie <dj@redhat.com>
44
45 * common.opt (-fstrict-volatile-bitfields): new.
46 * doc/invoke.texi: Document it.
47 * fold-const.c (optimize_bit_field_compare): For volatile
48 bitfields, use the field's type to determine the mode, not the
49 field's size.
50 * expr.c (expand_assignment): Likewise.
51 (get_inner_reference): Likewise.
52 (expand_expr_real_1): Likewise.
53 * expmed.c (store_fixed_bit_field): Likewise.
54 (extract_bit_field_1): Likewise.
55 (extract_fixed_bit_field): Likewise.
56
57 gcc/testsuite/
58 2010-08-19 Uros Bizjak <ubizjak@gmail.com>
59
60 PR testsuite/45324
61 * gcc.target/i386/volatile-bitfields-1.c: Also scan movb.
62
63 2010-06-16 DJ Delorie <dj@redhat.com>
64
65 * gcc.target/i386/volatile-bitfields-1.c: New.
66 * gcc.target/i386/volatile-bitfields-2.c: New.
67
68=== modified file 'gcc/calls.c'
69Index: gcc-4_5-branch/gcc/calls.c
70===================================================================
71--- gcc-4_5-branch.orig/gcc/calls.c 2012-03-06 13:05:56.524590011 -0800
72+++ gcc-4_5-branch/gcc/calls.c 2012-03-06 13:36:10.276677792 -0800
73@@ -878,7 +878,7 @@
74 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
75
76 args[i].aligned_regs[j] = reg;
77- word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
78+ word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
79 word_mode, word_mode);
80
81 /* There is no need to restrict this code to loading items
82Index: gcc-4_5-branch/gcc/common.opt
83===================================================================
84--- gcc-4_5-branch.orig/gcc/common.opt 2012-03-06 13:05:48.400589618 -0800
85+++ gcc-4_5-branch/gcc/common.opt 2012-03-06 13:36:35.608679018 -0800
86@@ -613,6 +613,10 @@
87 Common Report Var(flag_loop_block) Optimization
88 Enable Loop Blocking transformation
89
90+fstrict-volatile-bitfields
91+Common Report Var(flag_strict_volatile_bitfields) Init(-1)
92+Force bitfield accesses to match their type width
93+
94 fguess-branch-probability
95 Common Report Var(flag_guess_branch_prob) Optimization
96 Enable guessing of branch probabilities
97Index: gcc-4_5-branch/gcc/config/h8300/h8300.c
98===================================================================
99--- gcc-4_5-branch.orig/gcc/config/h8300/h8300.c 2012-03-06 11:53:30.000000000 -0800
100+++ gcc-4_5-branch/gcc/config/h8300/h8300.c 2012-03-06 13:36:35.528679014 -0800
101@@ -403,6 +403,10 @@
102 restore er6 though, so bump up the cost. */
103 h8300_move_ratio = 6;
104 }
105+
106+ /* This target defaults to strict volatile bitfields. */
107+ if (flag_strict_volatile_bitfields < 0)
108+ flag_strict_volatile_bitfields = 1;
109 }
110
111 /* Implement REG_CLASS_FROM_LETTER.
112Index: gcc-4_5-branch/gcc/config/m32c/m32c.c
113===================================================================
114--- gcc-4_5-branch.orig/gcc/config/m32c/m32c.c 2012-03-06 11:53:16.000000000 -0800
115+++ gcc-4_5-branch/gcc/config/m32c/m32c.c 2012-03-06 13:36:35.488679012 -0800
116@@ -428,6 +428,10 @@
117
118 if (TARGET_A24)
119 flag_ivopts = 0;
120+
121+ /* This target defaults to strict volatile bitfields. */
122+ if (flag_strict_volatile_bitfields < 0)
123+ flag_strict_volatile_bitfields = 1;
124 }
125
126 /* Defining data structures for per-function information */
127Index: gcc-4_5-branch/gcc/config/rx/rx.c
128===================================================================
129--- gcc-4_5-branch.orig/gcc/config/rx/rx.c 2012-03-06 11:53:17.000000000 -0800
130+++ gcc-4_5-branch/gcc/config/rx/rx.c 2012-03-06 13:36:35.508679013 -0800
131@@ -2417,6 +2417,14 @@
132 return ! TYPE_PACKED (record_type);
133 }
134
135+static void
136+rx_option_override (void)
137+{
138+ /* This target defaults to strict volatile bitfields. */
139+ if (flag_strict_volatile_bitfields < 0)
140+ flag_strict_volatile_bitfields = 1;
141+}
142+
143
144 /* Returns true if X a legitimate constant for an immediate
145 operand on the RX. X is already known to satisfy CONSTANT_P. */
146@@ -2794,6 +2802,9 @@
147 #undef TARGET_PROMOTE_FUNCTION_MODE
148 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
149
150+#undef TARGET_OPTION_OVERRIDE
151+#define TARGET_OPTION_OVERRIDE rx_option_override
152+
153 struct gcc_target targetm = TARGET_INITIALIZER;
154
155 /* #include "gt-rx.h" */
156Index: gcc-4_5-branch/gcc/config/sh/sh.c
157===================================================================
158--- gcc-4_5-branch.orig/gcc/config/sh/sh.c 2012-03-06 11:53:20.000000000 -0800
159+++ gcc-4_5-branch/gcc/config/sh/sh.c 2012-03-06 13:36:35.516679013 -0800
160@@ -950,6 +950,10 @@
161
162 if (sh_fixed_range_str)
163 sh_fix_range (sh_fixed_range_str);
164+
165+ /* This target defaults to strict volatile bitfields. */
166+ if (flag_strict_volatile_bitfields < 0)
167+ flag_strict_volatile_bitfields = 1;
168 }
169
170 /* Print the operand address in x to the stream. */
171Index: gcc-4_5-branch/gcc/doc/invoke.texi
172===================================================================
173--- gcc-4_5-branch.orig/gcc/doc/invoke.texi 2012-03-06 13:05:56.988590034 -0800
174+++ gcc-4_5-branch/gcc/doc/invoke.texi 2012-03-06 13:36:36.048679039 -0800
175@@ -922,7 +922,7 @@
176 -fargument-noalias-global -fargument-noalias-anything @gol
177 -fleading-underscore -ftls-model=@var{model} @gol
178 -ftrapv -fwrapv -fbounds-check @gol
179--fvisibility}
180+-fvisibility -fstrict-volatile-bitfields}
181 @end table
182
183 @menu
184@@ -17629,6 +17629,33 @@
185 An overview of these techniques, their benefits and how to use them
186 is at @w{@uref{http://gcc.gnu.org/wiki/Visibility}}.
187
188+@item -fstrict-volatile-bitfields
189+@opindex fstrict-volatile-bitfields
190+This option should be used if accesses to volatile bitfields (or other
191+structure fields, although the compiler usually honors those types
192+anyway) should use a single access in a mode of the same size as the
193+container's type, aligned to a natural alignment if possible. For
194+example, targets with memory-mapped peripheral registers might require
195+all such accesses to be 16 bits wide; with this flag the user could
196+declare all peripheral bitfields as ``unsigned short'' (assuming short
197+is 16 bits on these targets) to force GCC to use 16 bit accesses
198+instead of, perhaps, a more efficient 32 bit access.
199+
200+If this option is disabled, the compiler will use the most efficient
201+instruction. In the previous example, that might be a 32-bit load
202+instruction, even though that will access bytes that do not contain
203+any portion of the bitfield, or memory-mapped registers unrelated to
204+the one being updated.
205+
206+If the target requires strict alignment, and honoring the container
207+type would require violating this alignment, a warning is issued.
208+However, the access happens as the user requested, under the
209+assumption that the user knows something about the target hardware
210+that GCC is unaware of.
211+
212+The default value of this option is determined by the application binary
213+interface for the target processor.
214+
215 @end table
216
217 @c man end
218Index: gcc-4_5-branch/gcc/expmed.c
219===================================================================
220--- gcc-4_5-branch.orig/gcc/expmed.c 2012-03-06 13:05:56.876590028 -0800
221+++ gcc-4_5-branch/gcc/expmed.c 2012-03-06 13:36:35.104678993 -0800
222@@ -47,7 +47,7 @@
223 static rtx extract_fixed_bit_field (enum machine_mode, rtx,
224 unsigned HOST_WIDE_INT,
225 unsigned HOST_WIDE_INT,
226- unsigned HOST_WIDE_INT, rtx, int);
227+ unsigned HOST_WIDE_INT, rtx, int, bool);
228 static rtx mask_rtx (enum machine_mode, int, int, int);
229 static rtx lshift_value (enum machine_mode, rtx, int, int);
230 static rtx extract_split_bit_field (rtx, unsigned HOST_WIDE_INT,
231@@ -904,8 +904,14 @@
232 if (GET_MODE_BITSIZE (mode) == 0
233 || GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (word_mode))
234 mode = word_mode;
235- mode = get_best_mode (bitsize, bitpos + offset * BITS_PER_UNIT,
236- MEM_ALIGN (op0), mode, MEM_VOLATILE_P (op0));
237+
238+ if (MEM_VOLATILE_P (op0)
239+ && GET_MODE_BITSIZE (GET_MODE (op0)) > 0
240+ && flag_strict_volatile_bitfields > 0)
241+ mode = GET_MODE (op0);
242+ else
243+ mode = get_best_mode (bitsize, bitpos + offset * BITS_PER_UNIT,
244+ MEM_ALIGN (op0), mode, MEM_VOLATILE_P (op0));
245
246 if (mode == VOIDmode)
247 {
248@@ -1099,7 +1105,7 @@
249 endianness compensation) to fetch the piece we want. */
250 part = extract_fixed_bit_field (word_mode, value, 0, thissize,
251 total_bits - bitsize + bitsdone,
252- NULL_RTX, 1);
253+ NULL_RTX, 1, false);
254 }
255 else
256 {
257@@ -1110,7 +1116,7 @@
258 & (((HOST_WIDE_INT) 1 << thissize) - 1));
259 else
260 part = extract_fixed_bit_field (word_mode, value, 0, thissize,
261- bitsdone, NULL_RTX, 1);
262+ bitsdone, NULL_RTX, 1, false);
263 }
264
265 /* If OP0 is a register, then handle OFFSET here.
266@@ -1176,7 +1182,8 @@
267
268 static rtx
269 extract_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
270- unsigned HOST_WIDE_INT bitnum, int unsignedp, rtx target,
271+ unsigned HOST_WIDE_INT bitnum,
272+ int unsignedp, bool packedp, rtx target,
273 enum machine_mode mode, enum machine_mode tmode,
274 bool fallback_p)
275 {
276@@ -1378,6 +1385,14 @@
277 ? mode_for_size (bitsize, GET_MODE_CLASS (tmode), 0)
278 : mode);
279
280+ /* If the bitfield is volatile, we need to make sure the access
281+ remains on a type-aligned boundary. */
282+ if (GET_CODE (op0) == MEM
283+ && MEM_VOLATILE_P (op0)
284+ && GET_MODE_BITSIZE (GET_MODE (op0)) > 0
285+ && flag_strict_volatile_bitfields > 0)
286+ goto no_subreg_mode_swap;
287+
288 if (((bitsize >= BITS_PER_WORD && bitsize == GET_MODE_BITSIZE (mode)
289 && bitpos % BITS_PER_WORD == 0)
290 || (mode1 != BLKmode
291@@ -1450,7 +1465,7 @@
292 rtx result_part
293 = extract_bit_field (op0, MIN (BITS_PER_WORD,
294 bitsize - i * BITS_PER_WORD),
295- bitnum + bit_offset, 1, target_part, mode,
296+ bitnum + bit_offset, 1, false, target_part, mode,
297 word_mode);
298
299 gcc_assert (target_part);
300@@ -1649,7 +1664,7 @@
301 xop0 = adjust_address (op0, bestmode, xoffset);
302 xop0 = force_reg (bestmode, xop0);
303 result = extract_bit_field_1 (xop0, bitsize, xbitpos,
304- unsignedp, target,
305+ unsignedp, packedp, target,
306 mode, tmode, false);
307 if (result)
308 return result;
309@@ -1663,7 +1678,7 @@
310 return NULL;
311
312 target = extract_fixed_bit_field (int_mode, op0, offset, bitsize,
313- bitpos, target, unsignedp);
314+ bitpos, target, unsignedp, packedp);
315 return convert_extracted_bit_field (target, mode, tmode, unsignedp);
316 }
317
318@@ -1674,6 +1689,7 @@
319
320 STR_RTX is the structure containing the byte (a REG or MEM).
321 UNSIGNEDP is nonzero if this is an unsigned bit field.
322+ PACKEDP is nonzero if the field has the packed attribute.
323 MODE is the natural mode of the field value once extracted.
324 TMODE is the mode the caller would like the value to have;
325 but the value may be returned with type MODE instead.
326@@ -1685,10 +1701,10 @@
327
328 rtx
329 extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
330- unsigned HOST_WIDE_INT bitnum, int unsignedp, rtx target,
331- enum machine_mode mode, enum machine_mode tmode)
332+ unsigned HOST_WIDE_INT bitnum, int unsignedp, bool packedp,
333+ rtx target, enum machine_mode mode, enum machine_mode tmode)
334 {
335- return extract_bit_field_1 (str_rtx, bitsize, bitnum, unsignedp,
336+ return extract_bit_field_1 (str_rtx, bitsize, bitnum, unsignedp, packedp,
337 target, mode, tmode, true);
338 }
339
340@@ -1704,6 +1720,8 @@
341 which is significant on bigendian machines.)
342
343 UNSIGNEDP is nonzero for an unsigned bit field (don't sign-extend value).
344+ PACKEDP is true if the field has the packed attribute.
345+
346 If TARGET is nonzero, attempts to store the value there
347 and return TARGET, but this is not guaranteed.
348 If TARGET is not used, create a pseudo-reg of mode TMODE for the value. */
349@@ -1713,7 +1731,7 @@
350 unsigned HOST_WIDE_INT offset,
351 unsigned HOST_WIDE_INT bitsize,
352 unsigned HOST_WIDE_INT bitpos, rtx target,
353- int unsignedp)
354+ int unsignedp, bool packedp)
355 {
356 unsigned int total_bits = BITS_PER_WORD;
357 enum machine_mode mode;
358@@ -1730,8 +1748,19 @@
359 includes the entire field. If such a mode would be larger than
360 a word, we won't be doing the extraction the normal way. */
361
362- mode = get_best_mode (bitsize, bitpos + offset * BITS_PER_UNIT,
363- MEM_ALIGN (op0), word_mode, MEM_VOLATILE_P (op0));
364+ if (MEM_VOLATILE_P (op0)
365+ && flag_strict_volatile_bitfields > 0)
366+ {
367+ if (GET_MODE_BITSIZE (GET_MODE (op0)) > 0)
368+ mode = GET_MODE (op0);
369+ else if (target && GET_MODE_BITSIZE (GET_MODE (target)) > 0)
370+ mode = GET_MODE (target);
371+ else
372+ mode = tmode;
373+ }
374+ else
375+ mode = get_best_mode (bitsize, bitpos + offset * BITS_PER_UNIT,
376+ MEM_ALIGN (op0), word_mode, MEM_VOLATILE_P (op0));
377
378 if (mode == VOIDmode)
379 /* The only way this should occur is if the field spans word
380@@ -1752,12 +1781,67 @@
381 * BITS_PER_UNIT);
382 }
383
384- /* Get ref to an aligned byte, halfword, or word containing the field.
385- Adjust BITPOS to be position within a word,
386- and OFFSET to be the offset of that word.
387- Then alter OP0 to refer to that word. */
388- bitpos += (offset % (total_bits / BITS_PER_UNIT)) * BITS_PER_UNIT;
389- offset -= (offset % (total_bits / BITS_PER_UNIT));
390+ /* If we're accessing a volatile MEM, we can't do the next
391+ alignment step if it results in a multi-word access where we
392+ otherwise wouldn't have one. So, check for that case
393+ here. */
394+ if (MEM_P (op0)
395+ && MEM_VOLATILE_P (op0)
396+ && flag_strict_volatile_bitfields > 0
397+ && bitpos + bitsize <= total_bits
398+ && bitpos + bitsize + (offset % (total_bits / BITS_PER_UNIT)) * BITS_PER_UNIT > total_bits)
399+ {
400+ if (STRICT_ALIGNMENT)
401+ {
402+ static bool informed_about_misalignment = false;
403+ bool warned;
404+
405+ if (packedp)
406+ {
407+ if (bitsize == total_bits)
408+ warned = warning_at (input_location, OPT_fstrict_volatile_bitfields,
409+ "multiple accesses to volatile structure member"
410+ " because of packed attribute");
411+ else
412+ warned = warning_at (input_location, OPT_fstrict_volatile_bitfields,
413+ "multiple accesses to volatile structure bitfield"
414+ " because of packed attribute");
415+
416+ return extract_split_bit_field (op0, bitsize,
417+ bitpos + offset * BITS_PER_UNIT,
418+ unsignedp);
419+ }
420+
421+ if (bitsize == total_bits)
422+ warned = warning_at (input_location, OPT_fstrict_volatile_bitfields,
423+ "mis-aligned access used for structure member");
424+ else
425+ warned = warning_at (input_location, OPT_fstrict_volatile_bitfields,
426+ "mis-aligned access used for structure bitfield");
427+
428+ if (! informed_about_misalignment && warned)
429+ {
430+ informed_about_misalignment = true;
431+ inform (input_location,
432+ "When a volatile object spans multiple type-sized locations,"
433+ " the compiler must choose between using a single mis-aligned access to"
434+ " preserve the volatility, or using multiple aligned accesses to avoid"
435+ " runtime faults. This code may fail at runtime if the hardware does"
436+ " not allow this access.");
437+ }
438+ }
439+ }
440+ else
441+ {
442+
443+ /* Get ref to an aligned byte, halfword, or word containing the field.
444+ Adjust BITPOS to be position within a word,
445+ and OFFSET to be the offset of that word.
446+ Then alter OP0 to refer to that word. */
447+ bitpos += (offset % (total_bits / BITS_PER_UNIT)) * BITS_PER_UNIT;
448+ offset -= (offset % (total_bits / BITS_PER_UNIT));
449+ }
450+
451 op0 = adjust_address (op0, mode, offset);
452 }
453
454@@ -1966,7 +2050,7 @@
455 extract_fixed_bit_field wants offset in bytes. */
456 part = extract_fixed_bit_field (word_mode, word,
457 offset * unit / BITS_PER_UNIT,
458- thissize, thispos, 0, 1);
459+ thissize, thispos, 0, 1, false);
460 bitsdone += thissize;
461
462 /* Shift this part into place for the result. */
463Index: gcc-4_5-branch/gcc/expr.c
464===================================================================
465--- gcc-4_5-branch.orig/gcc/expr.c 2012-03-06 13:05:57.720590069 -0800
466+++ gcc-4_5-branch/gcc/expr.c 2012-03-06 13:40:14.504689612 -0800
467@@ -1749,7 +1749,7 @@
468 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
469 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
470 (bytepos % slen0) * BITS_PER_UNIT,
471- 1, NULL_RTX, mode, mode);
472+ 1, false, NULL_RTX, mode, mode);
473 }
474 else
475 {
476@@ -1759,7 +1759,7 @@
477 mem = assign_stack_temp (GET_MODE (src), slen, 0);
478 emit_move_insn (mem, src);
479 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
480- 0, 1, NULL_RTX, mode, mode);
481+ 0, 1, false, NULL_RTX, mode, mode);
482 }
483 }
484 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
485@@ -1800,7 +1800,7 @@
486 tmps[i] = src;
487 else
488 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
489- bytepos * BITS_PER_UNIT, 1, NULL_RTX,
490+ bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
491 mode, mode);
492
493 if (shift)
494@@ -2213,7 +2213,7 @@
495 bitpos for the destination store (left justified). */
496 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
497 extract_bit_field (src, bitsize,
498- xbitpos % BITS_PER_WORD, 1,
499+ xbitpos % BITS_PER_WORD, 1, false,
500 NULL_RTX, copy_mode, copy_mode));
501 }
502
503@@ -2291,7 +2291,7 @@
504 xbitpos for the destination store (right justified). */
505 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD, word_mode,
506 extract_bit_field (src_word, bitsize,
507- bitpos % BITS_PER_WORD, 1,
508+ bitpos % BITS_PER_WORD, 1, false,
509 NULL_RTX, word_mode, word_mode));
510 }
511
512@@ -3075,7 +3075,7 @@
513 }
514
515 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
516- true, NULL_RTX, imode, imode);
517+ true, false, NULL_RTX, imode, imode);
518 }
519
520 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
521@@ -4338,6 +4338,13 @@
522
523 to_rtx = expand_normal (tem);
524
525+ /* If the bitfield is volatile, we want to access it in the
526+ field's mode, not the computed mode. */
527+ if (volatilep
528+ && GET_CODE (to_rtx) == MEM
529+ && flag_strict_volatile_bitfields > 0)
530+ to_rtx = adjust_address (to_rtx, mode1, 0);
531+
532 if (offset != 0)
533 {
534 enum machine_mode address_mode;
535@@ -6106,6 +6113,12 @@
536 mode = DECL_MODE (field);
537 else if (DECL_MODE (field) == BLKmode)
538 blkmode_bitfield = true;
539+ else if (TREE_THIS_VOLATILE (exp)
540+ && flag_strict_volatile_bitfields > 0)
541+ /* Volatile bitfields should be accessed in the mode of the
542+ field's type, not the mode computed based on the bit
543+ size. */
544+ mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
545
546 *punsignedp = DECL_UNSIGNED (field);
547 }
548@@ -8978,6 +8991,7 @@
549 HOST_WIDE_INT bitsize, bitpos;
550 tree offset;
551 int volatilep = 0, must_force_mem;
552+ bool packedp = false;
553 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
554 &mode1, &unsignedp, &volatilep, true);
555 rtx orig_op0, memloc;
556@@ -8987,6 +9001,11 @@
557 infinitely recurse. */
558 gcc_assert (tem != exp);
559
560+ if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
561+ || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
562+ && DECL_PACKED (TREE_OPERAND (exp, 1))))
563+ packedp = true;
564+
565 /* If TEM's type is a union of variable size, pass TARGET to the inner
566 computation, since it will need a temporary and TARGET is known
567 to have to do. This occurs in unchecked conversion in Ada. */
568@@ -9003,6 +9022,14 @@
569 || modifier == EXPAND_STACK_PARM)
570 ? modifier : EXPAND_NORMAL);
571
572+
573+ /* If the bitfield is volatile, we want to access it in the
574+ field's mode, not the computed mode. */
575+ if (volatilep
576+ && GET_CODE (op0) == MEM
577+ && flag_strict_volatile_bitfields > 0)
578+ op0 = adjust_address (op0, mode1, 0);
579+
580 mode2
581 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
582
583@@ -9128,6 +9155,9 @@
584 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
585 && modifier != EXPAND_CONST_ADDRESS
586 && modifier != EXPAND_INITIALIZER)
587+ /* If the field is volatile, we always want an aligned
588+ access. */
589+ || (volatilep && flag_strict_volatile_bitfields > 0)
590 /* If the field isn't aligned enough to fetch as a memref,
591 fetch it as a bit field. */
592 || (mode1 != BLKmode
593@@ -9188,7 +9218,7 @@
594 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
595 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
596
597- op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
598+ op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
599 (modifier == EXPAND_STACK_PARM
600 ? NULL_RTX : target),
601 ext_mode, ext_mode);
602Index: gcc-4_5-branch/gcc/expr.h
603===================================================================
604--- gcc-4_5-branch.orig/gcc/expr.h 2012-03-06 11:53:32.000000000 -0800
605+++ gcc-4_5-branch/gcc/expr.h 2012-03-06 13:05:59.668590163 -0800
606@@ -804,7 +804,7 @@
607 extern void store_bit_field (rtx, unsigned HOST_WIDE_INT,
608 unsigned HOST_WIDE_INT, enum machine_mode, rtx);
609 extern rtx extract_bit_field (rtx, unsigned HOST_WIDE_INT,
610- unsigned HOST_WIDE_INT, int, rtx,
611+ unsigned HOST_WIDE_INT, int, bool, rtx,
612 enum machine_mode, enum machine_mode);
613 extern rtx extract_low_bits (enum machine_mode, enum machine_mode, rtx);
614 extern rtx expand_mult (enum machine_mode, rtx, rtx, rtx, int);
615Index: gcc-4_5-branch/gcc/fold-const.c
616===================================================================
617--- gcc-4_5-branch.orig/gcc/fold-const.c 2012-03-06 13:05:56.880590028 -0800
618+++ gcc-4_5-branch/gcc/fold-const.c 2012-03-06 13:36:03.276677454 -0800
619@@ -4215,11 +4215,16 @@
620
621 /* See if we can find a mode to refer to this field. We should be able to,
622 but fail if we can't. */
623- nmode = get_best_mode (lbitsize, lbitpos,
624- const_p ? TYPE_ALIGN (TREE_TYPE (linner))
625- : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
626- TYPE_ALIGN (TREE_TYPE (rinner))),
627- word_mode, lvolatilep || rvolatilep);
628+ if (lvolatilep
629+ && GET_MODE_BITSIZE (lmode) > 0
630+ && flag_strict_volatile_bitfields > 0)
631+ nmode = lmode;
632+ else
633+ nmode = get_best_mode (lbitsize, lbitpos,
634+ const_p ? TYPE_ALIGN (TREE_TYPE (linner))
635+ : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
636+ TYPE_ALIGN (TREE_TYPE (rinner))),
637+ word_mode, lvolatilep || rvolatilep);
638 if (nmode == VOIDmode)
639 return 0;
640
641Index: gcc-4_5-branch/gcc/stmt.c
642===================================================================
643--- gcc-4_5-branch.orig/gcc/stmt.c 2012-03-06 13:05:54.568589917 -0800
644+++ gcc-4_5-branch/gcc/stmt.c 2012-03-06 13:36:34.948678986 -0800
645@@ -1754,7 +1754,7 @@
646 xbitpos for the destination store (right justified). */
647 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
648 extract_bit_field (src, bitsize,
649- bitpos % BITS_PER_WORD, 1,
650+ bitpos % BITS_PER_WORD, 1, false,
651 NULL_RTX, word_mode, word_mode));
652 }
653
654Index: gcc-4_5-branch/gcc/testsuite/gcc.target/i386/volatile-bitfields-1.c
655===================================================================
656--- /dev/null 1970-01-01 00:00:00.000000000 +0000
657+++ gcc-4_5-branch/gcc/testsuite/gcc.target/i386/volatile-bitfields-1.c 2012-03-06 13:05:59.672590164 -0800
658@@ -0,0 +1,17 @@
659+/* { dg-do compile } */
660+/* { dg-options "-O2 -fstrict-volatile-bitfields" } */
661+
662+typedef struct {
663+ char a:1;
664+ char b:7;
665+ int c;
666+} BitStruct;
667+
668+volatile BitStruct bits;
669+
670+int foo ()
671+{
672+ return bits.b;
673+}
674+
675+/* { dg-final { scan-assembler "mov(b|zbl).*bits" } } */
676Index: gcc-4_5-branch/gcc/testsuite/gcc.target/i386/volatile-bitfields-2.c
677===================================================================
678--- /dev/null 1970-01-01 00:00:00.000000000 +0000
679+++ gcc-4_5-branch/gcc/testsuite/gcc.target/i386/volatile-bitfields-2.c 2012-03-06 13:05:59.672590164 -0800
680@@ -0,0 +1,17 @@
681+/* { dg-do compile } */
682+/* { dg-options "-O2 -fno-strict-volatile-bitfields" } */
683+
684+typedef struct {
685+ char a:1;
686+ char b:7;
687+ int c;
688+} BitStruct;
689+
690+volatile BitStruct bits;
691+
692+int foo ()
693+{
694+ return bits.b;
695+}
696+
697+/* { dg-final { scan-assembler "movl.*bits" } } */