Diffstat (limited to 'meta-oe/recipes-devtools/gcc/gcc-4.6/linaro/gcc-4.6-linaro-r106827.patch')
-rw-r--r--  meta-oe/recipes-devtools/gcc/gcc-4.6/linaro/gcc-4.6-linaro-r106827.patch | 628
1 files changed, 628 insertions, 0 deletions
diff --git a/meta-oe/recipes-devtools/gcc/gcc-4.6/linaro/gcc-4.6-linaro-r106827.patch b/meta-oe/recipes-devtools/gcc/gcc-4.6/linaro/gcc-4.6-linaro-r106827.patch
new file mode 100644
index 0000000000..d49ebab600
--- /dev/null
+++ b/meta-oe/recipes-devtools/gcc/gcc-4.6/linaro/gcc-4.6-linaro-r106827.patch
@@ -0,0 +1,628 @@
2011-10-17  Michael Hope  <michael.hope@linaro.org>

        Backport from mainline r178852:

        2011-09-14  Julian Brown  <julian@codesourcery.com>

        gcc/
        * config/arm/arm.c (arm_override_options): Add unaligned_access
        support.
        (arm_file_start): Emit attribute for unaligned access as appropriate.
        * config/arm/arm.md (UNSPEC_UNALIGNED_LOAD)
        (UNSPEC_UNALIGNED_STORE): Add constants for unspecs.
        (insv, extzv): Add unaligned-access support.
        (extv): Change to expander. Likewise.
        (extzv_t1, extv_regsi): Add helpers.
        (unaligned_loadsi, unaligned_loadhis, unaligned_loadhiu)
        (unaligned_storesi, unaligned_storehi): New.
        (*extv_reg): New (previous extv implementation).
        * config/arm/arm.opt (munaligned_access): Add option.
        * config/arm/constraints.md (Uw): New constraint.
        * expmed.c (store_bit_field_1): Adjust bitfield numbering according
        to size of access, not size of unit, when BITS_BIG_ENDIAN !=
        BYTES_BIG_ENDIAN.  Don't use bitfield accesses for
        volatile accesses when -fstrict-volatile-bitfields is in effect.
        (extract_bit_field_1): Likewise.

        Backport from mainline r172697:

        2011-04-19  Wei Guozhi  <carrot@google.com>

        PR target/47855
        gcc/
        * config/arm/arm-protos.h (thumb1_legitimate_address_p): New prototype.
        * config/arm/arm.c (thumb1_legitimate_address_p): Remove the static
        linkage.
        * config/arm/constraints.md (Uu): New constraint.
        * config/arm/arm.md (*arm_movqi_insn): Compute attr "length".

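For context, the changes above target word and halfword accesses to packed data. A minimal sketch of the kind of source affected follows (illustrative only, not part of the patch; the struct and function names are hypothetical):

/* With -munaligned-access (enabled by default on ARMv6 and later,
   except ARMv6-M), the misaligned halfword/word loads below may be
   compiled to single ldrh/ldr instructions instead of byte-by-byte
   sequences.  */

struct __attribute__ ((packed)) header
{
  unsigned char  tag;       /* offset 0 */
  unsigned short len;       /* offset 1: not naturally aligned */
  unsigned int   payload;   /* offset 3: not naturally aligned */
};

unsigned int
read_payload (const struct header *h)
{
  return h->payload;        /* goes through the extv/extzv expanders */
}
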
=== modified file 'gcc/config/arm/arm-protos.h'
--- old/gcc/config/arm/arm-protos.h  2011-10-03 09:46:40 +0000
+++ new/gcc/config/arm/arm-protos.h  2011-10-11 01:56:19 +0000
@@ -59,6 +59,7 @@
                                             int);
 extern rtx thumb_legitimize_reload_address (rtx *, enum machine_mode, int, int,
                                             int);
+extern int thumb1_legitimate_address_p (enum machine_mode, rtx, int);
 extern int arm_const_double_rtx (rtx);
 extern int neg_const_double_rtx_ok_for_fpa (rtx);
 extern int vfp3_const_double_rtx (rtx);
 
=== modified file 'gcc/config/arm/arm.c'
--- old/gcc/config/arm/arm.c  2011-10-03 09:46:40 +0000
+++ new/gcc/config/arm/arm.c  2011-10-11 02:31:01 +0000
@@ -2065,6 +2065,28 @@
       fix_cm3_ldrd = 0;
     }
 
+  /* Enable -munaligned-access by default for
+     - all ARMv6 architecture-based processors
+     - ARMv7-A, ARMv7-R, and ARMv7-M architecture-based processors.
+
+     Disable -munaligned-access by default for
+     - all pre-ARMv6 architecture-based processors
+     - ARMv6-M architecture-based processors.  */
+
+  if (unaligned_access == 2)
+    {
+      if (arm_arch6 && (arm_arch_notm || arm_arch7))
+        unaligned_access = 1;
+      else
+        unaligned_access = 0;
+    }
+  else if (unaligned_access == 1
+           && !(arm_arch6 && (arm_arch_notm || arm_arch7)))
+    {
+      warning (0, "target CPU does not support unaligned accesses");
+      unaligned_access = 0;
+    }
+
   if (TARGET_THUMB1 && flag_schedule_insns)
     {
       /* Don't warn since it's on by default in -O2.  */
@@ -6106,7 +6128,7 @@
    addresses based on the frame pointer or arg pointer until the
    reload pass starts.  This is so that eliminating such addresses
    into stack based ones won't produce impossible code.  */
-static int
+int
 thumb1_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
 {
   /* ??? Not clear if this is right.  Experiment.  */
@@ -22226,6 +22248,10 @@
     val = 6;
   asm_fprintf (asm_out_file, "\t.eabi_attribute 30, %d\n", val);
 
+  /* Tag_CPU_unaligned_access.  */
+  asm_fprintf (asm_out_file, "\t.eabi_attribute 34, %d\n",
+               unaligned_access);
+
   /* Tag_ABI_FP_16bit_format.  */
   if (arm_fp16_format)
     asm_fprintf (asm_out_file, "\t.eabi_attribute 38, %d\n",
 
=== modified file 'gcc/config/arm/arm.md'
--- old/gcc/config/arm/arm.md  2011-10-03 09:47:33 +0000
+++ new/gcc/config/arm/arm.md  2011-10-11 02:31:01 +0000
@@ -113,6 +113,10 @@
   (UNSPEC_SYMBOL_OFFSET 27) ; The offset of the start of the symbol from
                             ; another symbolic address.
   (UNSPEC_MEMORY_BARRIER 28) ; Represent a memory barrier.
+  (UNSPEC_UNALIGNED_LOAD 29) ; Used to represent ldr/ldrh instructions that access
+                             ; unaligned locations, on architectures which support
+                             ; that.
+  (UNSPEC_UNALIGNED_STORE 30) ; Same for str/strh.
  ]
 )
 
@@ -2463,10 +2467,10 @@
 ;;; this insv pattern, so this pattern needs to be reevalutated.
 
 (define_expand "insv"
-  [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
-                         (match_operand:SI 1 "general_operand" "")
-                         (match_operand:SI 2 "general_operand" ""))
-        (match_operand:SI 3 "reg_or_int_operand" ""))]
+  [(set (zero_extract (match_operand 0 "nonimmediate_operand" "")
+                      (match_operand 1 "general_operand" "")
+                      (match_operand 2 "general_operand" ""))
+        (match_operand 3 "reg_or_int_operand" ""))]
   "TARGET_ARM || arm_arch_thumb2"
   "
 {
@@ -2477,35 +2481,70 @@
 
   if (arm_arch_thumb2)
     {
-      bool use_bfi = TRUE;
-
-      if (GET_CODE (operands[3]) == CONST_INT)
-        {
-          HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
-
-          if (val == 0)
-            {
-              emit_insn (gen_insv_zero (operands[0], operands[1],
-                                        operands[2]));
+      if (unaligned_access && MEM_P (operands[0])
+          && s_register_operand (operands[3], GET_MODE (operands[3]))
+          && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
+        {
+          rtx base_addr;
+
+          if (BYTES_BIG_ENDIAN)
+            start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
+                        - start_bit;
+
+          if (width == 32)
+            {
+              base_addr = adjust_address (operands[0], SImode,
+                                          start_bit / BITS_PER_UNIT);
+              emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
+            }
+          else
+            {
+              rtx tmp = gen_reg_rtx (HImode);
+
+              base_addr = adjust_address (operands[0], HImode,
+                                          start_bit / BITS_PER_UNIT);
+              emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
+              emit_insn (gen_unaligned_storehi (base_addr, tmp));
+            }
+          DONE;
+        }
+      else if (s_register_operand (operands[0], GET_MODE (operands[0])))
+        {
+          bool use_bfi = TRUE;
+
+          if (GET_CODE (operands[3]) == CONST_INT)
+            {
+              HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
+
+              if (val == 0)
+                {
+                  emit_insn (gen_insv_zero (operands[0], operands[1],
+                                            operands[2]));
+                  DONE;
+                }
+
+              /* See if the set can be done with a single orr instruction.  */
+              if (val == mask && const_ok_for_arm (val << start_bit))
+                use_bfi = FALSE;
+            }
+
+          if (use_bfi)
+            {
+              if (GET_CODE (operands[3]) != REG)
+                operands[3] = force_reg (SImode, operands[3]);
+
+              emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
+                                      operands[3]));
               DONE;
             }
-
-          /* See if the set can be done with a single orr instruction.  */
-          if (val == mask && const_ok_for_arm (val << start_bit))
-            use_bfi = FALSE;
-        }
-
-      if (use_bfi)
-        {
-          if (GET_CODE (operands[3]) != REG)
-            operands[3] = force_reg (SImode, operands[3]);
-
-          emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
-                                  operands[3]));
-          DONE;
-        }
+        }
+      else
+        FAIL;
     }
 
+  if (!s_register_operand (operands[0], GET_MODE (operands[0])))
+    FAIL;
+
   target = copy_rtx (operands[0]);
   /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
      subreg as the final target.  */
@@ -3697,12 +3736,10 @@
 ;; to reduce register pressure later on.
 
 (define_expand "extzv"
-  [(set (match_dup 4)
-        (ashift:SI (match_operand:SI 1 "register_operand" "")
-                   (match_operand:SI 2 "const_int_operand" "")))
-   (set (match_operand:SI 0 "register_operand" "")
-        (lshiftrt:SI (match_dup 4)
-                     (match_operand:SI 3 "const_int_operand" "")))]
+  [(set (match_operand 0 "s_register_operand" "")
+        (zero_extract (match_operand 1 "nonimmediate_operand" "")
+                      (match_operand 2 "const_int_operand" "")
+                      (match_operand 3 "const_int_operand" "")))]
   "TARGET_THUMB1 || arm_arch_thumb2"
   "
 {
@@ -3711,10 +3748,57 @@
 
   if (arm_arch_thumb2)
     {
-      emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
-                               operands[3]));
-      DONE;
+      HOST_WIDE_INT width = INTVAL (operands[2]);
+      HOST_WIDE_INT bitpos = INTVAL (operands[3]);
+
+      if (unaligned_access && MEM_P (operands[1])
+          && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
+        {
+          rtx base_addr;
+
+          if (BYTES_BIG_ENDIAN)
+            bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
+                     - bitpos;
+
+          if (width == 32)
+            {
+              base_addr = adjust_address (operands[1], SImode,
+                                          bitpos / BITS_PER_UNIT);
+              emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
+            }
+          else
+            {
+              rtx dest = operands[0];
+              rtx tmp = gen_reg_rtx (SImode);
+
+              /* We may get a paradoxical subreg here.  Strip it off.  */
+              if (GET_CODE (dest) == SUBREG
+                  && GET_MODE (dest) == SImode
+                  && GET_MODE (SUBREG_REG (dest)) == HImode)
+                dest = SUBREG_REG (dest);
+
+              if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
+                FAIL;
+
+              base_addr = adjust_address (operands[1], HImode,
+                                          bitpos / BITS_PER_UNIT);
+              emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
+              emit_move_insn (gen_lowpart (SImode, dest), tmp);
+            }
+          DONE;
+        }
+      else if (s_register_operand (operands[1], GET_MODE (operands[1])))
+        {
+          emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
+                                   operands[3]));
+          DONE;
+        }
+      else
+        FAIL;
     }
+
+  if (!s_register_operand (operands[1], GET_MODE (operands[1])))
+    FAIL;
 
   operands[3] = GEN_INT (rshift);
 
@@ -3724,12 +3808,154 @@
   DONE;
 }
 
-  operands[2] = GEN_INT (lshift);
-  operands[4] = gen_reg_rtx (SImode);
+  emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
+                           operands[3], gen_reg_rtx (SImode)));
+  DONE;
 }"
 )
 
-(define_insn "extv"
+;; Helper for extzv, for the Thumb-1 register-shifts case.
+
+(define_expand "extzv_t1"
+  [(set (match_operand:SI 4 "s_register_operand" "")
+        (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "")
+                   (match_operand:SI 2 "const_int_operand" "")))
+   (set (match_operand:SI 0 "s_register_operand" "")
+        (lshiftrt:SI (match_dup 4)
+                     (match_operand:SI 3 "const_int_operand" "")))]
+  "TARGET_THUMB1"
+  "")
+
+(define_expand "extv"
+  [(set (match_operand 0 "s_register_operand" "")
+        (sign_extract (match_operand 1 "nonimmediate_operand" "")
+                      (match_operand 2 "const_int_operand" "")
+                      (match_operand 3 "const_int_operand" "")))]
+  "arm_arch_thumb2"
+{
+  HOST_WIDE_INT width = INTVAL (operands[2]);
+  HOST_WIDE_INT bitpos = INTVAL (operands[3]);
+
+  if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
+      && (bitpos % BITS_PER_UNIT) == 0)
+    {
+      rtx base_addr;
+
+      if (BYTES_BIG_ENDIAN)
+        bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
+
+      if (width == 32)
+        {
+          base_addr = adjust_address (operands[1], SImode,
+                                      bitpos / BITS_PER_UNIT);
+          emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
+        }
+      else
+        {
+          rtx dest = operands[0];
+          rtx tmp = gen_reg_rtx (SImode);
+
+          /* We may get a paradoxical subreg here.  Strip it off.  */
+          if (GET_CODE (dest) == SUBREG
+              && GET_MODE (dest) == SImode
+              && GET_MODE (SUBREG_REG (dest)) == HImode)
+            dest = SUBREG_REG (dest);
+
+          if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
+            FAIL;
+
+          base_addr = adjust_address (operands[1], HImode,
+                                      bitpos / BITS_PER_UNIT);
+          emit_insn (gen_unaligned_loadhis (tmp, base_addr));
+          emit_move_insn (gen_lowpart (SImode, dest), tmp);
+        }
+
+      DONE;
+    }
+  else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
+    FAIL;
+  else if (GET_MODE (operands[0]) == SImode
+           && GET_MODE (operands[1]) == SImode)
+    {
+      emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
+                                 operands[3]));
+      DONE;
+    }
+
+  FAIL;
+})
+
+; Helper to expand register forms of extv with the proper modes.
+
+(define_expand "extv_regsi"
+  [(set (match_operand:SI 0 "s_register_operand" "")
+        (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
+                         (match_operand 2 "const_int_operand" "")
+                         (match_operand 3 "const_int_operand" "")))]
+  ""
+{
+})
+
+; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
+
+(define_insn "unaligned_loadsi"
+  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
+        (unspec:SI [(match_operand:SI 1 "memory_operand" "Uw,m")]
+                   UNSPEC_UNALIGNED_LOAD))]
+  "unaligned_access && TARGET_32BIT"
+  "ldr%?\t%0, %1\t@ unaligned"
+  [(set_attr "arch" "t2,any")
+   (set_attr "length" "2,4")
+   (set_attr "predicable" "yes")
+   (set_attr "type" "load1")])
+
+(define_insn "unaligned_loadhis"
+  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
+        (sign_extend:SI
+          (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
+                     UNSPEC_UNALIGNED_LOAD)))]
+  "unaligned_access && TARGET_32BIT"
+  "ldr%(sh%)\t%0, %1\t@ unaligned"
+  [(set_attr "arch" "t2,any")
+   (set_attr "length" "2,4")
+   (set_attr "predicable" "yes")
+   (set_attr "type" "load_byte")])
+
+(define_insn "unaligned_loadhiu"
+  [(set (match_operand:SI 0 "s_register_operand" "=l,r")
+        (zero_extend:SI
+          (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
+                     UNSPEC_UNALIGNED_LOAD)))]
+  "unaligned_access && TARGET_32BIT"
+  "ldr%(h%)\t%0, %1\t@ unaligned"
+  [(set_attr "arch" "t2,any")
+   (set_attr "length" "2,4")
+   (set_attr "predicable" "yes")
+   (set_attr "type" "load_byte")])
+
+(define_insn "unaligned_storesi"
+  [(set (match_operand:SI 0 "memory_operand" "=Uw,m")
+        (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,r")]
+                   UNSPEC_UNALIGNED_STORE))]
+  "unaligned_access && TARGET_32BIT"
+  "str%?\t%1, %0\t@ unaligned"
+  [(set_attr "arch" "t2,any")
+   (set_attr "length" "2,4")
+   (set_attr "predicable" "yes")
+   (set_attr "type" "store1")])
+
+(define_insn "unaligned_storehi"
+  [(set (match_operand:HI 0 "memory_operand" "=Uw,m")
+        (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,r")]
+                   UNSPEC_UNALIGNED_STORE))]
+  "unaligned_access && TARGET_32BIT"
+  "str%(h%)\t%1, %0\t@ unaligned"
+  [(set_attr "arch" "t2,any")
+   (set_attr "length" "2,4")
+   (set_attr "predicable" "yes")
+   (set_attr "type" "store1")])
+
+(define_insn "*extv_reg"
   [(set (match_operand:SI 0 "s_register_operand" "=r")
         (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
                          (match_operand:SI 2 "const_int_operand" "M")
@@ -6038,8 +6264,8 @@
 
 
 (define_insn "*arm_movqi_insn"
-  [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
-        (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
+  [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,l,Uu,r,m")
+        (match_operand:QI 1 "general_operand" "rI,K,Uu,l,m,r"))]
   "TARGET_32BIT
    && (   register_operand (operands[0], QImode)
        || register_operand (operands[1], QImode))"
@@ -6047,10 +6273,14 @@
    mov%?\\t%0, %1
    mvn%?\\t%0, #%B1
    ldr%(b%)\\t%0, %1
+   str%(b%)\\t%1, %0
+   ldr%(b%)\\t%0, %1
    str%(b%)\\t%1, %0"
-  [(set_attr "type" "*,*,load1,store1")
-   (set_attr "insn" "mov,mvn,*,*")
-   (set_attr "predicable" "yes")]
+  [(set_attr "type" "*,*,load1,store1,load1,store1")
+   (set_attr "insn" "mov,mvn,*,*,*,*")
+   (set_attr "predicable" "yes")
+   (set_attr "arch" "any,any,t2,t2,any,any")
+   (set_attr "length" "4,4,2,2,4,4")]
 )
 
 (define_insn "*thumb1_movqi_insn"
 
=== modified file 'gcc/config/arm/arm.opt'
--- old/gcc/config/arm/arm.opt  2011-09-19 07:44:24 +0000
+++ new/gcc/config/arm/arm.opt  2011-10-11 02:31:01 +0000
@@ -173,3 +173,7 @@
 Target Report Var(fix_cm3_ldrd) Init(2)
 Avoid overlapping destination and address registers on LDRD instructions
 that may trigger Cortex-M3 errata.
+
+munaligned-access
+Target Report Var(unaligned_access) Init(2)
+Enable unaligned word and halfword accesses to packed data.
 
=== modified file 'gcc/config/arm/constraints.md'
--- old/gcc/config/arm/constraints.md  2011-09-12 14:14:00 +0000
+++ new/gcc/config/arm/constraints.md  2011-10-11 02:31:01 +0000
@@ -36,6 +36,7 @@
 ;; The following memory constraints have been used:
 ;; in ARM/Thumb-2 state: Q, Ut, Uv, Uy, Un, Um, Us
 ;; in ARM state: Uq
+;; in Thumb state: Uu, Uw
 
 
 (define_register_constraint "f" "TARGET_ARM ? FPA_REGS : NO_REGS"
@@ -344,6 +345,27 @@
  (and (match_code "mem")
       (match_test "REG_P (XEXP (op, 0))")))
 
+(define_memory_constraint "Uu"
+ "@internal
+  In Thumb state an address that is valid in 16bit encoding."
+ (and (match_code "mem")
+      (match_test "TARGET_THUMB
+                   && thumb1_legitimate_address_p (GET_MODE (op), XEXP (op, 0),
+                                                   0)")))
+
+; The 16-bit post-increment LDR/STR accepted by thumb1_legitimate_address_p
+; are actually LDM/STM instructions, so cannot be used to access unaligned
+; data.
+(define_memory_constraint "Uw"
+ "@internal
+  In Thumb state an address that is valid in 16bit encoding, and that can be
+  used for unaligned accesses."
+ (and (match_code "mem")
+      (match_test "TARGET_THUMB
+                   && thumb1_legitimate_address_p (GET_MODE (op), XEXP (op, 0),
+                                                   0)
+                   && GET_CODE (XEXP (op, 0)) != POST_INC")))
+
 ;; We used to have constraint letters for S and R in ARM state, but
 ;; all uses of these now appear to have been removed.
 
 
=== modified file 'gcc/expmed.c'
--- old/gcc/expmed.c  2011-05-22 19:02:59 +0000
+++ new/gcc/expmed.c  2011-10-11 02:31:01 +0000
@@ -657,6 +657,10 @@
       && GET_MODE (value) != BLKmode
       && bitsize > 0
       && GET_MODE_BITSIZE (op_mode) >= bitsize
+      /* Do not use insv for volatile bitfields when
+         -fstrict-volatile-bitfields is in effect.  */
+      && !(MEM_P (op0) && MEM_VOLATILE_P (op0)
+           && flag_strict_volatile_bitfields > 0)
       && ! ((REG_P (op0) || GET_CODE (op0) == SUBREG)
             && (bitsize + bitpos > GET_MODE_BITSIZE (op_mode)))
       && insn_data[CODE_FOR_insv].operand[1].predicate (GEN_INT (bitsize),
@@ -700,19 +704,21 @@
          copy_back = true;
        }
 
-      /* On big-endian machines, we count bits from the most significant.
-         If the bit field insn does not, we must invert.  */
-
-      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
-        xbitpos = unit - bitsize - xbitpos;
-
       /* We have been counting XBITPOS within UNIT.
          Count instead within the size of the register.  */
-      if (BITS_BIG_ENDIAN && !MEM_P (xop0))
+      if (BYTES_BIG_ENDIAN && !MEM_P (xop0))
        xbitpos += GET_MODE_BITSIZE (op_mode) - unit;
 
       unit = GET_MODE_BITSIZE (op_mode);
 
+      /* If BITS_BIG_ENDIAN is zero on a BYTES_BIG_ENDIAN machine, we count
+         "backwards" from the size of the unit we are inserting into.
+         Otherwise, we count bits from the most significant on a
+         BYTES/BITS_BIG_ENDIAN machine.  */
+
+      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
+        xbitpos = unit - bitsize - xbitpos;
+
       /* Convert VALUE to op_mode (which insv insn wants) in VALUE1.  */
       value1 = value;
       if (GET_MODE (value) != op_mode)
@@ -1528,6 +1534,10 @@
   if (ext_mode != MAX_MACHINE_MODE
       && bitsize > 0
       && GET_MODE_BITSIZE (ext_mode) >= bitsize
+      /* Do not use extv/extzv for volatile bitfields when
+         -fstrict-volatile-bitfields is in effect.  */
+      && !(MEM_P (op0) && MEM_VOLATILE_P (op0)
+           && flag_strict_volatile_bitfields > 0)
       /* If op0 is a register, we need it in EXT_MODE to make it
          acceptable to the format of ext(z)v.  */
       && !(GET_CODE (op0) == SUBREG && GET_MODE (op0) != ext_mode)
@@ -1552,17 +1562,20 @@
       /* Get ref to first byte containing part of the field.  */
       xop0 = adjust_address (xop0, byte_mode, xoffset);
 
-      /* On big-endian machines, we count bits from the most significant.
-         If the bit field insn does not, we must invert.  */
-      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
-        xbitpos = unit - bitsize - xbitpos;
-
       /* Now convert from counting within UNIT to counting in EXT_MODE.  */
-      if (BITS_BIG_ENDIAN && !MEM_P (xop0))
+      if (BYTES_BIG_ENDIAN && !MEM_P (xop0))
        xbitpos += GET_MODE_BITSIZE (ext_mode) - unit;
 
       unit = GET_MODE_BITSIZE (ext_mode);
 
+      /* If BITS_BIG_ENDIAN is zero on a BYTES_BIG_ENDIAN machine, we count
+         "backwards" from the size of the unit we are extracting from.
+         Otherwise, we count bits from the most significant on a
+         BYTES/BITS_BIG_ENDIAN machine.  */
+
+      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
+        xbitpos = unit - bitsize - xbitpos;
+
       if (xtarget == 0)
        xtarget = xspec_target = gen_reg_rtx (tmode);
 
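A usage sketch for the new option (the cross-compiler name and source file are illustrative; the option, its default, and the warning text come from the hunks above):

  # ARMv7-A: unaligned access is enabled by default and
  # Tag_CPU_unaligned_access (EABI attribute 34) is emitted as 1.
  arm-none-linux-gnueabi-gcc -march=armv7-a -O2 -S packed.c

  # Explicit opt-out: the compiler falls back to byte-wise sequences.
  arm-none-linux-gnueabi-gcc -march=armv7-a -mno-unaligned-access -O2 -S packed.c

  # Pre-ARMv6 target: requesting the feature triggers the new warning
  # "target CPU does not support unaligned accesses" and it is disabled.
  arm-none-linux-gnueabi-gcc -march=armv5te -munaligned-access -O2 -S packed.c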