author	Koen Kooi <koen@dominion.thruhere.net>	2010-11-02 22:03:58 +0100
committer	Koen Kooi <koen@dominion.thruhere.net>	2010-11-02 22:12:02 +0100
commit	be10a6b1321f250b1034c7d9d0a8ef18b296eef1 (patch)
tree	9249025cbfbfbee4cc430d62b27f75301dd4dfde /recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99402.patch
parent	93b28937ac67ba46d65f55637e42552e224aa7e2 (diff)
download	meta-openembedded-be10a6b1321f250b1034c7d9d0a8ef18b296eef1.tar.gz
angstrom-layers: meta-openembedded: replace poky gcc 4.5 sources with OE ones
This needs further investigation, but for now we can get the tested sources into the poky gcc harness.

Signed-off-by: Koen Kooi <k-kooi@ti.com>
Diffstat (limited to 'recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99402.patch')
-rw-r--r--	recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99402.patch	1268
1 file changed, 1268 insertions, 0 deletions
diff --git a/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99402.patch b/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99402.patch
new file mode 100644
index 0000000000..6627a11d4a
--- /dev/null
+++ b/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99402.patch
@@ -0,0 +1,1268 @@
2010-09-17  Chung-Lin Tang  <cltang@codesourcery.com>

	Backport from mainline:

	2010-07-15  Bernd Schmidt  <bernds@codesourcery.com>

	gcc/
	* postreload.c (last_label_ruid, first_index_reg, last_index_reg):
	New static variables.
	(reload_combine_recognize_pattern): New static function, broken out
	of reload_combine.
	(reload_combine): Use it. Only initialize first_index_reg and
	last_index_reg once.

	2010-07-17  Bernd Schmidt  <bernds@codesourcery.com>

	PR target/42235
	gcc/
	* postreload.c (reload_cse_move2add): Return bool, true if anything
	changed. All callers changed.
	(move2add_use_add2_insn): Likewise.
	(move2add_use_add3_insn): Likewise.
	(reload_cse_regs): If reload_cse_move2add changed anything, rerun
	reload_combine.
	(RELOAD_COMBINE_MAX_USES): Bump to 16.
	(last_jump_ruid): New static variable.
	(struct reg_use): New members CONTAINING_MEM and RUID.
	(reg_state): New members ALL_OFFSETS_MATCH and REAL_STORE_RUID.
	(reload_combine_split_one_ruid, reload_combine_split_ruids,
	reload_combine_purge_insn_uses, reload_combine_closest_single_use
	reload_combine_purge_reg_uses_after_ruid,
	reload_combine_recognize_const_pattern): New static functions.
	(reload_combine_recognize_pattern): Verify that ALL_OFFSETS_MATCH
	is true for our reg and that we have available index regs.
	(reload_combine_note_use): New args RUID and CONTAINING_MEM. All
	callers changed. Use them to initialize fields in struct reg_use.
	(reload_combine): Initialize last_jump_ruid. Be careful when to
	take PREV_INSN of the scanned insn. Update REAL_STORE_RUID fields.
	Call reload_combine_recognize_const_pattern.
	(reload_combine_note_store): Update REAL_STORE_RUID field.

	gcc/testsuite/
	* gcc.target/arm/pr42235.c: New test.

	2010-07-19  Bernd Schmidt  <bernds@codesourcery.com>

	gcc/
	* postreload.c (reload_combine_closest_single_use): Ignore the
	number of uses for DEBUG_INSNs.
	(fixup_debug_insns): New static function.
	(reload_combine_recognize_const_pattern): Use it. Don't let the
	main loop be affected by DEBUG_INSNs.
	Really disallow moving adds past a jump insn.
	(reload_combine_recognize_pattern): Don't update use_ruid here.
	(reload_combine_note_use): Do it here.
	(reload_combine): Use control_flow_insn_p rather than JUMP_P.

	2010-07-20  Bernd Schmidt  <bernds@codesourcery.com>

	gcc/
	* postreload.c (fixup_debug_insns): Remove arg REGNO. New args
	FROM and TO. All callers changed. Don't look for tracked uses,
	just scan the RTL for DEBUG_INSNs and substitute.
	(reload_combine_recognize_pattern): Call fixup_debug_insns.
	(reload_combine): Ignore DEBUG_INSNs.

	2010-07-22  Bernd Schmidt  <bernds@codesourcery.com>

	PR bootstrap/44970
	PR middle-end/45009
	gcc/
	* postreload.c: Include "target.h".
	(reload_combine_closest_single_use): Don't take DEBUG_INSNs
	into account.
	(fixup_debug_insns): Don't copy the rtx.
	(reload_combine_recognize_const_pattern): DEBUG_INSNs can't have uses.
	Don't copy when replacing. Call fixup_debug_insns in the case where
	we merged one add with another.
	(reload_combine_recognize_pattern): Fail if there aren't any uses.
	Try harder to determine whether we're picking a valid index register.
	Don't set store_ruid for an insn we're going to scan in the
	next iteration.
	(reload_combine): Remove unused code.
	(reload_combine_note_use): When updating use information for
	an old insn, ignore a use that occurs after store_ruid.
	* Makefile.in (postreload.o): Update dependencies.

	2010-07-27  Bernd Schmidt  <bernds@codesourcery.com>

	gcc/
	* postreload.c (reload_combine_recognize_const_pattern): Move test
	for limiting the insn movement to the right scope.

	2010-07-27  Bernd Schmidt  <bernds@codesourcery.com>

	gcc/
	* postreload.c (try_replace_in_use): New static function.
	(reload_combine_recognize_const_pattern): Use it here. Allow
	substituting into a final add insn, and substituting into a memory
	reference in an insn that sets the reg.

102=== modified file 'gcc/Makefile.in'
103Index: gcc-4.5/gcc/Makefile.in
104===================================================================
105--- gcc-4.5.orig/gcc/Makefile.in
106+++ gcc-4.5/gcc/Makefile.in
107@@ -3159,7 +3159,7 @@ postreload.o : postreload.c $(CONFIG_H)
108 $(RTL_H) $(REAL_H) $(FLAGS_H) $(EXPR_H) $(OPTABS_H) reload.h $(REGS_H) \
109 hard-reg-set.h insn-config.h $(BASIC_BLOCK_H) $(RECOG_H) output.h \
110 $(FUNCTION_H) $(TOPLEV_H) cselib.h $(TM_P_H) $(EXCEPT_H) $(TREE_H) $(MACHMODE_H) \
111- $(OBSTACK_H) $(TIMEVAR_H) $(TREE_PASS_H) $(DF_H) $(DBGCNT_H)
112+ $(OBSTACK_H) $(TARGET_H) $(TIMEVAR_H) $(TREE_PASS_H) $(DF_H) $(DBGCNT_H)
113 postreload-gcse.o : postreload-gcse.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
114 $(TM_H) $(RTL_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) insn-config.h \
115 $(RECOG_H) $(EXPR_H) $(BASIC_BLOCK_H) $(FUNCTION_H) output.h $(TOPLEV_H) \
116Index: gcc-4.5/gcc/postreload.c
117===================================================================
118--- gcc-4.5.orig/gcc/postreload.c
119+++ gcc-4.5/gcc/postreload.c
120@@ -44,6 +44,7 @@ along with GCC; see the file COPYING3.
121 #include "toplev.h"
122 #include "except.h"
123 #include "tree.h"
124+#include "target.h"
125 #include "timevar.h"
126 #include "tree-pass.h"
127 #include "df.h"
128@@ -56,10 +57,10 @@ static int reload_cse_simplify_set (rtx,
129 static int reload_cse_simplify_operands (rtx, rtx);
130
131 static void reload_combine (void);
132-static void reload_combine_note_use (rtx *, rtx);
133+static void reload_combine_note_use (rtx *, rtx, int, rtx);
134 static void reload_combine_note_store (rtx, const_rtx, void *);
135
136-static void reload_cse_move2add (rtx);
137+static bool reload_cse_move2add (rtx);
138 static void move2add_note_store (rtx, const_rtx, void *);
139
140 /* Call cse / combine like post-reload optimization phases.
141@@ -67,11 +68,16 @@ static void move2add_note_store (rtx, co
142 void
143 reload_cse_regs (rtx first ATTRIBUTE_UNUSED)
144 {
145+ bool moves_converted;
146 reload_cse_regs_1 (first);
147 reload_combine ();
148- reload_cse_move2add (first);
149+ moves_converted = reload_cse_move2add (first);
150 if (flag_expensive_optimizations)
151- reload_cse_regs_1 (first);
152+ {
153+ if (moves_converted)
154+ reload_combine ();
155+ reload_cse_regs_1 (first);
156+ }
157 }
158
159 /* See whether a single set SET is a noop. */
160@@ -660,30 +666,43 @@ reload_cse_simplify_operands (rtx insn,
161
162 /* The maximum number of uses of a register we can keep track of to
163 replace them with reg+reg addressing. */
164-#define RELOAD_COMBINE_MAX_USES 6
165+#define RELOAD_COMBINE_MAX_USES 16
166
167-/* INSN is the insn where a register has been used, and USEP points to the
168- location of the register within the rtl. */
169-struct reg_use { rtx insn, *usep; };
170+/* Describes a recorded use of a register. */
171+struct reg_use
172+{
173+ /* The insn where a register has been used. */
174+ rtx insn;
175+ /* Points to the memory reference enclosing the use, if any, NULL_RTX
176+ otherwise. */
177+ rtx containing_mem;
178+ /* Location of the register withing INSN. */
179+ rtx *usep;
180+ /* The reverse uid of the insn. */
181+ int ruid;
182+};
183
184 /* If the register is used in some unknown fashion, USE_INDEX is negative.
185 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
186- indicates where it becomes live again.
187+ indicates where it is first set or clobbered.
188 Otherwise, USE_INDEX is the index of the last encountered use of the
189- register (which is first among these we have seen since we scan backwards),
190- OFFSET contains the constant offset that is added to the register in
191- all encountered uses, and USE_RUID indicates the first encountered, i.e.
192- last, of these uses.
193+ register (which is first among these we have seen since we scan backwards).
194+ USE_RUID indicates the first encountered, i.e. last, of these uses.
195+ If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
196+ with a constant offset; OFFSET contains this constant in that case.
197 STORE_RUID is always meaningful if we only want to use a value in a
198 register in a different place: it denotes the next insn in the insn
199- stream (i.e. the last encountered) that sets or clobbers the register. */
200+ stream (i.e. the last encountered) that sets or clobbers the register.
201+ REAL_STORE_RUID is similar, but clobbers are ignored when updating it. */
202 static struct
203 {
204 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
205- int use_index;
206 rtx offset;
207+ int use_index;
208 int store_ruid;
209+ int real_store_ruid;
210 int use_ruid;
211+ bool all_offsets_match;
212 } reg_state[FIRST_PSEUDO_REGISTER];
213
214 /* Reverse linear uid. This is increased in reload_combine while scanning
215@@ -691,42 +710,548 @@ static struct
216 and the store_ruid / use_ruid fields in reg_state. */
217 static int reload_combine_ruid;
218
219+/* The RUID of the last label we encountered in reload_combine. */
220+static int last_label_ruid;
221+
222+/* The RUID of the last jump we encountered in reload_combine. */
223+static int last_jump_ruid;
224+
225+/* The register numbers of the first and last index register. A value of
226+ -1 in LAST_INDEX_REG indicates that we've previously computed these
227+ values and found no suitable index registers. */
228+static int first_index_reg = -1;
229+static int last_index_reg;
230+
231 #define LABEL_LIVE(LABEL) \
232 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
233
234+/* Subroutine of reload_combine_split_ruids, called to fix up a single
235+ ruid pointed to by *PRUID if it is higher than SPLIT_RUID. */
236+
237+static inline void
238+reload_combine_split_one_ruid (int *pruid, int split_ruid)
239+{
240+ if (*pruid > split_ruid)
241+ (*pruid)++;
242+}
243+
244+/* Called when we insert a new insn in a position we've already passed in
245+ the scan. Examine all our state, increasing all ruids that are higher
246+ than SPLIT_RUID by one in order to make room for a new insn. */
247+
248+static void
249+reload_combine_split_ruids (int split_ruid)
250+{
251+ unsigned i;
252+
253+ reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
254+ reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
255+ reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);
256+
257+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
258+ {
259+ int j, idx = reg_state[i].use_index;
260+ reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
261+ reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
262+ reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
263+ split_ruid);
264+ if (idx < 0)
265+ continue;
266+ for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
267+ {
268+ reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
269+ split_ruid);
270+ }
271+ }
272+}
273+
274+/* Called when we are about to rescan a previously encountered insn with
275+ reload_combine_note_use after modifying some part of it. This clears all
276+ information about uses in that particular insn. */
277+
278+static void
279+reload_combine_purge_insn_uses (rtx insn)
280+{
281+ unsigned i;
282+
283+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
284+ {
285+ int j, k, idx = reg_state[i].use_index;
286+ if (idx < 0)
287+ continue;
288+ j = k = RELOAD_COMBINE_MAX_USES;
289+ while (j-- > idx)
290+ {
291+ if (reg_state[i].reg_use[j].insn != insn)
292+ {
293+ k--;
294+ if (k != j)
295+ reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
296+ }
297+ }
298+ reg_state[i].use_index = k;
299+ }
300+}
301+
302+/* Called when we need to forget about all uses of REGNO after an insn
303+ which is identified by RUID. */
304+
305+static void
306+reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
307+{
308+ int j, k, idx = reg_state[regno].use_index;
309+ if (idx < 0)
310+ return;
311+ j = k = RELOAD_COMBINE_MAX_USES;
312+ while (j-- > idx)
313+ {
314+ if (reg_state[regno].reg_use[j].ruid >= ruid)
315+ {
316+ k--;
317+ if (k != j)
318+ reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
319+ }
320+ }
321+ reg_state[regno].use_index = k;
322+}
323+
324+/* Find the use of REGNO with the ruid that is highest among those
325+ lower than RUID_LIMIT, and return it if it is the only use of this
326+ reg in the insn. Return NULL otherwise. */
327+
328+static struct reg_use *
329+reload_combine_closest_single_use (unsigned regno, int ruid_limit)
330+{
331+ int i, best_ruid = 0;
332+ int use_idx = reg_state[regno].use_index;
333+ struct reg_use *retval;
334+
335+ if (use_idx < 0)
336+ return NULL;
337+ retval = NULL;
338+ for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
339+ {
340+ struct reg_use *use = reg_state[regno].reg_use + i;
341+ int this_ruid = use->ruid;
342+ if (this_ruid >= ruid_limit)
343+ continue;
344+ if (this_ruid > best_ruid)
345+ {
346+ best_ruid = this_ruid;
347+ retval = use;
348+ }
349+ else if (this_ruid == best_ruid)
350+ retval = NULL;
351+ }
352+ if (last_label_ruid >= best_ruid)
353+ return NULL;
354+ return retval;
355+}
356+
357+/* After we've moved an add insn, fix up any debug insns that occur
358+ between the old location of the add and the new location. REG is
359+ the destination register of the add insn; REPLACEMENT is the
360+ SET_SRC of the add. FROM and TO specify the range in which we
361+ should make this change on debug insns. */
362+
363+static void
364+fixup_debug_insns (rtx reg, rtx replacement, rtx from, rtx to)
365+{
366+ rtx insn;
367+ for (insn = from; insn != to; insn = NEXT_INSN (insn))
368+ {
369+ rtx t;
370+
371+ if (!DEBUG_INSN_P (insn))
372+ continue;
373+
374+ t = INSN_VAR_LOCATION_LOC (insn);
375+ t = simplify_replace_rtx (t, reg, replacement);
376+ validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
377+ }
378+}
379+
380+/* Subroutine of reload_combine_recognize_const_pattern. Try to replace REG
381+ with SRC in the insn described by USE, taking costs into account. Return
382+ true if we made the replacement. */
383+
384+static bool
385+try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
386+{
387+ rtx use_insn = use->insn;
388+ rtx mem = use->containing_mem;
389+ bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));
390+
391+ if (mem != NULL_RTX)
392+ {
393+ addr_space_t as = MEM_ADDR_SPACE (mem);
394+ rtx oldaddr = XEXP (mem, 0);
395+ rtx newaddr = NULL_RTX;
396+ int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
397+ int new_cost;
398+
399+ newaddr = simplify_replace_rtx (oldaddr, reg, src);
400+ if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
401+ {
402+ XEXP (mem, 0) = newaddr;
403+ new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
404+ XEXP (mem, 0) = oldaddr;
405+ if (new_cost <= old_cost
406+ && validate_change (use_insn,
407+ &XEXP (mem, 0), newaddr, 0))
408+ return true;
409+ }
410+ }
411+ else
412+ {
413+ rtx new_set = single_set (use_insn);
414+ if (new_set
415+ && REG_P (SET_DEST (new_set))
416+ && GET_CODE (SET_SRC (new_set)) == PLUS
417+ && REG_P (XEXP (SET_SRC (new_set), 0))
418+ && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
419+ {
420+ rtx new_src;
421+ int old_cost = rtx_cost (SET_SRC (new_set), SET, speed);
422+
423+ gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
424+ new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);
425+
426+ if (rtx_cost (new_src, SET, speed) <= old_cost
427+ && validate_change (use_insn, &SET_SRC (new_set),
428+ new_src, 0))
429+ return true;
430+ }
431+ }
432+ return false;
433+}
434+
435+/* Called by reload_combine when scanning INSN. This function tries to detect
436+ patterns where a constant is added to a register, and the result is used
437+ in an address.
438+ Return true if no further processing is needed on INSN; false if it wasn't
439+ recognized and should be handled normally. */
440+
441+static bool
442+reload_combine_recognize_const_pattern (rtx insn)
443+{
444+ int from_ruid = reload_combine_ruid;
445+ rtx set, pat, reg, src, addreg;
446+ unsigned int regno;
447+ struct reg_use *use;
448+ bool must_move_add;
449+ rtx add_moved_after_insn = NULL_RTX;
450+ int add_moved_after_ruid = 0;
451+ int clobbered_regno = -1;
452+
453+ set = single_set (insn);
454+ if (set == NULL_RTX)
455+ return false;
456+
457+ reg = SET_DEST (set);
458+ src = SET_SRC (set);
459+ if (!REG_P (reg)
460+ || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1
461+ || GET_MODE (reg) != Pmode
462+ || reg == stack_pointer_rtx)
463+ return false;
464+
465+ regno = REGNO (reg);
466+
467+ /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
468+ uses of REG1 inside an address, or inside another add insn. If
469+ possible and profitable, merge the addition into subsequent
470+ uses. */
471+ if (GET_CODE (src) != PLUS
472+ || !REG_P (XEXP (src, 0))
473+ || !CONSTANT_P (XEXP (src, 1)))
474+ return false;
475+
476+ addreg = XEXP (src, 0);
477+ must_move_add = rtx_equal_p (reg, addreg);
478+
479+ pat = PATTERN (insn);
480+ if (must_move_add && set != pat)
481+ {
482+ /* We have to be careful when moving the add; apart from the
483+ single_set there may also be clobbers. Recognize one special
484+ case, that of one clobber alongside the set (likely a clobber
485+ of the CC register). */
486+ gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
487+ if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
488+ || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
489+ || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
490+ return false;
491+ clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
492+ }
493+
494+ do
495+ {
496+ use = reload_combine_closest_single_use (regno, from_ruid);
497+
498+ if (use)
499+ /* Start the search for the next use from here. */
500+ from_ruid = use->ruid;
501+
502+ if (use && GET_MODE (*use->usep) == Pmode)
503+ {
504+ bool delete_add = false;
505+ rtx use_insn = use->insn;
506+ int use_ruid = use->ruid;
507+
508+ /* Avoid moving the add insn past a jump. */
509+ if (must_move_add && use_ruid <= last_jump_ruid)
510+ break;
511+
512+ /* If the add clobbers another hard reg in parallel, don't move
513+ it past a real set of this hard reg. */
514+ if (must_move_add && clobbered_regno >= 0
515+ && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
516+ break;
517+
518+ gcc_assert (reg_state[regno].store_ruid <= use_ruid);
519+ /* Avoid moving a use of ADDREG past a point where it is stored. */
520+ if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
521+ break;
522+
523+ /* We also must not move the addition past an insn that sets
524+ the same register, unless we can combine two add insns. */
525+ if (must_move_add && reg_state[regno].store_ruid == use_ruid)
526+ {
527+ if (use->containing_mem == NULL_RTX)
528+ delete_add = true;
529+ else
530+ break;
531+ }
532+
533+ if (try_replace_in_use (use, reg, src))
534+ {
535+ reload_combine_purge_insn_uses (use_insn);
536+ reload_combine_note_use (&PATTERN (use_insn), use_insn,
537+ use_ruid, NULL_RTX);
538+
539+ if (delete_add)
540+ {
541+ fixup_debug_insns (reg, src, insn, use_insn);
542+ delete_insn (insn);
543+ return true;
544+ }
545+ if (must_move_add)
546+ {
547+ add_moved_after_insn = use_insn;
548+ add_moved_after_ruid = use_ruid;
549+ }
550+ continue;
551+ }
552+ }
553+ /* If we get here, we couldn't handle this use. */
554+ if (must_move_add)
555+ break;
556+ }
557+ while (use);
558+
559+ if (!must_move_add || add_moved_after_insn == NULL_RTX)
560+ /* Process the add normally. */
561+ return false;
562+
563+ fixup_debug_insns (reg, src, insn, add_moved_after_insn);
564+
565+ reorder_insns (insn, insn, add_moved_after_insn);
566+ reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
567+ reload_combine_split_ruids (add_moved_after_ruid - 1);
568+ reload_combine_note_use (&PATTERN (insn), insn,
569+ add_moved_after_ruid, NULL_RTX);
570+ reg_state[regno].store_ruid = add_moved_after_ruid;
571+
572+ return true;
573+}
574+
575+/* Called by reload_combine when scanning INSN. Try to detect a pattern we
576+ can handle and improve. Return true if no further processing is needed on
577+ INSN; false if it wasn't recognized and should be handled normally. */
578+
579+static bool
580+reload_combine_recognize_pattern (rtx insn)
581+{
582+ rtx set, reg, src;
583+ unsigned int regno;
584+
585+ set = single_set (insn);
586+ if (set == NULL_RTX)
587+ return false;
588+
589+ reg = SET_DEST (set);
590+ src = SET_SRC (set);
591+ if (!REG_P (reg)
592+ || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1)
593+ return false;
594+
595+ regno = REGNO (reg);
596+
597+ /* Look for (set (REGX) (CONST_INT))
598+ (set (REGX) (PLUS (REGX) (REGY)))
599+ ...
600+ ... (MEM (REGX)) ...
601+ and convert it to
602+ (set (REGZ) (CONST_INT))
603+ ...
604+ ... (MEM (PLUS (REGZ) (REGY)))... .
605+
606+ First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
607+ and that we know all uses of REGX before it dies.
608+ Also, explicitly check that REGX != REGY; our life information
609+ does not yet show whether REGY changes in this insn. */
610+
611+ if (GET_CODE (src) == PLUS
612+ && reg_state[regno].all_offsets_match
613+ && last_index_reg != -1
614+ && REG_P (XEXP (src, 1))
615+ && rtx_equal_p (XEXP (src, 0), reg)
616+ && !rtx_equal_p (XEXP (src, 1), reg)
617+ && reg_state[regno].use_index >= 0
618+ && reg_state[regno].use_index < RELOAD_COMBINE_MAX_USES
619+ && last_label_ruid < reg_state[regno].use_ruid)
620+ {
621+ rtx base = XEXP (src, 1);
622+ rtx prev = prev_nonnote_insn (insn);
623+ rtx prev_set = prev ? single_set (prev) : NULL_RTX;
624+ rtx index_reg = NULL_RTX;
625+ rtx reg_sum = NULL_RTX;
626+ int i;
627+
628+ /* Now we need to set INDEX_REG to an index register (denoted as
629+ REGZ in the illustration above) and REG_SUM to the expression
630+ register+register that we want to use to substitute uses of REG
631+ (typically in MEMs) with. First check REG and BASE for being
632+ index registers; we can use them even if they are not dead. */
633+ if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
634+ || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
635+ REGNO (base)))
636+ {
637+ index_reg = reg;
638+ reg_sum = src;
639+ }
640+ else
641+ {
642+ /* Otherwise, look for a free index register. Since we have
643+ checked above that neither REG nor BASE are index registers,
644+ if we find anything at all, it will be different from these
645+ two registers. */
646+ for (i = first_index_reg; i <= last_index_reg; i++)
647+ {
648+ if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
649+ && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
650+ && reg_state[i].store_ruid <= reg_state[regno].use_ruid
651+ && (call_used_regs[i] || df_regs_ever_live_p (i))
652+ && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
653+ && !fixed_regs[i] && !global_regs[i]
654+ && hard_regno_nregs[i][GET_MODE (reg)] == 1
655+ && targetm.hard_regno_scratch_ok (i))
656+ {
657+ index_reg = gen_rtx_REG (GET_MODE (reg), i);
658+ reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
659+ break;
660+ }
661+ }
662+ }
663+
664+ /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
665+ (REGY), i.e. BASE, is not clobbered before the last use we'll
666+ create. */
667+ if (reg_sum
668+ && prev_set
669+ && CONST_INT_P (SET_SRC (prev_set))
670+ && rtx_equal_p (SET_DEST (prev_set), reg)
671+ && (reg_state[REGNO (base)].store_ruid
672+ <= reg_state[regno].use_ruid))
673+ {
674+ /* Change destination register and, if necessary, the constant
675+ value in PREV, the constant loading instruction. */
676+ validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
677+ if (reg_state[regno].offset != const0_rtx)
678+ validate_change (prev,
679+ &SET_SRC (prev_set),
680+ GEN_INT (INTVAL (SET_SRC (prev_set))
681+ + INTVAL (reg_state[regno].offset)),
682+ 1);
683+
684+ /* Now for every use of REG that we have recorded, replace REG
685+ with REG_SUM. */
686+ for (i = reg_state[regno].use_index;
687+ i < RELOAD_COMBINE_MAX_USES; i++)
688+ validate_unshare_change (reg_state[regno].reg_use[i].insn,
689+ reg_state[regno].reg_use[i].usep,
690+ /* Each change must have its own
691+ replacement. */
692+ reg_sum, 1);
693+
694+ if (apply_change_group ())
695+ {
696+ struct reg_use *lowest_ruid = NULL;
697+
698+ /* For every new use of REG_SUM, we have to record the use
699+ of BASE therein, i.e. operand 1. */
700+ for (i = reg_state[regno].use_index;
701+ i < RELOAD_COMBINE_MAX_USES; i++)
702+ {
703+ struct reg_use *use = reg_state[regno].reg_use + i;
704+ reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
705+ use->ruid, use->containing_mem);
706+ if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
707+ lowest_ruid = use;
708+ }
709+
710+ fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);
711+
712+ /* Delete the reg-reg addition. */
713+ delete_insn (insn);
714+
715+ if (reg_state[regno].offset != const0_rtx)
716+ /* Previous REG_EQUIV / REG_EQUAL notes for PREV
717+ are now invalid. */
718+ remove_reg_equal_equiv_notes (prev);
719+
720+ reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
721+ return true;
722+ }
723+ }
724+ }
725+ return false;
726+}
727+
728 static void
729 reload_combine (void)
730 {
731- rtx insn, set;
732- int first_index_reg = -1;
733- int last_index_reg = 0;
734+ rtx insn, prev;
735 int i;
736 basic_block bb;
737 unsigned int r;
738- int last_label_ruid;
739 int min_labelno, n_labels;
740 HARD_REG_SET ever_live_at_start, *label_live;
741
742- /* If reg+reg can be used in offsetable memory addresses, the main chunk of
743- reload has already used it where appropriate, so there is no use in
744- trying to generate it now. */
745- if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
746- return;
747-
748 /* To avoid wasting too much time later searching for an index register,
749 determine the minimum and maximum index register numbers. */
750- for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
751- if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
752- {
753- if (first_index_reg == -1)
754- first_index_reg = r;
755+ if (INDEX_REG_CLASS == NO_REGS)
756+ last_index_reg = -1;
757+ else if (first_index_reg == -1 && last_index_reg == 0)
758+ {
759+ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
760+ if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
761+ {
762+ if (first_index_reg == -1)
763+ first_index_reg = r;
764
765- last_index_reg = r;
766- }
767+ last_index_reg = r;
768+ }
769
770- /* If no index register is available, we can quit now. */
771- if (first_index_reg == -1)
772- return;
773+ /* If no index register is available, we can quit now. Set LAST_INDEX_REG
774+ to -1 so we'll know to quit early the next time we get here. */
775+ if (first_index_reg == -1)
776+ {
777+ last_index_reg = -1;
778+ return;
779+ }
780+ }
781
782 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
783 information is a bit fuzzy immediately after reload, but it's
784@@ -753,20 +1278,23 @@ reload_combine (void)
785 }
786
787 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
788- last_label_ruid = reload_combine_ruid = 0;
789+ last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
790 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
791 {
792- reg_state[r].store_ruid = reload_combine_ruid;
793+ reg_state[r].store_ruid = 0;
794+ reg_state[r].real_store_ruid = 0;
795 if (fixed_regs[r])
796 reg_state[r].use_index = -1;
797 else
798 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
799 }
800
801- for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
802+ for (insn = get_last_insn (); insn; insn = prev)
803 {
804 rtx note;
805
806+ prev = PREV_INSN (insn);
807+
808 /* We cannot do our optimization across labels. Invalidating all the use
809 information we have would be costly, so we just note where the label
810 is and then later disable any optimization that would cross it. */
811@@ -777,141 +1305,17 @@ reload_combine (void)
812 if (! fixed_regs[r])
813 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
814
815- if (! INSN_P (insn))
816+ if (! NONDEBUG_INSN_P (insn))
817 continue;
818
819 reload_combine_ruid++;
820
821- /* Look for (set (REGX) (CONST_INT))
822- (set (REGX) (PLUS (REGX) (REGY)))
823- ...
824- ... (MEM (REGX)) ...
825- and convert it to
826- (set (REGZ) (CONST_INT))
827- ...
828- ... (MEM (PLUS (REGZ) (REGY)))... .
829-
830- First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
831- and that we know all uses of REGX before it dies.
832- Also, explicitly check that REGX != REGY; our life information
833- does not yet show whether REGY changes in this insn. */
834- set = single_set (insn);
835- if (set != NULL_RTX
836- && REG_P (SET_DEST (set))
837- && (hard_regno_nregs[REGNO (SET_DEST (set))]
838- [GET_MODE (SET_DEST (set))]
839- == 1)
840- && GET_CODE (SET_SRC (set)) == PLUS
841- && REG_P (XEXP (SET_SRC (set), 1))
842- && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
843- && !rtx_equal_p (XEXP (SET_SRC (set), 1), SET_DEST (set))
844- && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
845- {
846- rtx reg = SET_DEST (set);
847- rtx plus = SET_SRC (set);
848- rtx base = XEXP (plus, 1);
849- rtx prev = prev_nonnote_nondebug_insn (insn);
850- rtx prev_set = prev ? single_set (prev) : NULL_RTX;
851- unsigned int regno = REGNO (reg);
852- rtx index_reg = NULL_RTX;
853- rtx reg_sum = NULL_RTX;
854-
855- /* Now we need to set INDEX_REG to an index register (denoted as
856- REGZ in the illustration above) and REG_SUM to the expression
857- register+register that we want to use to substitute uses of REG
858- (typically in MEMs) with. First check REG and BASE for being
859- index registers; we can use them even if they are not dead. */
860- if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
861- || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
862- REGNO (base)))
863- {
864- index_reg = reg;
865- reg_sum = plus;
866- }
867- else
868- {
869- /* Otherwise, look for a free index register. Since we have
870- checked above that neither REG nor BASE are index registers,
871- if we find anything at all, it will be different from these
872- two registers. */
873- for (i = first_index_reg; i <= last_index_reg; i++)
874- {
875- if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
876- i)
877- && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
878- && reg_state[i].store_ruid <= reg_state[regno].use_ruid
879- && hard_regno_nregs[i][GET_MODE (reg)] == 1)
880- {
881- index_reg = gen_rtx_REG (GET_MODE (reg), i);
882- reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
883- break;
884- }
885- }
886- }
887-
888- /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
889- (REGY), i.e. BASE, is not clobbered before the last use we'll
890- create. */
891- if (reg_sum
892- && prev_set
893- && CONST_INT_P (SET_SRC (prev_set))
894- && rtx_equal_p (SET_DEST (prev_set), reg)
895- && reg_state[regno].use_index >= 0
896- && (reg_state[REGNO (base)].store_ruid
897- <= reg_state[regno].use_ruid))
898- {
899- int i;
900-
901- /* Change destination register and, if necessary, the constant
902- value in PREV, the constant loading instruction. */
903- validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
904- if (reg_state[regno].offset != const0_rtx)
905- validate_change (prev,
906- &SET_SRC (prev_set),
907- GEN_INT (INTVAL (SET_SRC (prev_set))
908- + INTVAL (reg_state[regno].offset)),
909- 1);
910+ if (control_flow_insn_p (insn))
911+ last_jump_ruid = reload_combine_ruid;
912
913- /* Now for every use of REG that we have recorded, replace REG
914- with REG_SUM. */
915- for (i = reg_state[regno].use_index;
916- i < RELOAD_COMBINE_MAX_USES; i++)
917- validate_unshare_change (reg_state[regno].reg_use[i].insn,
918- reg_state[regno].reg_use[i].usep,
919- /* Each change must have its own
920- replacement. */
921- reg_sum, 1);
922-
923- if (apply_change_group ())
924- {
925- /* For every new use of REG_SUM, we have to record the use
926- of BASE therein, i.e. operand 1. */
927- for (i = reg_state[regno].use_index;
928- i < RELOAD_COMBINE_MAX_USES; i++)
929- reload_combine_note_use
930- (&XEXP (*reg_state[regno].reg_use[i].usep, 1),
931- reg_state[regno].reg_use[i].insn);
932-
933- if (reg_state[REGNO (base)].use_ruid
934- > reg_state[regno].use_ruid)
935- reg_state[REGNO (base)].use_ruid
936- = reg_state[regno].use_ruid;
937-
938- /* Delete the reg-reg addition. */
939- delete_insn (insn);
940-
941- if (reg_state[regno].offset != const0_rtx)
942- /* Previous REG_EQUIV / REG_EQUAL notes for PREV
943- are now invalid. */
944- remove_reg_equal_equiv_notes (prev);
945-
946- reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
947- reg_state[REGNO (index_reg)].store_ruid
948- = reload_combine_ruid;
949- continue;
950- }
951- }
952- }
953+ if (reload_combine_recognize_const_pattern (insn)
954+ || reload_combine_recognize_pattern (insn))
955+ continue;
956
957 note_stores (PATTERN (insn), reload_combine_note_store, NULL);
958
959@@ -967,7 +1371,8 @@ reload_combine (void)
960 reg_state[i].use_index = -1;
961 }
962
963- reload_combine_note_use (&PATTERN (insn), insn);
964+ reload_combine_note_use (&PATTERN (insn), insn,
965+ reload_combine_ruid, NULL_RTX);
966 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
967 {
968 if (REG_NOTE_KIND (note) == REG_INC
969@@ -976,6 +1381,7 @@ reload_combine (void)
970 int regno = REGNO (XEXP (note, 0));
971
972 reg_state[regno].store_ruid = reload_combine_ruid;
973+ reg_state[regno].real_store_ruid = reload_combine_ruid;
974 reg_state[regno].use_index = -1;
975 }
976 }
977@@ -985,8 +1391,8 @@ reload_combine (void)
978 }
979
980 /* Check if DST is a register or a subreg of a register; if it is,
981- update reg_state[regno].store_ruid and reg_state[regno].use_index
982- accordingly. Called via note_stores from reload_combine. */
983+ update store_ruid, real_store_ruid and use_index in the reg_state
984+ structure accordingly. Called via note_stores from reload_combine. */
985
986 static void
987 reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
988@@ -1010,14 +1416,14 @@ reload_combine_note_store (rtx dst, cons
989 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
990 careful with registers / register parts that are not full words.
991 Similarly for ZERO_EXTRACT. */
992- if (GET_CODE (set) != SET
993- || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
994+ if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
995 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
996 {
997 for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
998 {
999 reg_state[i].use_index = -1;
1000 reg_state[i].store_ruid = reload_combine_ruid;
1001+ reg_state[i].real_store_ruid = reload_combine_ruid;
1002 }
1003 }
1004 else
1005@@ -1025,6 +1431,8 @@ reload_combine_note_store (rtx dst, cons
1006 for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
1007 {
1008 reg_state[i].store_ruid = reload_combine_ruid;
1009+ if (GET_CODE (set) == SET)
1010+ reg_state[i].real_store_ruid = reload_combine_ruid;
1011 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
1012 }
1013 }
1014@@ -1035,7 +1443,7 @@ reload_combine_note_store (rtx dst, cons
1015 *XP is the pattern of INSN, or a part of it.
1016 Called from reload_combine, and recursively by itself. */
1017 static void
1018-reload_combine_note_use (rtx *xp, rtx insn)
1019+reload_combine_note_use (rtx *xp, rtx insn, int ruid, rtx containing_mem)
1020 {
1021 rtx x = *xp;
1022 enum rtx_code code = x->code;
1023@@ -1048,7 +1456,7 @@ reload_combine_note_use (rtx *xp, rtx in
1024 case SET:
1025 if (REG_P (SET_DEST (x)))
1026 {
1027- reload_combine_note_use (&SET_SRC (x), insn);
1028+ reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
1029 return;
1030 }
1031 break;
1032@@ -1104,6 +1512,11 @@ reload_combine_note_use (rtx *xp, rtx in
1033 return;
1034 }
1035
1036+ /* We may be called to update uses in previously seen insns.
1037+ Don't add uses beyond the last store we saw. */
1038+ if (ruid < reg_state[regno].store_ruid)
1039+ return;
1040+
1041 /* If this register is already used in some unknown fashion, we
1042 can't do anything.
1043 If we decrement the index from zero to -1, we can't store more
1044@@ -1112,29 +1525,34 @@ reload_combine_note_use (rtx *xp, rtx in
1045 if (use_index < 0)
1046 return;
1047
1048- if (use_index != RELOAD_COMBINE_MAX_USES - 1)
1049- {
1050- /* We have found another use for a register that is already
1051- used later. Check if the offsets match; if not, mark the
1052- register as used in an unknown fashion. */
1053- if (! rtx_equal_p (offset, reg_state[regno].offset))
1054- {
1055- reg_state[regno].use_index = -1;
1056- return;
1057- }
1058- }
1059- else
1060+ if (use_index == RELOAD_COMBINE_MAX_USES - 1)
1061 {
1062 /* This is the first use of this register we have seen since we
1063 marked it as dead. */
1064 reg_state[regno].offset = offset;
1065- reg_state[regno].use_ruid = reload_combine_ruid;
1066+ reg_state[regno].all_offsets_match = true;
1067+ reg_state[regno].use_ruid = ruid;
1068+ }
1069+ else
1070+ {
1071+ if (reg_state[regno].use_ruid > ruid)
1072+ reg_state[regno].use_ruid = ruid;
1073+
1074+ if (! rtx_equal_p (offset, reg_state[regno].offset))
1075+ reg_state[regno].all_offsets_match = false;
1076 }
1077+
1078 reg_state[regno].reg_use[use_index].insn = insn;
1079+ reg_state[regno].reg_use[use_index].ruid = ruid;
1080+ reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
1081 reg_state[regno].reg_use[use_index].usep = xp;
1082 return;
1083 }
1084
1085+ case MEM:
1086+ containing_mem = x;
1087+ break;
1088+
1089 default:
1090 break;
1091 }
1092@@ -1144,11 +1562,12 @@ reload_combine_note_use (rtx *xp, rtx in
1093 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1094 {
1095 if (fmt[i] == 'e')
1096- reload_combine_note_use (&XEXP (x, i), insn);
1097+ reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
1098 else if (fmt[i] == 'E')
1099 {
1100 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1101- reload_combine_note_use (&XVECEXP (x, i, j), insn);
1102+ reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
1103+ containing_mem);
1104 }
1105 }
1106 }
1107@@ -1196,9 +1615,10 @@ static int move2add_last_label_luid;
1108 while REG is known to already have value (SYM + offset).
1109 This function tries to change INSN into an add instruction
1110 (set (REG) (plus (REG) (OFF - offset))) using the known value.
1111- It also updates the information about REG's known value. */
1112+ It also updates the information about REG's known value.
1113+ Return true if we made a change. */
1114
1115-static void
1116+static bool
1117 move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx insn)
1118 {
1119 rtx pat = PATTERN (insn);
1120@@ -1207,6 +1627,7 @@ move2add_use_add2_insn (rtx reg, rtx sym
1121 rtx new_src = gen_int_mode (INTVAL (off) - reg_offset[regno],
1122 GET_MODE (reg));
1123 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
1124+ bool changed = false;
1125
1126 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
1127 use (set (reg) (reg)) instead.
1128@@ -1221,13 +1642,13 @@ move2add_use_add2_insn (rtx reg, rtx sym
1129 (reg)), would be discarded. Maybe we should
1130 try a truncMN pattern? */
1131 if (INTVAL (off) == reg_offset [regno])
1132- validate_change (insn, &SET_SRC (pat), reg, 0);
1133+ changed = validate_change (insn, &SET_SRC (pat), reg, 0);
1134 }
1135 else if (rtx_cost (new_src, PLUS, speed) < rtx_cost (src, SET, speed)
1136 && have_add2_insn (reg, new_src))
1137 {
1138 rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);
1139- validate_change (insn, &SET_SRC (pat), tem, 0);
1140+ changed = validate_change (insn, &SET_SRC (pat), tem, 0);
1141 }
1142 else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
1143 {
1144@@ -1252,8 +1673,9 @@ move2add_use_add2_insn (rtx reg, rtx sym
1145 gen_rtx_STRICT_LOW_PART (VOIDmode,
1146 narrow_reg),
1147 narrow_src);
1148- if (validate_change (insn, &PATTERN (insn),
1149- new_set, 0))
1150+ changed = validate_change (insn, &PATTERN (insn),
1151+ new_set, 0);
1152+ if (changed)
1153 break;
1154 }
1155 }
1156@@ -1263,6 +1685,7 @@ move2add_use_add2_insn (rtx reg, rtx sym
1157 reg_mode[regno] = GET_MODE (reg);
1158 reg_symbol_ref[regno] = sym;
1159 reg_offset[regno] = INTVAL (off);
1160+ return changed;
1161 }
1162
1163
1164@@ -1272,9 +1695,10 @@ move2add_use_add2_insn (rtx reg, rtx sym
1165 value (SYM + offset) and change INSN into an add instruction
1166 (set (REG) (plus (the found register) (OFF - offset))) if such
1167 a register is found. It also updates the information about
1168- REG's known value. */
1169+ REG's known value.
1170+ Return true iff we made a change. */
1171
1172-static void
1173+static bool
1174 move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx insn)
1175 {
1176 rtx pat = PATTERN (insn);
1177@@ -1284,6 +1708,7 @@ move2add_use_add3_insn (rtx reg, rtx sym
1178 int min_regno;
1179 bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
1180 int i;
1181+ bool changed = false;
1182
1183 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1184 if (reg_set_luid[i] > move2add_last_label_luid
1185@@ -1328,20 +1753,25 @@ move2add_use_add3_insn (rtx reg, rtx sym
1186 GET_MODE (reg));
1187 tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
1188 }
1189- validate_change (insn, &SET_SRC (pat), tem, 0);
1190+ if (validate_change (insn, &SET_SRC (pat), tem, 0))
1191+ changed = true;
1192 }
1193 reg_set_luid[regno] = move2add_luid;
1194 reg_base_reg[regno] = -1;
1195 reg_mode[regno] = GET_MODE (reg);
1196 reg_symbol_ref[regno] = sym;
1197 reg_offset[regno] = INTVAL (off);
1198+ return changed;
1199 }
1200
1201-static void
1202+/* Convert move insns with constant inputs to additions if they are cheaper.
1203+ Return true if any changes were made. */
1204+static bool
1205 reload_cse_move2add (rtx first)
1206 {
1207 int i;
1208 rtx insn;
1209+ bool changed = false;
1210
1211 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1212 {
1213@@ -1402,7 +1832,7 @@ reload_cse_move2add (rtx first)
1214 && reg_base_reg[regno] < 0
1215 && reg_symbol_ref[regno] == NULL_RTX)
1216 {
1217- move2add_use_add2_insn (reg, NULL_RTX, src, insn);
1218+ changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
1219 continue;
1220 }
1221
1222@@ -1463,6 +1893,7 @@ reload_cse_move2add (rtx first)
1223 }
1224 if (success)
1225 delete_insn (insn);
1226+ changed |= success;
1227 insn = next;
1228 reg_mode[regno] = GET_MODE (reg);
1229 reg_offset[regno] =
1230@@ -1508,12 +1939,12 @@ reload_cse_move2add (rtx first)
1231 && reg_base_reg[regno] < 0
1232 && reg_symbol_ref[regno] != NULL_RTX
1233 && rtx_equal_p (sym, reg_symbol_ref[regno]))
1234- move2add_use_add2_insn (reg, sym, off, insn);
1235+ changed |= move2add_use_add2_insn (reg, sym, off, insn);
1236
1237 /* Otherwise, we have to find a register whose value is sum
1238 of sym and some constant value. */
1239 else
1240- move2add_use_add3_insn (reg, sym, off, insn);
1241+ changed |= move2add_use_add3_insn (reg, sym, off, insn);
1242
1243 continue;
1244 }
1245@@ -1568,6 +1999,7 @@ reload_cse_move2add (rtx first)
1246 }
1247 }
1248 }
1249+ return changed;
1250 }
1251
1252 /* SET is a SET or CLOBBER that sets DST. DATA is the insn which
1253Index: gcc-4.5/testsuite/gcc.target/arm/pr42235.c
1254===================================================================
1255--- /dev/null
1256+++ gcc-4.5/testsuite/gcc.target/arm/pr42235.c
1257@@ -0,0 +1,11 @@
1258+/* { dg-options "-mthumb -O2 -march=armv5te" } */
1259+/* { dg-require-effective-target arm_thumb1_ok } */
1260+/* { dg-final { scan-assembler-not "add\[\\t \]*r.,\[\\t \]*r.,\[\\t \]*\#1" } } */
1261+/* { dg-final { scan-assembler-not "add\[\\t \]*r.,\[\\t \]*\#1" } } */
1262+
1263+#include <string.h>
1264+
1265+int foo (char *x)
1266+{
1267+ memset (x, 0, 6);
1268+}