
Commit cf64c2a

Merge branch 'work.sparc32' of git://git.kernel.org/pub/scm/linux/kernel/git/viro/vfs

2 parents: b9d6243 + 73686e7

14 files changed: +193 −526 lines

arch/sparc/include/asm/elf_64.h

Lines changed: 0 additions & 1 deletion
@@ -8,7 +8,6 @@
 
 #include <asm/ptrace.h>
 #include <asm/processor.h>
-#include <asm/extable_64.h>
 #include <asm/spitfire.h>
 #include <asm/adi.h>

arch/sparc/include/asm/extable_64.h renamed to arch/sparc/include/asm/extable.h

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 /* SPDX-License-Identifier: GPL-2.0 */
-#ifndef __ASM_EXTABLE64_H
-#define __ASM_EXTABLE64_H
+#ifndef __ASM_EXTABLE_H
+#define __ASM_EXTABLE_H
 /*
  * The exception table consists of pairs of addresses: the first is the
  * address of an instruction that is allowed to fault, and the second is
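
For reference, the pair layout this header's comment describes is the struct removed from uaccess_32.h below; both halves of the series converge on it, and the generic search_exception_tables() from linux/extable.h walks tables of exactly these entries:

	struct exception_table_entry {
		unsigned long insn;	/* address of the instruction allowed to fault */
		unsigned long fixup;	/* address at which execution should continue */
	};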

arch/sparc/include/asm/uaccess.h

Lines changed: 3 additions & 0 deletions
@@ -1,6 +1,9 @@
 /* SPDX-License-Identifier: GPL-2.0 */
 #ifndef ___ASM_SPARC_UACCESS_H
 #define ___ASM_SPARC_UACCESS_H
+
+#include <asm/extable.h>
+
 #if defined(__sparc__) && defined(__arch64__)
 #include <asm/uaccess_64.h>
 #else

arch/sparc/include/asm/uaccess_32.h

Lines changed: 0 additions & 38 deletions
@@ -13,9 +13,6 @@
 
 #include <asm/processor.h>
 
-#define ARCH_HAS_SORT_EXTABLE
-#define ARCH_HAS_SEARCH_EXTABLE
-
 /* Sparc is not segmented, however we need to be able to fool access_ok()
  * when doing system calls from kernel mode legitimately.
  *
@@ -40,36 +37,6 @@
 #define __access_ok(addr, size) (__user_ok((addr) & get_fs().seg, (size)))
 #define access_ok(addr, size) __access_ok((unsigned long)(addr), size)
 
-/*
- * The exception table consists of pairs of addresses: the first is the
- * address of an instruction that is allowed to fault, and the second is
- * the address at which the program should continue. No registers are
- * modified, so it is entirely up to the continuation code to figure out
- * what to do.
- *
- * All the routines below use bits of fixup code that are out of line
- * with the main instruction path. This means when everything is well,
- * we don't even have to jump over them. Further, they do not intrude
- * on our cache or tlb entries.
- *
- * There is a special way how to put a range of potentially faulting
- * insns (like twenty ldd/std's with now intervening other instructions)
- * You specify address of first in insn and 0 in fixup and in the next
- * exception_table_entry you specify last potentially faulting insn + 1
- * and in fixup the routine which should handle the fault.
- * That fixup code will get
- * (faulting_insn_address - first_insn_in_the_range_address)/4
- * in %g2 (ie. index of the faulting instruction in the range).
- */
-
-struct exception_table_entry
-{
-	unsigned long insn, fixup;
-};
-
-/* Returns 0 if exception not found and fixup otherwise. */
-unsigned long search_extables_range(unsigned long addr, unsigned long *g2);
-
 /* Uh, these should become the main single-value transfer routines..
  * They automatically use the right size if we just have the right
  * pointer type..
@@ -252,12 +219,7 @@ static inline unsigned long __clear_user(void __user *addr, unsigned long size)
 	unsigned long ret;
 
 	__asm__ __volatile__ (
-		".section __ex_table,#alloc\n\t"
-		".align 4\n\t"
-		".word 1f,3\n\t"
-		".previous\n\t"
 		"mov %2, %%o1\n"
-		"1:\n\t"
 		"call __bzero\n\t"
 		" mov %1, %%o0\n\t"
 		"mov %%o0, %0\n"

arch/sparc/include/asm/uaccess_64.h

Lines changed: 0 additions & 1 deletion
@@ -10,7 +10,6 @@
 #include <linux/string.h>
 #include <asm/asi.h>
 #include <asm/spitfire.h>
-#include <asm/extable_64.h>
 
 #include <asm/processor.h>

arch/sparc/kernel/unaligned_32.c

Lines changed: 5 additions & 5 deletions
@@ -16,6 +16,7 @@
 #include <linux/uaccess.h>
 #include <linux/smp.h>
 #include <linux/perf_event.h>
+#include <linux/extable.h>
 
 #include <asm/setup.h>
 
@@ -213,10 +214,10 @@ static inline int ok_for_kernel(unsigned int insn)
 
 static void kernel_mna_trap_fault(struct pt_regs *regs, unsigned int insn)
 {
-	unsigned long g2 = regs->u_regs [UREG_G2];
-	unsigned long fixup = search_extables_range(regs->pc, &g2);
+	const struct exception_table_entry *entry;
 
-	if (!fixup) {
+	entry = search_exception_tables(regs->pc);
+	if (!entry) {
 		unsigned long address = compute_effective_address(regs, insn);
 		if(address < PAGE_SIZE) {
 			printk(KERN_ALERT "Unable to handle kernel NULL pointer dereference in mna handler");
@@ -232,9 +233,8 @@ static void kernel_mna_trap_fault(struct pt_regs *regs, unsigned int insn)
 		die_if_kernel("Oops", regs);
 		/* Not reached */
 	}
-	regs->pc = fixup;
+	regs->pc = entry->fixup;
 	regs->npc = regs->pc + 4;
-	regs->u_regs [UREG_G2] = g2;
 }
 
 asmlinkage void kernel_unaligned_trap(struct pt_regs *regs, unsigned int insn)
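
kernel_mna_trap_fault() above is the consumer side; it can only find an entry if every potentially faulting kernel access emitted a matching (insn, fixup) pair at build time. A hedged sketch of the producer side, modeled on the __get_user-style inline asm already in uaccess_32.h (the helper name is illustrative, not part of this patch):

	#include <linux/errno.h>

	static inline int load_user_word(const unsigned long __user *p,
					 unsigned long *val)
	{
		unsigned long v;
		int err = 0;

		__asm__ __volatile__(
			"1:	ld	[%2], %0\n"
			"2:\n"
			"	.section .fixup,#alloc,#execinstr\n"
			"3:	mov	%3, %1\n"	/* report -EFAULT */
			"	b	2b\n"		/* rejoin the fast path */
			"	 clr	%0\n"		/* branch delay slot */
			"	.previous\n"
			"	.section __ex_table,#alloc\n"
			"	.align	4\n"
			"	.word	1b, 3b\n"	/* the (insn, fixup) pair */
			"	.previous\n"
			: "=&r" (v), "+r" (err)
			: "r" (p), "i" (-EFAULT));

		*val = v;
		return err;
	}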

arch/sparc/lib/checksum_32.S

Lines changed: 27 additions & 37 deletions
@@ -155,34 +155,27 @@ cpout:	retl				! get outta here
 	.text;				\
 	.align	4
 
-#define EXT(start,end)			\
-	.section __ex_table,ALLOC;	\
-	.align	4;			\
-	.word	start, 0, end, cc_fault; \
-	.text;				\
-	.align	4
-
 /* This aligned version executes typically in 8.5 superscalar cycles, this
  * is the best I can do. I say 8.5 because the final add will pair with
  * the next ldd in the main unrolled loop. Thus the pipe is always full.
  * If you change these macros (including order of instructions),
  * please check the fixup code below as well.
  */
 #define CSUMCOPY_BIGCHUNK_ALIGNED(src, dst, sum, off, t0, t1, t2, t3, t4, t5, t6, t7)	\
-	ldd	[src + off + 0x00], t0;			\
-	ldd	[src + off + 0x08], t2;			\
+	EX(ldd	[src + off + 0x00], t0);		\
+	EX(ldd	[src + off + 0x08], t2);		\
 	addxcc	t0, sum, sum;				\
-	ldd	[src + off + 0x10], t4;			\
+	EX(ldd	[src + off + 0x10], t4);		\
 	addxcc	t1, sum, sum;				\
-	ldd	[src + off + 0x18], t6;			\
+	EX(ldd	[src + off + 0x18], t6);		\
 	addxcc	t2, sum, sum;				\
-	std	t0, [dst + off + 0x00];			\
+	EX(std	t0, [dst + off + 0x00]);		\
 	addxcc	t3, sum, sum;				\
-	std	t2, [dst + off + 0x08];			\
+	EX(std	t2, [dst + off + 0x08]);		\
 	addxcc	t4, sum, sum;				\
-	std	t4, [dst + off + 0x10];			\
+	EX(std	t4, [dst + off + 0x10]);		\
 	addxcc	t5, sum, sum;				\
-	std	t6, [dst + off + 0x18];			\
+	EX(std	t6, [dst + off + 0x18]);		\
 	addxcc	t6, sum, sum;				\
 	addxcc	t7, sum, sum;
@@ -191,39 +184,39 @@ cpout:	retl				! get outta here
  * Viking MXCC into streaming mode. Ho hum...
  */
 #define CSUMCOPY_BIGCHUNK(src, dst, sum, off, t0, t1, t2, t3, t4, t5, t6, t7)	\
-	ldd	[src + off + 0x00], t0;			\
-	ldd	[src + off + 0x08], t2;			\
-	ldd	[src + off + 0x10], t4;			\
-	ldd	[src + off + 0x18], t6;			\
-	st	t0, [dst + off + 0x00];			\
+	EX(ldd	[src + off + 0x00], t0);		\
+	EX(ldd	[src + off + 0x08], t2);		\
+	EX(ldd	[src + off + 0x10], t4);		\
+	EX(ldd	[src + off + 0x18], t6);		\
+	EX(st	t0, [dst + off + 0x00]);		\
 	addxcc	t0, sum, sum;				\
-	st	t1, [dst + off + 0x04];			\
+	EX(st	t1, [dst + off + 0x04]);		\
 	addxcc	t1, sum, sum;				\
-	st	t2, [dst + off + 0x08];			\
+	EX(st	t2, [dst + off + 0x08]);		\
 	addxcc	t2, sum, sum;				\
-	st	t3, [dst + off + 0x0c];			\
+	EX(st	t3, [dst + off + 0x0c]);		\
 	addxcc	t3, sum, sum;				\
-	st	t4, [dst + off + 0x10];			\
+	EX(st	t4, [dst + off + 0x10]);		\
 	addxcc	t4, sum, sum;				\
-	st	t5, [dst + off + 0x14];			\
+	EX(st	t5, [dst + off + 0x14]);		\
 	addxcc	t5, sum, sum;				\
-	st	t6, [dst + off + 0x18];			\
+	EX(st	t6, [dst + off + 0x18]);		\
 	addxcc	t6, sum, sum;				\
-	st	t7, [dst + off + 0x1c];			\
+	EX(st	t7, [dst + off + 0x1c]);		\
 	addxcc	t7, sum, sum;
 
 /* Yuck, 6 superscalar cycles... */
 #define CSUMCOPY_LASTCHUNK(src, dst, sum, off, t0, t1, t2, t3)	\
-	ldd	[src - off - 0x08], t0;			\
-	ldd	[src - off - 0x00], t2;			\
+	EX(ldd	[src - off - 0x08], t0);		\
+	EX(ldd	[src - off - 0x00], t2);		\
 	addxcc	t0, sum, sum;				\
-	st	t0, [dst - off - 0x08];			\
+	EX(st	t0, [dst - off - 0x08]);		\
 	addxcc	t1, sum, sum;				\
-	st	t1, [dst - off - 0x04];			\
+	EX(st	t1, [dst - off - 0x04]);		\
 	addxcc	t2, sum, sum;				\
-	st	t2, [dst - off - 0x00];			\
+	EX(st	t2, [dst - off - 0x00]);		\
 	addxcc	t3, sum, sum;				\
-	st	t3, [dst - off + 0x04];
+	EX(st	t3, [dst - off + 0x04]);
 
 /* Handle the end cruft code out of band for better cache patterns. */
 cc_end_cruft:
@@ -331,7 +324,6 @@ __csum_partial_copy_sparc_generic:
 	CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x20,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
 	CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x40,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
 	CSUMCOPY_BIGCHUNK(%o0,%o1,%g7,0x60,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
-10:	EXT(5b, 10b)			! note for exception handling
 	sub	%g1, 128, %g1		! detract from length
 	addx	%g0, %g7, %g7		! add in last carry bit
 	andcc	%g1, 0xffffff80, %g0	! more to csum?
@@ -356,8 +348,7 @@ cctbl:	CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x68,%g2,%g3,%g4,%g5)
 	CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x28,%g2,%g3,%g4,%g5)
 	CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x18,%g2,%g3,%g4,%g5)
 	CSUMCOPY_LASTCHUNK(%o0,%o1,%g7,0x08,%g2,%g3,%g4,%g5)
-12:	EXT(cctbl, 12b)			! note for exception table handling
-	addx	%g0, %g7, %g7
+12:	addx	%g0, %g7, %g7
 	andcc	%o3, 0xf, %g0		! check for low bits set
 ccte:	bne	cc_end_cruft		! something left, handle it out of band
 	andcc	%o3, 8, %g0		! begin checks for that code
@@ -367,7 +358,6 @@ ccdbl:	CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x00,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
 	CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x20,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
 	CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x40,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
 	CSUMCOPY_BIGCHUNK_ALIGNED(%o0,%o1,%g7,0x60,%o4,%o5,%g2,%g3,%g4,%g5,%o2,%o3)
-11:	EXT(ccdbl, 11b)			! note for exception table handling
 	sub	%g1, 128, %g1		! detract from length
 	addx	%g0, %g7, %g7		! add in last carry bit
 	andcc	%g1, 0xffffff80, %g0	! more to csum?
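
With the range-based EXT() markers gone, each ldd/std in the copy loops carries its own EX() wrapper, so every instruction gets an individual (insn, fixup) pair aimed at the common cc_fault path, and the generic search_exception_tables() can resolve it without the old %g2 range index. The EX() definition itself sits just above the deleted EXT() in this file (its ".text; .align 4" tail shows as context at the top of the first hunk); a hedged sketch of its likely shape, with the local label number illustrative:

	#define EX(x,y)				\
	98:	x,y;				\
		.section __ex_table,ALLOC;	\
		.align	4;			\
		.word	98b, cc_fault;		\
		.text;				\
		.align	4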
