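// umemget32: copy aLength bytes (which must be a whole number of words) from
// user memory at aUserAddr into kernel memory at aKernAddr. A rough C-level
// sketch of the contract (illustrative only; the real routine below is
// hand-scheduled assembler and also manages the user memory guard state):
//
//	void umemget32_sketch(TAny* aKernAddr, const TAny* aUserAddr, TInt aLength)
//		{
//		TUint32* d = (TUint32*)aKernAddr;
//		const TUint32* s = (const TUint32*)aUserAddr;	// read with user permissions
//		for (TInt n = aLength; n > 0; n -= 4)
//			*d++ = *s++;
//		}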
EXPORT_C __NAKED__ void umemget32(TAny* /*aKernAddr*/, const TAny* /*aUserAddr*/, TInt /*aLength*/)
	{
	ASM_ASSERT_PAGING_SAFE

#ifdef __USER_MEMORY_GUARDS_ENABLED__
	// Wrap the workings of this function in an internal call, so we can save/restore UMG state
	asm("stmfd sp!, {r11, lr} ");
	asm("subs r12, r2, #1");
	asm("ldrhsb r11, [r0]"); // test access to first byte of kernel memory
	asm("ldrhsb r11, [r0,r12]"); // test access to last byte of kernel memory
	USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
	asm("bl 0f"); // call to label below
	USER_MEMORY_GUARD_RESTORE(r11,r12);
	asm("ldmfd sp!, {r11, pc} ");

	asm("0:");
#endif
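	// Note on the guard preamble above: the two conditional byte loads probe
	// the first and last bytes of the kernel-side buffer while the guard is
	// still in place, so a bad kernel address faults before user memory access
	// is enabled ('hs' skips the probes when aLength is zero). 'bl 0f' then
	// runs the copy as a nested call, giving a single exit path on which the
	// guard state saved in r11 is restored.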

#ifdef _DEBUG
	asm("tst r2, #3 "); // check length is a whole number of words
	CUMEM_FAULT(ne, KL::EWordMoveLengthNotMultipleOf4);
#endif

	PLD(1);
	asm("_umemget_word_aligned: ");
	asm("stmfd sp!, {r4, lr} ");
	asm("subs ip, r2, #32 ");
	asm("blo _umemget32_small_copy ");
	PLD_ioff(1, 32);
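// umemget: byte-granularity counterpart of umemget32 (the
// umemget_no_paging_assert entry point below identifies it). Any alignment
// and length are accepted; the common case of two word-aligned pointers is
// detected up front and redirected to _umemget_word_aligned above.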
	{
	// Optimised for aligned transfers, as unaligned are very rare in practice

	ASM_ASSERT_PAGING_SAFE
	asm("umemget_no_paging_assert:");

#ifdef __USER_MEMORY_GUARDS_ENABLED__
	// Wrap the workings of this function in an internal call, so we can save/restore UMG state
	asm("stmfd sp!, {r11, lr} ");
	asm("subs r12, r2, #1");
	asm("ldrhsb r11, [r0]"); // test access to first byte of kernel memory
	asm("ldrhsb r11, [r0,r12]"); // test access to last byte of kernel memory
	USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
	asm("bl 0f"); // call to label below
	USER_MEMORY_GUARD_RESTORE(r11,r12);
	asm("ldmfd sp!, {r11, pc} ");

	asm("0:");
#endif

	PLD(1);
	asm("tst r0, #3 ");
	asm("tsteq r1, #3 ");
	asm("beq _umemget_word_aligned ");
	asm("cmp r2, #8 ");

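// umemput32: the reverse of umemget32, copying aLength bytes (a whole number
// of words) from kernel memory at aKernAddr out to user memory at aUserAddr.
// Hypothetical kernel-side usage (names are illustrative, not from this
// file), writing a result pair back to an already range-checked user pointer:
//
//	TInt results[2] = { KErrNone, value };	// 'value': illustrative
//	umemput32(userResultPtr, results, sizeof(results));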
EXPORT_C __NAKED__ void umemput32(TAny* /*aUserAddr*/, const TAny* /*aKernAddr*/, TInt /*aLength*/)
	{
	ASM_ASSERT_DATA_PAGING_SAFE

#ifdef __USER_MEMORY_GUARDS_ENABLED__
	// Wrap the workings of this function in an internal call, so we can save/restore UMG state
	asm("stmfd sp!, {r11, lr} ");
	asm("subs r12, r2, #1");
	asm("ldrhsb r11, [r1]"); // test access to first byte of kernel memory
	asm("ldrhsb r11, [r1,r12]"); // test access to last byte of kernel memory
	USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
	asm("bl 0f"); // call to label below
	USER_MEMORY_GUARD_RESTORE(r11,r12);
	asm("ldmfd sp!, {r11, pc} ");

	asm("0:");
#endif

#ifdef _DEBUG
	asm("tst r2, #3 "); // check length is a whole number of words
	CUMEM_FAULT(ne, KL::EWordMoveLengthNotMultipleOf4);
#endif

	PLD(1);
	asm("cmp r2, #4 "); // special case for 4 byte copy which is common
	asm("ldrhs r3, [r1], #4 ");
	asm("subhs r2, r2, #4 ");
	asm("strhst r3, [r0], #4 ");
	__JUMP(ls,lr);
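	// Flag trick above: 'cmp r2, #4' leaves hs (length >= 4) predicating the
	// one-word copy and ls (length <= 4) predicating the early return, so the
	// common 4-byte put takes no branch at all. 'subhs' (no S suffix) leaves
	// the flags from the cmp intact, which is what keeps the final ls test valid.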
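// uumemcpy32: word copy where both source and destination are user memory,
// so the loop below uses the 'T' (user permission) variants for both the
// load and the store. It is internal to the kernel (no EXPORT_C on the
// definition). C-level sketch (illustrative only):
//
//	TUint32* d = (TUint32*)aUserDst;
//	const TUint32* s = (const TUint32*)aUserSrc;
//	while ((aLength -= 4) >= 0)
//		*d++ = *s++;	// both sides checked as user-mode accesses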
__NAKED__ void uumemcpy32(TAny* /*aUserDst*/, const TAny* /*aUserSrc*/, TInt /*aLength*/)
	{
	ASM_ASSERT_PAGING_SAFE

#ifdef __USER_MEMORY_GUARDS_ENABLED__
	// Wrap the workings of this function in an internal call, so we can save/restore UMG state
	asm("stmfd sp!, {r11, lr} ");
	USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
	asm("bl 0f"); // call to label below
	USER_MEMORY_GUARD_RESTORE(r11,r12);
	asm("ldmfd sp!, {r11, pc} ");

	asm("0:");
#endif
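	// Copy loop: 'pl' keeps iterating while the decremented count is still
	// non-negative, and the trailing 't' on ldr/str makes the MMU check each
	// access against user-mode permissions even though this code runs privileged.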

	asm("1: ");
	asm("subs r2, r2, #4 ");
	asm("ldrplt r3, [r1], #4 ");
	asm("strplt r3, [r0], #4 ");
	asm("bpl 1b ");
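// uumemcpy: arbitrary-alignment, arbitrary-length user-to-user copy. Lengths
// under 8 bytes take the byte loop at '2:' below; longer ones branch ahead
// to the main copy path at '1:'.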
__NAKED__ void uumemcpy(TAny* /*aUserDst*/, const TAny* /*aUserSrc*/, TInt /*aLength*/)
	{
	ASM_ASSERT_PAGING_SAFE

#ifdef __USER_MEMORY_GUARDS_ENABLED__
	// Wrap the workings of this function in an internal call, so we can save/restore UMG state
	asm("stmfd sp!, {r11, lr} ");
	USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
	asm("bl 0f"); // call to label below
	USER_MEMORY_GUARD_RESTORE(r11,r12);
	asm("ldmfd sp!, {r11, pc} ");

	asm("0:");
#endif

	asm("cmp r2, #8 ");
	asm("bcs 1f ");
	asm("2: ");
	asm("subs r2, r2, #1 ");
	asm("ldrplbt r3, [r1], #1 ");
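// The body below belongs to umemput, the byte-granularity counterpart of
// umemput32 (the umemput_no_paging_assert entry point identifies it); like
// umemget it special-cases word-aligned pointers.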
	{
	// Optimised for word-aligned transfers, as unaligned are very rare in practice

	ASM_ASSERT_DATA_PAGING_SAFE
	asm("umemput_no_paging_assert:");

#ifdef __USER_MEMORY_GUARDS_ENABLED__
	// Wrap the workings of this function in an internal call, so we can save/restore UMG state
	asm("stmfd sp!, {r11, lr} ");
	asm("subs r12, r2, #1");
	asm("ldrhsb r11, [r1]"); // test access to first byte of kernel memory
	asm("ldrhsb r11, [r1,r12]"); // test access to last byte of kernel memory
	USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
	asm("bl 0f"); // call to label below
	USER_MEMORY_GUARD_RESTORE(r11,r12);
	asm("ldmfd sp!, {r11, pc} ");

	asm("0:");
#endif

	PLD(1);
	asm("tst r0, #3 ");
	asm("tsteq r1, #3 ");
	asm("beq _umemput_word_aligned ");

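// umemset: fill aLength bytes of user memory at aUserAddr with the byte
// aValue. Hypothetical usage (names illustrative), clearing a user-side
// buffer that has already been range-checked:
//
//	umemset(userBuf, 0, userBufLen);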
EXPORT_C __NAKED__ void umemset(TAny* /*aUserAddr*/, const TUint8 /*aValue*/, TInt /*aLength*/)
	{
	ASM_ASSERT_DATA_PAGING_SAFE

#ifdef __USER_MEMORY_GUARDS_ENABLED__
	// Wrap the workings of this function in an internal call, so we can save/restore UMG state
	asm("stmfd sp!, {r11, lr} ");
	USER_MEMORY_GUARD_OFF(,r11,r12); // leaves UMG mode in r11
	asm("bl 0f"); // call to label below
	USER_MEMORY_GUARD_RESTORE(r11,r12);
	asm("ldmfd sp!, {r11, pc} ");

	asm("0:");
#endif
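	// No kernel-address probes are needed in this guard preamble: umemset
	// touches only user memory, and the strbt stores below are permission-checked
	// against user mode by the MMU on every access.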

	asm("cmp r2, #7 ");
	asm("bhi 2f ");
	asm("1: ");
	asm("subs r2, r2, #1 ");
	asm("strplbt r1, [r0], #1 ");