![]() |
|
|||
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright 2011 Calxeda, Inc.
 * Based on PPC version Copyright 2007 MontaVista Software, Inc.
 */
#ifndef ASM_EDAC_H
#define ASM_EDAC_H
/*
 * ECC atomic, DMA, SMP and interrupt safe scrub function.
 * Implements the per arch edac_atomic_scrub() that EDAC use for software
 * ECC scrubbing. It reads memory and then writes back the original
 * value, allowing the hardware to detect and correct memory errors.
 */

/*
 * edac_atomic_scrub - atomically read-and-write-back a memory region.
 * @va:   virtual start address of the region to scrub
 * @size: length of the region in bytes (scrubbed one word at a time;
 *        any trailing partial word is skipped by the loop bound below)
 *
 * On ARMv6+ this walks the region word by word, using an exclusive
 * load/store pair so each read-modify-write is atomic with respect to
 * interrupts, DMA and other CPUs.  On pre-v6 architectures (no
 * ldrex/strex) the function compiles to a no-op.
 *
 * NOTE(review): the word-sized exclusive accesses presumably require
 * @va to be word-aligned -- confirm against callers.
 */
static inline void edac_atomic_scrub(void *va, u32 size)
{
#if __LINUX_ARM_ARCH__ >= 6
	unsigned int *virt_addr = va;
	unsigned int temp, temp2;
	unsigned int i;

	/* Iterate over whole words; size is converted from bytes to words. */
	for (i = 0; i < size / sizeof(*virt_addr); i++, virt_addr++) {
		/* Very carefully read and write to memory atomically
		 * so we are interrupt, DMA and SMP safe.
		 */
		/*
		 * Exclusive-monitor retry loop:
		 *   %0 (temp)  - value loaded from *virt_addr
		 *   %1 (temp2) - strex status: 0 on success, 1 if the
		 *                exclusive reservation was lost
		 * strex writes the just-read value back; if another agent
		 * touched the location in between, strex fails and we
		 * retry from the ldrex (bne 1b).  Both outputs use "=&r"
		 * (early-clobber) so neither aliases the address register,
		 * and "cc" is clobbered by the teq comparison.
		 */
		__asm__ __volatile__("\n"
			"1:	ldrex	%0, [%2]\n"
			"	strex	%1, %0, [%2]\n"
			"	teq	%1, #0\n"
			"	bne	1b\n"
			: "=&r"(temp), "=&r"(temp2)
			: "r"(virt_addr)
			: "cc");
	}
#endif
}

#endif
[ Source navigation ] | [ Diff markup ] | [ Identifier search ] | [ general search ] |
This page was automatically generated by the 2.1.0 LXR engine. The LXR team |
![]() ![]() |