diff --git a/common/memsize.c b/common/memsize.c
index 66d5be6a1ff3..ac4459f6af73 100644
--- a/common/memsize.c
+++ b/common/memsize.c
@@ -7,9 +7,18 @@
 #include
 #include
 #include
+#include
+#include
 
 DECLARE_GLOBAL_DATA_PTR;
 
+#ifdef CONFIG_SYS_CACHELINE_SIZE
+# define MEMSIZE_CACHELINE_SIZE CONFIG_SYS_CACHELINE_SIZE
+#else
+/* Just use the greatest cache flush alignment requirement I'm aware of */
+# define MEMSIZE_CACHELINE_SIZE 128
+#endif
+
 #ifdef __PPC__
 /*
  * At least on G2 PowerPC cores, sequential accesses to non-existent
@@ -20,6 +29,17 @@ DECLARE_GLOBAL_DATA_PTR;
 # define sync()	/* nothing */
 #endif
 
+static void dcache_flush_invalidate(volatile long *p)
+{
+	if (dcache_status()) {
+		uintptr_t start, stop;
+		start = ALIGN_DOWN((uintptr_t)p, MEMSIZE_CACHELINE_SIZE);
+		stop = start + MEMSIZE_CACHELINE_SIZE;
+		flush_dcache_range(start, stop);
+		invalidate_dcache_range(start, stop);
+	}
+}
+
 /*
  * Check memory range for valid RAM. A simple memory test determines
  * the actually available RAM size between addresses `base' and
@@ -41,6 +61,7 @@ long get_ram_size(long *base, long maxsize)
 		save[i++] = *addr;
 		sync();
 		*addr = ~cnt;
+		dcache_flush_invalidate(addr);
 	}
 
 	addr = base;
@@ -50,6 +71,8 @@ long get_ram_size(long *base, long maxsize)
 	*addr = 0;
 	sync();
 
+	dcache_flush_invalidate(addr);
+
 	if ((val = *addr) != 0) {
 		/* Restore the original data before leaving the function. */
 		sync();