From 4af57b787b4be09419a2bb48aa705fa87ef41cca Mon Sep 17 00:00:00 2001
From: Tim Abbott <tabbott@ksplice.com>
Date: Sat, 20 Feb 2010 01:03:34 +0100
Subject: [PATCH] Rename .data.cacheline_aligned to .data..cacheline_aligned.

The .data.cacheline_aligned section name can collide with the per-symbol
.data.<identifier> sections that gcc emits when building with
-ffunction-sections/-fdata-sections.  Since ".." cannot appear in a C
identifier, renaming the section to .data..cacheline_aligned rules out
such collisions and clearly marks it as a kernel-internal section.

Signed-off-by: Tim Abbott <tabbott@ksplice.com>
Cc: Sam Ravnborg <sam@ravnborg.org>
Signed-off-by: Denys Vlasenko <vda.linux@googlemail.com>
Signed-off-by: Michal Marek <mmarek@suse.cz>
---
 arch/powerpc/kernel/vmlinux.lds.S | 2 +-
 arch/x86/kernel/init_task.c       | 2 +-
 include/asm-generic/vmlinux.lds.h | 2 +-
 include/linux/cache.h             | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)
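
Note (not part of the patch, ignored by git am): as a reviewer aid, the
sketch below shows how C code reaches the renamed section.  The variable
name example_stats is hypothetical; the attribute expansion described in
the comment matches the CONFIG_SMP definition of __cacheline_aligned in
include/linux/cache.h as updated by this patch.

#include <linux/cache.h>	/* __cacheline_aligned, SMP_CACHE_BYTES */

/*
 * Hypothetical example: with CONFIG_SMP, __cacheline_aligned expands to
 *   __attribute__((__aligned__(SMP_CACHE_BYTES),
 *                  __section__(".data..cacheline_aligned")))
 * so this array is aligned to a cache line and emitted into
 * .data..cacheline_aligned, which the linker script collects via
 * CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES).  Because ".." is not valid in
 * a C identifier, -fdata-sections can never generate a clashing
 * .data.<identifier> section name.
 */
static unsigned long example_stats[8] __cacheline_aligned;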

diff --git a/arch/powerpc/kernel/vmlinux.lds.S b/arch/powerpc/kernel/vmlinux.lds.S
index dcd01c82e7013..3229c06221611 100644
--- a/arch/powerpc/kernel/vmlinux.lds.S
+++ b/arch/powerpc/kernel/vmlinux.lds.S
@@ -231,7 +231,7 @@ SECTIONS
 		PAGE_ALIGNED_DATA(PAGE_SIZE)
 	}
 
-	.data.cacheline_aligned : AT(ADDR(.data.cacheline_aligned) - LOAD_OFFSET) {
+	.data..cacheline_aligned : AT(ADDR(.data..cacheline_aligned) - LOAD_OFFSET) {
 		CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES)
 	}
 
diff --git a/arch/x86/kernel/init_task.c b/arch/x86/kernel/init_task.c
index 3a54dcb9cd0e6..43e9ccf449471 100644
--- a/arch/x86/kernel/init_task.c
+++ b/arch/x86/kernel/init_task.c
@@ -34,7 +34,7 @@ EXPORT_SYMBOL(init_task);
 /*
  * per-CPU TSS segments. Threads are completely 'soft' on Linux,
  * no more per-task TSS's. The TSS size is kept cacheline-aligned
- * so they are allowed to end up in the .data.cacheline_aligned
+ * so they are allowed to end up in the .data..cacheline_aligned
  * section. Since TSS's are completely CPU-local, we want them
  * on exact cacheline boundaries, to eliminate cacheline ping-pong.
  */
diff --git a/include/asm-generic/vmlinux.lds.h b/include/asm-generic/vmlinux.lds.h
index 67e652068e0e4..78450aaab9ef3 100644
--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
@@ -189,7 +189,7 @@
 
 #define CACHELINE_ALIGNED_DATA(align)					\
 	. = ALIGN(align);						\
-	*(.data.cacheline_aligned)
+	*(.data..cacheline_aligned)
 
 #define INIT_TASK_DATA(align)						\
 	. = ALIGN(align);						\
diff --git a/include/linux/cache.h b/include/linux/cache.h
index 97e24881c4c6f..4c570653ab84f 100644
--- a/include/linux/cache.h
+++ b/include/linux/cache.h
@@ -31,7 +31,7 @@
 #ifndef __cacheline_aligned
 #define __cacheline_aligned					\
   __attribute__((__aligned__(SMP_CACHE_BYTES),			\
-		 __section__(".data.cacheline_aligned")))
+		 __section__(".data..cacheline_aligned")))
 #endif /* __cacheline_aligned */
 
 #ifndef __cacheline_aligned_in_smp
-- 
GitLab