/*
 * This file is part of the coreboot project.
 *
 * Copyright (C) 2007 Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

static inline void _WRMSR(u32 addr, u32 lo, u32 hi)
{
	__asm__ volatile (
		"wrmsr"
		:: "c" (addr), "a" (lo), "d" (hi)
	);
}

static inline void _RDMSR(u32 addr, u32 *lo, u32 *hi)
{
	__asm__ volatile (
		"rdmsr"
		: "=a" (*lo), "=d" (*hi)
		: "c" (addr)
	);
}

static inline void _RDTSC(u32 *lo, u32 *hi)
{
	__asm__ volatile (
		"rdtsc"
		: "=a" (*lo), "=d" (*hi)
	);
}

static inline void _cpu_id(u32 addr, u32 *val)
{
	__asm__ volatile(
		"cpuid"
		: "=a" (val[0]),
		  "=b" (val[1]),
		  "=c" (val[2]),
		  "=d" (val[3])
		: "0" (addr));
}

static u32 bsr(u32 x)
{
	u8 i;
	u32 ret = 0;

	for(i=31; i>0; i--) {
		if(x & (1<<i)) {
			ret = i;
			break;
		}
	}
	return ret;
}

static u32 bsf(u32 x)
{
	u8 i;
	u32 ret = 32;

	for(i=0; i<32; i++) {
		if(x & (1<<i)) {
			ret = i;
			break;
		}
	}
	return ret;
}

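/*
 * Example: bsr(0x00800000) and bsf(0x00800000) both return 23.  Note the
 * differing fallbacks above: bsr() yields 0 when no bit is set, bsf()
 * yields 32.
 */
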
#define _MFENCE asm volatile ( "mfence")

#define _SFENCE asm volatile ( "sfence" )

/* prevent speculative execution of following instructions */
#define _EXECFENCE asm volatile ("outb %al, $0xed")

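/*
 * Note: the raw "outb %al, $0xed" instructions in the asm blocks below play
 * the same role as _EXECFENCE.  A write to I/O port 0xed (a port normally
 * used only for short delays) is not performed speculatively, so it keeps
 * the CPU from running ahead into the timing-sensitive memory accesses
 * that follow.
 */
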
#include <cpu/x86/cr.h>

u32 SetUpperFSbase(u32 addr_hi);

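/*
 * The test-pattern helpers below reach DRAM through the FS segment:
 * SetUpperFSbase() (implemented in the MCT core code) loads the FS base
 * from the upper bits of the target address, and the remaining bits are
 * accessed with an fs: override.  Addresses are passed around here in
 * "physical address >> 8" form, which is why operands are shifted left
 * by 8 before being used as offsets.
 */
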
static void proc_CLFLUSH(u32 addr_hi)
{
	SetUpperFSbase(addr_hi);

	__asm__ volatile (
		/* clflush fs:[eax] */
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"clflush %%fs:(%0)\n\t"
		"mfence\n\t"
		:: "a" (addr_hi<<8)
	);
}

static void WriteLNTestPattern(u32 addr_lo, u8 *buf_a, u32 line_num)
{
	__asm__ volatile (
		/* prevent speculative execution of following instructions */
		/* FIXME: needed ? */
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"1:\n\t"
		"movdqa (%3), %%xmm0\n\t"
		"movntdq %%xmm0, %%fs:(%0)\n\t"	/* xmm0 is 128 bit */
		"addl %1, %0\n\t"
		"addl %1, %3\n\t"
		"loop 1b\n\t"
		"mfence\n\t"
		:: "a" (addr_lo), "d" (16), "c" (line_num * 4), "b" (buf_a)
	);
}

static u32 read32_fs(u32 addr_lo)
{
	u32 value;
	__asm__ volatile (
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"movl %%fs:(%1), %0\n\t"
		: "=b" (value) : "a" (addr_lo)
	);
	return value;
}

#ifdef UNUSED_CODE
static u8 read8_fs(u32 addr_lo)
{
	u8 byte;
	__asm__ volatile (
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"movb %%fs:(%1), %b0\n\t"
		"mfence\n\t"
		: "=b" (byte) : "a" (addr_lo)
	);
	return byte;
}
#endif

static void FlushDQSTestPattern_L9(u32 addr_lo)
{
	__asm__ volatile (
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"clflush %%fs:-128(%%ecx)\n\t"
		"clflush %%fs:-64(%%ecx)\n\t"
		"clflush %%fs:(%%ecx)\n\t"
		"clflush %%fs:64(%%ecx)\n\t"

		"clflush %%fs:-128(%%eax)\n\t"
		"clflush %%fs:-64(%%eax)\n\t"
		"clflush %%fs:(%%eax)\n\t"
		"clflush %%fs:64(%%eax)\n\t"

		"clflush %%fs:-128(%%ebx)\n\t"
		:: "b" (addr_lo+128+8*64), "c" (addr_lo+128),
		   "a" (addr_lo+128+4*64)
	);
}

static __attribute__((noinline)) void FlushDQSTestPattern_L18(u32 addr_lo)
{
	__asm__ volatile (
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"clflush %%fs:-128(%%eax)\n\t"
		"clflush %%fs:-64(%%eax)\n\t"
		"clflush %%fs:(%%eax)\n\t"
		"clflush %%fs:64(%%eax)\n\t"

		"clflush %%fs:-128(%%edi)\n\t"
		"clflush %%fs:-64(%%edi)\n\t"
		"clflush %%fs:(%%edi)\n\t"
		"clflush %%fs:64(%%edi)\n\t"

		"clflush %%fs:-128(%%ebx)\n\t"
		"clflush %%fs:-64(%%ebx)\n\t"
		"clflush %%fs:(%%ebx)\n\t"
		"clflush %%fs:64(%%ebx)\n\t"

		"clflush %%fs:-128(%%ecx)\n\t"
		"clflush %%fs:-64(%%ecx)\n\t"
		"clflush %%fs:(%%ecx)\n\t"
		"clflush %%fs:64(%%ecx)\n\t"

		"clflush %%fs:-128(%%edx)\n\t"
		"clflush %%fs:-64(%%edx)\n\t"
		:: "b" (addr_lo+128+8*64), "c" (addr_lo+128+12*64),
		   "d" (addr_lo+128+16*64), "a" (addr_lo+128),
		   "D" (addr_lo+128+4*64)
	);
}

static void ReadL18TestPattern(u32 addr_lo)
{
	// set fs and use fs prefix to access the mem
	__asm__ volatile (
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"movl %%fs:-128(%%esi), %%eax\n\t"	//TestAddr cache line
		"movl %%fs:-64(%%esi), %%eax\n\t"	//+1
		"movl %%fs:(%%esi), %%eax\n\t"		//+2
		"movl %%fs:64(%%esi), %%eax\n\t"	//+3

		"movl %%fs:-128(%%edi), %%eax\n\t"	//+4
		"movl %%fs:-64(%%edi), %%eax\n\t"	//+5
		"movl %%fs:(%%edi), %%eax\n\t"		//+6
		"movl %%fs:64(%%edi), %%eax\n\t"	//+7

		"movl %%fs:-128(%%ebx), %%eax\n\t"	//+8
		"movl %%fs:-64(%%ebx), %%eax\n\t"	//+9
		"movl %%fs:(%%ebx), %%eax\n\t"		//+10
		"movl %%fs:64(%%ebx), %%eax\n\t"	//+11

		"movl %%fs:-128(%%ecx), %%eax\n\t"	//+12
		"movl %%fs:-64(%%ecx), %%eax\n\t"	//+13
		"movl %%fs:(%%ecx), %%eax\n\t"		//+14
		"movl %%fs:64(%%ecx), %%eax\n\t"	//+15

		"movl %%fs:-128(%%edx), %%eax\n\t"	//+16
		"movl %%fs:-64(%%edx), %%eax\n\t"	//+17
		"mfence\n\t"
		:: "a" (0), "b" (addr_lo+128+8*64), "c" (addr_lo+128+12*64),
		   "d" (addr_lo+128+16*64), "S" (addr_lo+128),
		   "D" (addr_lo+128+4*64)
	);
}

static void ReadL9TestPattern(u32 addr_lo)
{
	// set fs and use fs prefix to access the mem
	__asm__ volatile (
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"movl %%fs:-128(%%ecx), %%eax\n\t"	//TestAddr cache line
		"movl %%fs:-64(%%ecx), %%eax\n\t"	//+1
		"movl %%fs:(%%ecx), %%eax\n\t"		//+2
		"movl %%fs:64(%%ecx), %%eax\n\t"	//+3

		"movl %%fs:-128(%%edx), %%eax\n\t"	//+4
		"movl %%fs:-64(%%edx), %%eax\n\t"	//+5
		"movl %%fs:(%%edx), %%eax\n\t"		//+6
		"movl %%fs:64(%%edx), %%eax\n\t"	//+7

		"movl %%fs:-128(%%ebx), %%eax\n\t"	//+8
		"mfence\n\t"
		:: "a" (0), "b" (addr_lo+128+8*64), "c" (addr_lo+128),
		   "d" (addr_lo+128+4*64)
	);
}

static void ReadMaxRdLat1CLTestPattern_D(u32 addr)
{
	SetUpperFSbase(addr);

	__asm__ volatile (
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"movl %%fs:-128(%%esi), %%eax\n\t"	//TestAddr cache line
		"movl %%fs:-64(%%esi), %%eax\n\t"	//+1
		"movl %%fs:(%%esi), %%eax\n\t"		//+2
		"mfence\n\t"
		:: "a" (0), "S" ((addr<<8)+128)
	);
}

static void WriteMaxRdLat1CLTestPattern_D(u32 buf, u32 addr)
{
	SetUpperFSbase(addr);

	__asm__ volatile (
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"1:\n\t"
		"movdqa (%3), %%xmm0\n\t"
		"movntdq %%xmm0, %%fs:(%0)\n\t"	/* xmm0 is 128 bit */
		"addl %1, %0\n\t"
		"addl %1, %3\n\t"
		"loop 1b\n\t"
		"mfence\n\t"
		:: "a" (addr<<8), "d" (16), "c" (3 * 4), "b" (buf)
	);
}

static void FlushMaxRdLatTestPattern_D(u32 addr)
{
	/* Flush a pattern of 72 bit times (per DQ) from cache.
	 * This procedure is used to ensure cache miss on the next read training.
	 */
	SetUpperFSbase(addr);

	__asm__ volatile (
		"outb %%al, $0xed\n\t"	/* _EXECFENCE */
		"clflush %%fs:-128(%%esi)\n\t"	//TestAddr cache line
		"clflush %%fs:-64(%%esi)\n\t"	//+1
		"clflush %%fs:(%%esi)\n\t"	//+2
		"mfence\n\t"
		:: "S" ((addr<<8)+128)
	);
}

static u32 stream_to_int(u8 const *p)
{
	int i;
	u32 val;
	u32 valx;

	val = 0;

	for(i=3; i>=0; i--) {
		val <<= 8;
		valx = *(p+i);
		val |= valx;
	}

	return val;
}

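/*
 * stream_to_int() assembles four bytes little-endian: the byte stream
 * 0x78 0x56 0x34 0x12, for example, yields 0x12345678.
 */
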
#ifdef UNUSED_CODE
static void oemSet_NB32(u32 addr, u32 val, u8 *valid)
{
}

static u32 oemGet_NB32(u32 addr, u8 *valid)
{
	*valid = 0;
	return 0xffffffff;
}
#endif

static u8 oemNodePresent_D(u8 Node, u8 *ret)
{
	*ret = 0;
	return 0;
}