Identifier: ecx

opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-x86.c
   131: cpuiddump_find_by_input(unsigned *eax, unsigned *ebx, unsigned *ecx, unsigned *edx, struct cpuiddump *cpuiddump)
   141: if ((entry->inmask & 0x4) && *ecx != entry->inecx)
   147: *ecx = entry->outecx;
   153: *eax, *ebx, *ecx, *edx);
   156: *ecx = 0;
   160: static void cpuid_or_from_dump(unsigned *eax, unsigned *ebx, unsigned *ecx, unsigned *edx, struct cpuiddump *src_cpuiddump)
   163: cpuiddump_find_by_input(eax, ebx, ecx, edx, src_cpuiddump);
   165: hwloc_x86_cpuid(eax, ebx, ecx, edx);
   268: unsigned eax, ebx, ecx = 0, edx;
   280: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   345: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   346: coreidsize = (ecx >> 12) & 0xf;
   349: max_nbcores = (ecx & 0xff) + 1;
   382: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   390: node_id = ecx & 0xff;
   391: nodes_per_proc = ((ecx >> 8) & 7) + 1;
   417: ecx = cachenum;
   418: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   429: ecx = cachenum;
   430: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   453: cache->sets = sets = ecx + 1;
   474: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   475: fill_amd_cache(infos, 1, HWLOC_OBJ_CACHE_DATA, 1, ecx); /* private L1d */
   480: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   481: if (ecx & 0xf000)
   486: fill_amd_cache(infos, 2, HWLOC_OBJ_CACHE_UNIFIED, 1, ecx); /* private L2u */
   504: ecx = cachenum;
   505: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   535: ecx = cachenum;
   536: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   561: cache->sets = sets = ecx + 1;
   580: ecx = level;
   582: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   591: ecx = level;
   593: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
   598: apic_type = (ecx & 0xff00) >> 8;
  1153: unsigned eax, ebx, ecx = 0, edx;
  1215: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
  1217: if (ebx == INTEL_EBX && ecx == INTEL_ECX && edx == INTEL_EDX)
  1219: else if (ebx == AMD_EBX && ecx == AMD_ECX && edx == AMD_EDX)
  1221: else if ((ebx == ZX_EBX && ecx == ZX_ECX && edx == ZX_EDX)
  1222: || (ebx == SH_EBX && ecx == SH_ECX && edx == SH_EDX))
  1231: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
  1233: features[4] = ecx;
  1236: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
  1243: ecx = 0;
  1244: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
  1250: cpuid_or_from_dump(&eax, &ebx, &ecx, &edx, src_cpuiddump);
  1252: features[6] = ecx;

opal/mca/hwloc/hwloc201/hwloc/include/private/cpuid-x86.h
    53: static __hwloc_inline void hwloc_x86_cpuid(unsigned *eax, unsigned *ebx, unsigned *ecx, unsigned *edx)
    57: __cpuidex(regs, *eax, *ecx);
    60: *ecx = regs[2];
    73: "+c" (*ecx), "=&d" (*edx));
    79: : "+a" (*eax), "=&SD" (*ebx), "+c" (*ecx), "=&d" (*edx));
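For context, ecx plays two roles in these call sites: on input it selects the CPUID subleaf (the "ecx = cachenum" and "ecx = level" assignments before each cpuid_or_from_dump() call), and on return it carries result bits (e.g. "cache->sets = sets = ecx + 1"). The sketch below illustrates that same in/out pattern for CPUID leaf 0x04 (Intel deterministic cache parameters). It is a minimal, self-contained example assuming a 64-bit GCC/Clang target, not the hwloc implementation; the helper name cpuid_leaf is hypothetical, and hwloc's real wrapper (hwloc_x86_cpuid) additionally covers MSVC's __cpuidex and the 32-bit PIC case handled by the "=&SD" constraint at cpuid-x86.h:79.

    /* Minimal sketch (assumption: 64-bit GCC/Clang target), not hwloc code.
     * ECX selects the cache index on input and returns "sets - 1" on output,
     * mirroring the "ecx = cachenum" / "sets = ecx + 1" pattern above. */
    #include <stdio.h>

    static void cpuid_leaf(unsigned *eax, unsigned *ebx, unsigned *ecx, unsigned *edx)
    {
      __asm__ volatile ("cpuid"
                        : "+a" (*eax), "=b" (*ebx), "+c" (*ecx), "=d" (*edx));
    }

    int main(void)
    {
      unsigned cachenum;
      for (cachenum = 0; ; cachenum++) {
        unsigned eax = 0x04, ebx, ecx = cachenum, edx;  /* ECX in = subleaf index */
        cpuid_leaf(&eax, &ebx, &ecx, &edx);
        if (!(eax & 0x1f))                              /* cache type 0: no more caches */
          break;
        printf("cache #%u: L%u, %u sets\n",
               cachenum, (eax >> 5) & 0x7, ecx + 1);    /* ECX out = number of sets - 1 */
      }
      return 0;
    }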