nb                 60 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-cuda.c   int nb, i;
nb                 66 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-cuda.c   cures = cudaGetDeviceCount(&nb);
nb                 70 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-cuda.c   for (i = 0; i < nb; i++) {
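
The three topology-cuda.c hits use nb as the CUDA device count: cudaGetDeviceCount fills it, and the discovery loop runs 0..nb-1. A minimal sketch of the same enumeration pattern, assuming only the standard CUDA runtime API (cudaGetDeviceCount, cudaGetDeviceProperties) and nothing hwloc-specific:

    #include <stdio.h>
    #include <cuda_runtime.h>

    int main(void)
    {
        int nb, i;
        cudaError_t cures = cudaGetDeviceCount(&nb);
        if (cures != cudaSuccess || nb == 0)
            return 1;                          /* no driver or no devices */
        for (i = 0; i < nb; i++) {
            struct cudaDeviceProp prop;
            if (cudaGetDeviceProperties(&prop, i) == cudaSuccess)
                printf("device %d: %s\n", i, prop.name);
        }
        return 0;
    }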
nb                 22 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-nvml.c   unsigned nb, i;
nb                 31 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-nvml.c   ret = nvmlDeviceGetCount(&nb);
nb                 32 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-nvml.c   if (NVML_SUCCESS != ret || !nb) {
nb                 37 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-nvml.c   for(i=0; i<nb; i++) {
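
topology-nvml.c follows the same count-then-iterate shape through NVML, including the early-out when the count is zero. A sketch under the same assumption (stock NVML API only; link with -lnvidia-ml):

    #include <stdio.h>
    #include <nvml.h>

    int main(void)
    {
        unsigned nb, i;
        if (nvmlInit() != NVML_SUCCESS)
            return 1;
        nvmlReturn_t ret = nvmlDeviceGetCount(&nb);
        if (NVML_SUCCESS != ret || !nb) {      /* same guard as the hit above */
            nvmlShutdown();
            return 1;
        }
        for (i = 0; i < nb; i++) {
            nvmlDevice_t dev;
            char name[NVML_DEVICE_NAME_BUFFER_SIZE];
            if (nvmlDeviceGetHandleByIndex(i, &dev) == NVML_SUCCESS &&
                nvmlDeviceGetName(dev, name, sizeof(name)) == NVML_SUCCESS)
                printf("GPU %u: %s\n", i, name);
        }
        nvmlShutdown();
        return 0;
    }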
nb                 66 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c   unsigned nb;
nb                141 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c       unsigned step, nb;
nb                159 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	nb = (unsigned) strtol(tmp2, &tmp3, 0);
nb                165 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	if (!nb) {
nb                171 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	loops[cur_loop].nb = nb;
nb                174 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	nbs *= nb;
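
The topology-synthetic.c hits at lines 141-174 parse an index attribute written as colon-separated step*nb fields: each nb is read with strtol, a zero value is rejected, the pair lands in a loop descriptor, and nbs accumulates the product of the loop widths. A standalone sketch of that parse, assuming a hypothetical simplified struct loop_s (the real hwloc descriptor carries more fields) and the grammar just described:

    #include <stdio.h>
    #include <stdlib.h>

    struct loop_s { unsigned step, nb; };

    /* Parse "4*2:1*4" into (step,nb) pairs; return the count, -1 on error. */
    static int parse_loops(const char *attr, struct loop_s *loops, int max)
    {
        int cur_loop = 0;
        unsigned long nbs = 1;
        const char *tmp = attr;
        while (cur_loop < max) {
            char *tmp2, *tmp3;
            unsigned step = (unsigned) strtol(tmp, &tmp2, 0);
            if (!step || *tmp2 != '*')
                return -1;
            unsigned nb = (unsigned) strtol(tmp2 + 1, &tmp3, 0);
            if (!nb)                 /* mirrors the "if (!nb)" rejection above */
                return -1;
            loops[cur_loop].step = step;
            loops[cur_loop].nb = nb;
            nbs *= nb;               /* running product of loop widths */
            cur_loop++;
            if (*tmp3 != ':')
                break;
            tmp = tmp3 + 1;
        }
        printf("total width covered: %lu\n", nbs);
        return cur_loop;
    }

    int main(void)
    {
        struct loop_s loops[4];
        printf("parsed %d loop(s)\n", parse_loops("4*2:1*4", loops, 4));
        return 0;
    }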
nb                233 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	unsigned step, nb;
nb                245 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	nb = data->level[mydepth].totalwidth / data->level[prevdepth].totalwidth; /* number of us within parent */
nb                248 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	loops[cur_loop].nb = nb;
nb                249 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	assert(nb);
nb                253 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	nbs *= nb;
nb                262 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	loops[nr_loops].nb = total/nbs;
nb                275 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c       unsigned nb = loops[i].nb;
nb                277 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c 	array[j] += ((j / step) % nb) * mul;
nb                278 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c       mul *= nb;
nb               1130 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c     loops[nr_loops-1].nb = j;
nb               1139 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c       ind += (i / loops[j].step) % loops[j].nb * mul;
nb               1140 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c       mul *= loops[j].nb;
nb               1148 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-synthetic.c     res = hwloc_snprintf(tmp, tmplen, "%u*%u%s", loops[j].step, loops[j].nb,
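
Lines 233-278 and 1130-1148 are the two directions of the same mixed-radix scheme: generation adds ((j / step) % nb) * mul for each loop, with mul scaled by nb per level, and the snprintf path re-emits each loop as "step*nb". A small self-contained illustration with two hypothetical loops over four objects:

    #include <stdio.h>

    struct loop_s { unsigned step, nb; };

    int main(void)
    {
        /* Hypothetical interleaving: pairs first, then odd/even. */
        struct loop_s loops[2] = { { 2, 2 }, { 1, 2 } };
        unsigned array[4] = { 0 };
        unsigned mul = 1;
        for (int i = 0; i < 2; i++) {
            unsigned step = loops[i].step, nb = loops[i].nb;
            for (unsigned j = 0; j < 4; j++)
                array[j] += ((j / step) % nb) * mul;  /* digit i of j's index */
            mul *= nb;
        }
        for (unsigned j = 0; j < 4; j++)
            printf("object %u -> OS index %u\n", j, array[j]);
        return 0;
    }

This prints 0, 2, 1, 3: the round-robin interleave that synthetic index attributes exist to describe.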
nb                695 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-windows.c   unsigned nb;
nb                703 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-windows.c   nb = (unsigned)((((uintptr_t) addr + len - start) + page_size - 1) / page_size);
nb                705 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-windows.c   if (!nb)
nb                706 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-windows.c     nb = 1;
nb                708 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-windows.c   pv = calloc(nb, sizeof(*pv));
nb                712 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-windows.c   for (i = 0; i < nb; i++)
nb                714 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-windows.c   if (!QueryWorkingSetExProc(GetCurrentProcess(), pv, nb * sizeof(*pv))) {
nb                719 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology-windows.c   for (i = 0; i < nb; i++) {
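
The topology-windows.c hits compute nb as the number of pages spanned by a buffer, one PSAPI_WORKING_SET_EX_INFORMATION entry per page for QueryWorkingSetEx. start (set just before the lines shown) is addr rounded down to a page boundary; the span is then rounded up before dividing, and a zero-length query still touches one page. The arithmetic alone, checked with an assumed 4 KiB page:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        const uintptr_t page_size = 4096;
        uintptr_t addr = 0x1000f00;                   /* not page-aligned */
        size_t len = 8192;

        uintptr_t start = addr & ~(page_size - 1);    /* round down */
        unsigned nb = (unsigned)(((addr + len - start) + page_size - 1)
                                 / page_size);        /* round up */
        if (!nb)
            nb = 1;   /* len == 0 still touches the page containing addr */
        printf("pages spanned: %u\n", nb);            /* 3: tail spills over */
        return 0;
    }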
nb               2611 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology.c   unsigned i, nb;
nb               2621 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology.c   nb = i;
nb               2623 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology.c   if (nb) {
nb               2625 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology.c     slevel->objs = malloc(nb * sizeof(struct hwloc_obj *));
nb               2636 opal/mca/hwloc/hwloc201/hwloc/hwloc/topology.c   slevel->nbobjs = nb;
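
The topology.c hits (lines 2611-2636) show a count-then-allocate pattern for building a special level: walk the objects to get nb, allocate an array of nb object pointers only when nb is non-zero, and record nb as the level width. A generic sketch of the pattern, assuming a hypothetical singly linked struct obj in place of hwloc's object graph:

    #include <stdlib.h>

    struct obj { struct obj *next; };
    struct level { struct obj **objs; unsigned nbobjs; };

    /* Snapshot a list into a flat array; an empty list leaves objs NULL. */
    static int build_level(struct obj *head, struct level *slevel)
    {
        unsigned i, nb;
        struct obj *cur;

        for (i = 0, cur = head; cur; cur = cur->next)
            i++;
        nb = i;

        slevel->objs = NULL;
        if (nb) {
            slevel->objs = malloc(nb * sizeof(struct obj *));
            if (!slevel->objs)
                return -1;
            for (i = 0, cur = head; cur; cur = cur->next)
                slevel->objs[i++] = cur;
        }
        slevel->nbobjs = nb;
        return 0;
    }

    int main(void)
    {
        struct obj a = { 0 }, b = { &a };    /* b -> a */
        struct level lvl;
        build_level(&b, &lvl);
        free(lvl.objs);
        return lvl.nbobjs == 2 ? 0 : 1;
    }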
nb               3492 oshmem/mca/memheap/ptmalloc/malloc.c static void* mmap_alloc(mstate m, size_t nb) {
nb               3493 oshmem/mca/memheap/ptmalloc/malloc.c   size_t mmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
nb               3494 oshmem/mca/memheap/ptmalloc/malloc.c   if (mmsize > nb) {     /* Check for wrap around 0 */
nb               3519 oshmem/mca/memheap/ptmalloc/malloc.c static mchunkptr mmap_resize(mstate m, mchunkptr oldp, size_t nb) {
nb               3521 oshmem/mca/memheap/ptmalloc/malloc.c   if (is_small(nb)) /* Can't shrink mmap regions below small size */
nb               3524 oshmem/mca/memheap/ptmalloc/malloc.c   if (oldsize >= nb + SIZE_T_SIZE &&
nb               3525 oshmem/mca/memheap/ptmalloc/malloc.c       (oldsize - nb) <= (mparams.granularity << 1))
nb               3530 oshmem/mca/memheap/ptmalloc/malloc.c     size_t newmmsize = mmap_align(nb + SIX_SIZE_T_SIZES + CHUNK_ALIGN_MASK);
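
From here on the listing is inside oshmem's vendored dlmalloc, where nb consistently means the request after padding to a chunk size: user bytes plus chunk overhead, rounded up to the chunk alignment. In mmap_alloc, nb is already padded and mmsize adds the mmap bookkeeping; the "mmsize > nb" test at line 3494 is the wrap-around guard. The same idiom on a raw request, with illustrative constants (8-byte overhead, 16-byte alignment; the real values are platform-dependent):

    #include <stdio.h>
    #include <stdint.h>

    #define CHUNK_OVERHEAD    8u
    #define CHUNK_ALIGN_MASK 15u            /* 16-byte chunk alignment */

    static size_t pad_request(size_t bytes)
    {
        return (bytes + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK)
               & ~(size_t)CHUNK_ALIGN_MASK;
    }

    int main(void)
    {
        size_t nb = pad_request(100);       /* 100 -> 112 */
        printf("nb = %zu\n", nb);

        size_t huge = SIZE_MAX - 4;
        size_t padded = pad_request(huge);  /* wraps past zero */
        if (padded > huge)
            printf("unexpected\n");
        else
            printf("overflow caught, allocation must fail\n");
        return 0;
    }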
nb               3600 oshmem/mca/memheap/ptmalloc/malloc.c                            size_t nb) {
nb               3604 oshmem/mca/memheap/ptmalloc/malloc.c   mchunkptr q = chunk_plus_offset(p, nb);
nb               3605 oshmem/mca/memheap/ptmalloc/malloc.c   size_t qsize = psize - nb;
nb               3606 oshmem/mca/memheap/ptmalloc/malloc.c   set_size_and_pinuse_of_inuse_chunk(m, p, nb);
nb               3636 oshmem/mca/memheap/ptmalloc/malloc.c   check_malloced_chunk(m, chunk2mem(p), nb);
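
prepend_alloc (lines 3600-3636) carves freshly obtained memory: the user chunk takes nb bytes at p, and the remainder qsize = psize - nb becomes a free chunk at q = p + nb. A toy split over a flat buffer, assuming a minimal header of just a size field (the real dlmalloc header also packs the in-use bits):

    #include <stdio.h>
    #include <stddef.h>

    struct chunk { size_t size; /* payload follows */ };

    #define chunk_plus_offset(p, off) ((struct chunk *)((char *)(p) + (off)))

    int main(void)
    {
        static char arena[256];
        struct chunk *p = (struct chunk *)arena;
        size_t psize = sizeof(arena);
        size_t nb = 96;                     /* padded request */

        struct chunk *q = chunk_plus_offset(p, nb);
        size_t qsize = psize - nb;          /* remainder becomes a free chunk */
        p->size = nb;
        q->size = qsize;

        printf("user chunk %zu bytes, remainder %zu at offset %td\n",
               p->size, q->size, (char *)q - (char *)p);
        return 0;
    }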
nb               3696 oshmem/mca/memheap/ptmalloc/malloc.c static void* sys_alloc(mstate m, size_t nb) {
nb               3704 oshmem/mca/memheap/ptmalloc/malloc.c   if (use_mmap(m) && nb >= mparams.mmap_threshold) {
nb               3705 oshmem/mca/memheap/ptmalloc/malloc.c     void* mem = mmap_alloc(m, nb);
nb               3736 oshmem/mca/memheap/ptmalloc/malloc.c         asize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE);
nb               3750 oshmem/mca/memheap/ptmalloc/malloc.c       asize = granularity_align(nb - m->topsize + TOP_FOOT_SIZE + SIZE_T_ONE);
nb               3762 oshmem/mca/memheap/ptmalloc/malloc.c             asize < nb + TOP_FOOT_SIZE + SIZE_T_ONE) {
nb               3763 oshmem/mca/memheap/ptmalloc/malloc.c           size_t esize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE - asize);
nb               3787 oshmem/mca/memheap/ptmalloc/malloc.c     size_t req = nb + TOP_FOOT_SIZE + SIZE_T_ONE;
nb               3789 oshmem/mca/memheap/ptmalloc/malloc.c     if (rsize > nb) { /* Fail if wraps around zero */
nb               3800 oshmem/mca/memheap/ptmalloc/malloc.c     size_t asize = granularity_align(nb + TOP_FOOT_SIZE + SIZE_T_ONE);
nb               3810 oshmem/mca/memheap/ptmalloc/malloc.c         if (ssize > nb + TOP_FOOT_SIZE) {
nb               3867 oshmem/mca/memheap/ptmalloc/malloc.c           return prepend_alloc(m, tbase, oldbase, nb);
nb               3874 oshmem/mca/memheap/ptmalloc/malloc.c     if (nb < m->topsize) { /* Allocate from new or extended top space */
nb               3875 oshmem/mca/memheap/ptmalloc/malloc.c       size_t rsize = m->topsize -= nb;
nb               3877 oshmem/mca/memheap/ptmalloc/malloc.c       mchunkptr r = m->top = chunk_plus_offset(p, nb);
nb               3879 oshmem/mca/memheap/ptmalloc/malloc.c       set_size_and_pinuse_of_inuse_chunk(m, p, nb);
nb               3881 oshmem/mca/memheap/ptmalloc/malloc.c       check_malloced_chunk(m, chunk2mem(p), nb);
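
sys_alloc pads its system request with TOP_FOOT_SIZE + SIZE_T_ONE so the new region can host a top chunk, then serves the caller by splitting top: the old top pointer becomes the nb-byte user chunk and the new, smaller top starts nb bytes in. dlmalloc and mspace_malloc below end with the identical split. A toy version, again assuming a bare size field per chunk:

    #include <stdio.h>
    #include <stddef.h>

    struct chunk { size_t size; };
    struct state { struct chunk *top; size_t topsize; };

    #define chunk_plus_offset(p, off) ((struct chunk *)((char *)(p) + (off)))

    /* Serve nb bytes from top; caller guarantees nb < m->topsize. */
    static struct chunk *split_top(struct state *m, size_t nb)
    {
        size_t rsize = m->topsize -= nb;    /* remaining top size */
        struct chunk *p = m->top;
        struct chunk *r = m->top = chunk_plus_offset(p, nb);
        r->size = rsize;
        p->size = nb;                       /* chunk handed to the caller */
        return p;
    }

    int main(void)
    {
        static char arena[4096];
        struct state m = { (struct chunk *)arena, sizeof(arena) };
        struct chunk *p = split_top(&m, 128);
        printf("user at +%td, top now %zu bytes\n",
               (char *)p - arena, m.topsize);
        return 0;
    }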
nb               4006 oshmem/mca/memheap/ptmalloc/malloc.c static void* tmalloc_large(mstate m, size_t nb) {
nb               4008 oshmem/mca/memheap/ptmalloc/malloc.c   size_t rsize = -nb; /* Unsigned negation */
nb               4011 oshmem/mca/memheap/ptmalloc/malloc.c   compute_tree_index(nb, idx);
nb               4015 oshmem/mca/memheap/ptmalloc/malloc.c     size_t sizebits = nb << leftshift_for_tree_index(idx);
nb               4019 oshmem/mca/memheap/ptmalloc/malloc.c       size_t trem = chunksize(t) - nb;
nb               4048 oshmem/mca/memheap/ptmalloc/malloc.c     size_t trem = chunksize(t) - nb;
nb               4057 oshmem/mca/memheap/ptmalloc/malloc.c   if (v != 0 && rsize < (size_t)(m->dvsize - nb)) {
nb               4059 oshmem/mca/memheap/ptmalloc/malloc.c       mchunkptr r = chunk_plus_offset(v, nb);
nb               4060 oshmem/mca/memheap/ptmalloc/malloc.c       dl_assert(chunksize(v) == rsize + nb);
nb               4064 oshmem/mca/memheap/ptmalloc/malloc.c           set_inuse_and_pinuse(m, v, (rsize + nb));
nb               4066 oshmem/mca/memheap/ptmalloc/malloc.c           set_size_and_pinuse_of_inuse_chunk(m, v, nb);
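
tmalloc_large does a best-fit walk of the size-segregated trees. Initializing rsize = -nb (unsigned negation, i.e. SIZE_MAX - nb + 1) makes it larger than any achievable remainder chunksize(t) - nb, so the first candidate always wins and no first-iteration special case is needed; tmalloc_small (lines 4079-4105) reuses the same trem/rsize comparison against the smallest tree chunk. The trick in isolation, with hypothetical candidate sizes standing in for the tree walk:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        size_t nb = 256;
        size_t rsize = -nb;          /* unsigned negation: SIZE_MAX - nb + 1 */

        size_t candidates[] = { 320, 272, 512 };   /* pretend tree nodes */
        size_t best = 0;
        for (int i = 0; i < 3; i++) {
            size_t trem = candidates[i] - nb;      /* remainder if chosen */
            if (trem < rsize) {                    /* first fit always wins */
                rsize = trem;
                best = candidates[i];
            }
        }
        printf("best fit %zu, remainder %zu\n", best, rsize);
        return 0;
    }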
nb               4079 oshmem/mca/memheap/ptmalloc/malloc.c static void* tmalloc_small(mstate m, size_t nb) {
nb               4087 oshmem/mca/memheap/ptmalloc/malloc.c   rsize = chunksize(t) - nb;
nb               4090 oshmem/mca/memheap/ptmalloc/malloc.c     size_t trem = chunksize(t) - nb;
nb               4098 oshmem/mca/memheap/ptmalloc/malloc.c     mchunkptr r = chunk_plus_offset(v, nb);
nb               4099 oshmem/mca/memheap/ptmalloc/malloc.c     dl_assert(chunksize(v) == rsize + nb);
nb               4103 oshmem/mca/memheap/ptmalloc/malloc.c         set_inuse_and_pinuse(m, v, (rsize + nb));
nb               4105 oshmem/mca/memheap/ptmalloc/malloc.c         set_size_and_pinuse_of_inuse_chunk(m, v, nb);
nb               4135 oshmem/mca/memheap/ptmalloc/malloc.c       size_t nb = request2size(bytes);
nb               4137 oshmem/mca/memheap/ptmalloc/malloc.c         newp = mmap_resize(m, oldp, nb);
nb               4138 oshmem/mca/memheap/ptmalloc/malloc.c       else if (oldsize >= nb) { /* already big enough */
nb               4139 oshmem/mca/memheap/ptmalloc/malloc.c         size_t rsize = oldsize - nb;
nb               4142 oshmem/mca/memheap/ptmalloc/malloc.c           mchunkptr remainder = chunk_plus_offset(newp, nb);
nb               4143 oshmem/mca/memheap/ptmalloc/malloc.c           set_inuse(m, newp, nb);
nb               4148 oshmem/mca/memheap/ptmalloc/malloc.c       else if (next == m->top && oldsize + m->topsize > nb) {
nb               4151 oshmem/mca/memheap/ptmalloc/malloc.c         size_t newtopsize = newsize - nb;
nb               4152 oshmem/mca/memheap/ptmalloc/malloc.c         mchunkptr newtop = chunk_plus_offset(oldp, nb);
nb               4153 oshmem/mca/memheap/ptmalloc/malloc.c         set_inuse(m, oldp, nb);
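
The realloc hits (lines 4135-4153) are the three in-place strategies tried before falling back to allocate-copy-free: mmap_resize for mmapped chunks, splitting off the tail when the old chunk is already big enough, and absorbing the top chunk when the allocation sits directly below it. The shrink case as a toy, with an illustrative MIN_CHUNK_SIZE (the real value is platform-dependent):

    #include <stdio.h>
    #include <stddef.h>

    struct chunk { size_t size; };
    #define chunk_plus_offset(p, off) ((struct chunk *)((char *)(p) + (off)))
    #define MIN_CHUNK_SIZE 32u

    /* Shrink p from oldsize to nb in place, splitting off the tail only
     * when it is big enough to stand alone as a chunk. */
    static struct chunk *shrink_in_place(struct chunk *p, size_t oldsize,
                                         size_t nb)
    {
        size_t rsize = oldsize - nb;
        if (rsize >= MIN_CHUNK_SIZE) {
            struct chunk *remainder = chunk_plus_offset(p, nb);
            remainder->size = rsize;    /* would be freed into a bin */
            p->size = nb;
        }
        return p;                       /* either way, the data never moves */
    }

    int main(void)
    {
        static char arena[256];
        struct chunk *p = (struct chunk *)arena;
        p->size = 256;
        shrink_in_place(p, 256, 96);
        printf("chunk now %zu bytes\n", p->size);
        return 0;
    }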
nb               4207 oshmem/mca/memheap/ptmalloc/malloc.c     size_t nb = request2size(bytes);
nb               4208 oshmem/mca/memheap/ptmalloc/malloc.c     size_t req = nb + alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD;
nb               4250 oshmem/mca/memheap/ptmalloc/malloc.c         if (size > nb + MIN_CHUNK_SIZE) {
nb               4251 oshmem/mca/memheap/ptmalloc/malloc.c           size_t remainder_size = size - nb;
nb               4252 oshmem/mca/memheap/ptmalloc/malloc.c           mchunkptr remainder = chunk_plus_offset(p, nb);
nb               4253 oshmem/mca/memheap/ptmalloc/malloc.c           set_inuse(m, p, nb);
nb               4259 oshmem/mca/memheap/ptmalloc/malloc.c       assert (chunksize(p) >= nb);
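
internal_memalign (lines 4207-4259) over-allocates by alignment + MIN_CHUNK_SIZE - CHUNK_OVERHEAD so an aligned chunk of at least nb bytes can always be carved from whatever malloc returns, trimming the leading and trailing slack back into free chunks; the closing assert checks the carved chunk still covers nb. Only the sizing and round-up arithmetic, sketched with plain malloc and illustrative constants (the real code aligns the chunk header, not the payload directly):

    #include <stdio.h>
    #include <stdint.h>
    #include <stdlib.h>

    int main(void)
    {
        size_t alignment = 64, nb = 112;    /* padded request */
        size_t req = nb + alignment + 32 /* MIN_CHUNK_SIZE */ - 8 /* overhead */;

        char *raw = malloc(req);
        if (!raw)
            return 1;

        /* Round up to the next multiple of alignment. */
        uintptr_t aligned = ((uintptr_t)raw + alignment - 1)
                            & ~(uintptr_t)(alignment - 1);
        printf("raw %p -> aligned %p, leading slack %td bytes\n",
               (void *)raw, (void *)aligned, (char *)aligned - raw);
        free(raw);
        return 0;
    }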
nb               4434 oshmem/mca/memheap/ptmalloc/malloc.c     size_t nb;
nb               4438 oshmem/mca/memheap/ptmalloc/malloc.c       nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
nb               4439 oshmem/mca/memheap/ptmalloc/malloc.c       idx = small_index(nb);
nb               4451 oshmem/mca/memheap/ptmalloc/malloc.c         check_malloced_chunk(gm, mem, nb);
nb               4455 oshmem/mca/memheap/ptmalloc/malloc.c       else if (nb > gm->dvsize) {
nb               4467 oshmem/mca/memheap/ptmalloc/malloc.c           rsize = small_index2size(i) - nb;
nb               4472 oshmem/mca/memheap/ptmalloc/malloc.c             set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
nb               4473 oshmem/mca/memheap/ptmalloc/malloc.c             r = chunk_plus_offset(p, nb);
nb               4478 oshmem/mca/memheap/ptmalloc/malloc.c           check_malloced_chunk(gm, mem, nb);
nb               4482 oshmem/mca/memheap/ptmalloc/malloc.c         else if (gm->treemap != 0 && (mem = tmalloc_small(gm, nb)) != 0) {
nb               4483 oshmem/mca/memheap/ptmalloc/malloc.c           check_malloced_chunk(gm, mem, nb);
nb               4489 oshmem/mca/memheap/ptmalloc/malloc.c       nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
nb               4491 oshmem/mca/memheap/ptmalloc/malloc.c       nb = pad_request(bytes);
nb               4492 oshmem/mca/memheap/ptmalloc/malloc.c       if (gm->treemap != 0 && (mem = tmalloc_large(gm, nb)) != 0) {
nb               4493 oshmem/mca/memheap/ptmalloc/malloc.c         check_malloced_chunk(gm, mem, nb);
nb               4498 oshmem/mca/memheap/ptmalloc/malloc.c     if (nb <= gm->dvsize) {
nb               4499 oshmem/mca/memheap/ptmalloc/malloc.c       size_t rsize = gm->dvsize - nb;
nb               4502 oshmem/mca/memheap/ptmalloc/malloc.c         mchunkptr r = gm->dv = chunk_plus_offset(p, nb);
nb               4505 oshmem/mca/memheap/ptmalloc/malloc.c         set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
nb               4514 oshmem/mca/memheap/ptmalloc/malloc.c       check_malloced_chunk(gm, mem, nb);
nb               4518 oshmem/mca/memheap/ptmalloc/malloc.c     else if (nb < gm->topsize) { /* Split top */
nb               4519 oshmem/mca/memheap/ptmalloc/malloc.c       size_t rsize = gm->topsize -= nb;
nb               4521 oshmem/mca/memheap/ptmalloc/malloc.c       mchunkptr r = gm->top = chunk_plus_offset(p, nb);
nb               4523 oshmem/mca/memheap/ptmalloc/malloc.c       set_size_and_pinuse_of_inuse_chunk(gm, p, nb);
nb               4526 oshmem/mca/memheap/ptmalloc/malloc.c       check_malloced_chunk(gm, mem, nb);
nb               4530 oshmem/mca/memheap/ptmalloc/malloc.c     mem = sys_alloc(gm, nb);
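
dlmalloc's main path (lines 4434-4530) dispatches on size: a small request maps to a bin via small_index(nb), then tries the exact bin, a remainder-splitting fit from the next nonempty bin, the designated victim dv, and tmalloc_small; large requests go to tmalloc_large; what is left falls through to the dv split, the top split, and finally sys_alloc. The bin arithmetic, assuming the common 3-bit smallbin shift (8-byte granularity):

    #include <stdio.h>
    #include <stddef.h>

    #define SMALLBIN_SHIFT 3u
    #define small_index(s)      ((s) >> SMALLBIN_SHIFT)
    #define small_index2size(i) ((size_t)(i) << SMALLBIN_SHIFT)

    int main(void)
    {
        size_t nb = 112;                   /* padded 100-byte request */
        unsigned idx = (unsigned) small_index(nb);
        printf("nb=%zu -> bin %u (%zu-byte chunks)\n",
               nb, idx, small_index2size(idx));

        /* A fit from a larger bin i leaves rsize = small_index2size(i) - nb,
           split off exactly as in the hits above. */
        unsigned i = idx + 2;
        printf("bin %u fit leaves remainder %zu\n",
               i, small_index2size(i) - nb);
        return 0;
    }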
nb               4863 oshmem/mca/memheap/ptmalloc/malloc.c     size_t nb;
nb               4867 oshmem/mca/memheap/ptmalloc/malloc.c       nb = (bytes < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(bytes);
nb               4868 oshmem/mca/memheap/ptmalloc/malloc.c       idx = small_index(nb);
nb               4880 oshmem/mca/memheap/ptmalloc/malloc.c         check_malloced_chunk(ms, mem, nb);
nb               4884 oshmem/mca/memheap/ptmalloc/malloc.c       else if (nb > ms->dvsize) {
nb               4896 oshmem/mca/memheap/ptmalloc/malloc.c           rsize = small_index2size(i) - nb;
nb               4901 oshmem/mca/memheap/ptmalloc/malloc.c             set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
nb               4902 oshmem/mca/memheap/ptmalloc/malloc.c             r = chunk_plus_offset(p, nb);
nb               4907 oshmem/mca/memheap/ptmalloc/malloc.c           check_malloced_chunk(ms, mem, nb);
nb               4911 oshmem/mca/memheap/ptmalloc/malloc.c         else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) {
nb               4912 oshmem/mca/memheap/ptmalloc/malloc.c           check_malloced_chunk(ms, mem, nb);
nb               4918 oshmem/mca/memheap/ptmalloc/malloc.c       nb = MAX_SIZE_T; /* Too big to allocate. Force failure (in sys alloc) */
nb               4920 oshmem/mca/memheap/ptmalloc/malloc.c       nb = pad_request(bytes);
nb               4921 oshmem/mca/memheap/ptmalloc/malloc.c       if (ms->treemap != 0 && (mem = tmalloc_large(ms, nb)) != 0) {
nb               4922 oshmem/mca/memheap/ptmalloc/malloc.c         check_malloced_chunk(ms, mem, nb);
nb               4927 oshmem/mca/memheap/ptmalloc/malloc.c     if (nb <= ms->dvsize) {
nb               4928 oshmem/mca/memheap/ptmalloc/malloc.c       size_t rsize = ms->dvsize - nb;
nb               4931 oshmem/mca/memheap/ptmalloc/malloc.c         mchunkptr r = ms->dv = chunk_plus_offset(p, nb);
nb               4934 oshmem/mca/memheap/ptmalloc/malloc.c         set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
nb               4943 oshmem/mca/memheap/ptmalloc/malloc.c       check_malloced_chunk(ms, mem, nb);
nb               4947 oshmem/mca/memheap/ptmalloc/malloc.c     else if (nb < ms->topsize) { /* Split top */
nb               4948 oshmem/mca/memheap/ptmalloc/malloc.c       size_t rsize = ms->topsize -= nb;
nb               4950 oshmem/mca/memheap/ptmalloc/malloc.c       mchunkptr r = ms->top = chunk_plus_offset(p, nb);
nb               4952 oshmem/mca/memheap/ptmalloc/malloc.c       set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
nb               4955 oshmem/mca/memheap/ptmalloc/malloc.c       check_malloced_chunk(ms, mem, nb);
nb               4959 oshmem/mca/memheap/ptmalloc/malloc.c     mem = sys_alloc(ms, nb);
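
The final cluster (lines 4863-4959) is mspace_malloc, which repeats the dlmalloc body line for line but against a caller-supplied mstate ms instead of the global gm; that parameterization is what lets oshmem's memheap run the allocator over its own symmetric-heap region. A usage sketch of the public mspace API as dlmalloc documents it, assuming the vendored copy is built with MSPACES enabled (header name illustrative):

    #include <stdio.h>
    #include "malloc.h"      /* the vendored dlmalloc, MSPACES build */

    int main(void)
    {
        /* A private 1 MiB heap, managed independently of the global state. */
        mspace msp = create_mspace(1 << 20, 0 /* not locked */);
        if (!msp)
            return 1;

        void *p = mspace_malloc(msp, 100);  /* same code path as dlmalloc,
                                               but m == msp, not gm */
        printf("allocated %p from the private mspace\n", p);
        mspace_free(msp, p);

        destroy_mspace(msp);
        return 0;
    }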