io  151 ompi/mca/io/base/io_base_find_available.c  mca_io_base_component_2_0_0_t *io =
io  154 ompi/mca/io/base/io_base_find_available.c  return io->io_init_query(enable_progress_threads,
io   44 ompi/mca/io/base/io_base_frame.c  MCA_BASE_FRAMEWORK_DECLARE(ompi, io, "I/O", NULL, mca_io_base_open, NULL,
io   30 ompi/mca/io/romio321/romio/adio/ad_xfs/ad_xfs_hints.c  int io;
io   31 ompi/mca/io/romio321/romio/adio/ad_xfs/ad_xfs_hints.c  io = atoi(c);
io   32 ompi/mca/io/romio321/romio/adio/ad_xfs/ad_xfs_hints.c  if (io <= 0) {
io   37 ompi/mca/io/romio321/romio/adio/ad_xfs/ad_xfs_hints.c  xfs_direct_read_chunk_size = io;
io   45 ompi/mca/io/romio321/romio/adio/ad_xfs/ad_xfs_hints.c  int io;
io   46 ompi/mca/io/romio321/romio/adio/ad_xfs/ad_xfs_hints.c  io = atoi(c);
io   47 ompi/mca/io/romio321/romio/adio/ad_xfs/ad_xfs_hints.c  if (io <= 0) {
io   52 ompi/mca/io/romio321/romio/adio/ad_xfs/ad_xfs_hints.c  xfs_direct_write_chunk_size = io;
io   51 ompi/mpi/java/java/MPI.java  import java.io.*;
io  233 opal/mca/event/libevent2022/libevent/event-internal.h  struct event_io_map io;
io  584 opal/mca/event/libevent2022/libevent/event.c  evmap_io_initmap(&base->io);
io  786 opal/mca/event/libevent2022/libevent/event.c  evmap_io_clear(&base->io);
io  864 opal/mca/event/libevent2022/libevent/event.c  evmap_io_clear(&base->io);
io  266 opal/mca/event/libevent2022/libevent/evmap.c  struct event_io_map *io = &base->io;
io  278 opal/mca/event/libevent2022/libevent/evmap.c  if (fd >= io->nentries) {
io  279 opal/mca/event/libevent2022/libevent/evmap.c  if (evmap_make_space(io, fd, sizeof(struct evmap_io *)) == -1)
io  283 opal/mca/event/libevent2022/libevent/evmap.c  GET_IO_SLOT_AND_CTOR(ctx, io, fd, evmap_io, evmap_io_init,
io  339 opal/mca/event/libevent2022/libevent/evmap.c  struct event_io_map *io = &base->io;
io  350 opal/mca/event/libevent2022/libevent/evmap.c  if (fd >= io->nentries)
io  354 opal/mca/event/libevent2022/libevent/evmap.c  GET_IO_SLOT(ctx, io, fd, evmap_io);
io  392 opal/mca/event/libevent2022/libevent/evmap.c  struct event_io_map *io = &base->io;
io  397 opal/mca/event/libevent2022/libevent/evmap.c  EVUTIL_ASSERT(fd < io->nentries);
io  399 opal/mca/event/libevent2022/libevent/evmap.c  GET_IO_SLOT(ctx, io, fd, evmap_io);
io  518 opal/mca/event/libevent2022/libevent/evmap.c  GET_IO_SLOT(ctx, &base->io, change->fd, evmap_io);
io  542 opal/mca/event/libevent2022/libevent/evmap.c  for (i = 0; i < base->io.nentries; ++i) {
io  543 opal/mca/event/libevent2022/libevent/evmap.c  struct evmap_io *io = base->io.entries[i];
io  545 opal/mca/event/libevent2022/libevent/evmap.c  if (!io)
io  548 opal/mca/event/libevent2022/libevent/evmap.c  ( ((char*)io) + sizeof(struct evmap_io) );
io  736 opal/mca/event/libevent2022/libevent/evmap.c  struct event_io_map *io = &base->io;
io  751 opal/mca/event/libevent2022/libevent/evmap.c  HT_FOREACH(mapent, event_io_map, io) {
io  755 opal/mca/event/libevent2022/libevent/evmap.c  for (i = 0; i < io->nentries; ++i) {
io  756 opal/mca/event/libevent2022/libevent/evmap.c  struct evmap_io *ctx = io->entries[i];
io  310 opal/mca/event/libevent2022/libevent/poll.c  idx = evmap_io_get_fdinfo(&base->io, pop->event_set[i].fd);
io  163 opal/mca/event/libevent2022/libevent/win32select.c  ent2 = evmap_io_get_fdinfo(&base->io, s2);
io  305 opal/mca/hwloc/base/base.h  OPAL_DECLSPEC int opal_hwloc_base_topology_set_flags (hwloc_topology_t topology, unsigned long flags, bool io);
io 2389 opal/mca/hwloc/base/hwloc_base_util.c  int opal_hwloc_base_topology_set_flags (hwloc_topology_t topology, unsigned long flags, bool io) {
io 2390 opal/mca/hwloc/base/hwloc_base_util.c  if (io) {
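The last two entries show only the signature of opal_hwloc_base_topology_set_flags and its opening "if (io)" branch. Below is a minimal, hypothetical sketch of how a helper with that signature typically enables hwloc I/O-device discovery before applying the caller's topology flags; the HWLOC_API_VERSION split and the KEEP_IMPORTANT filter choice are assumptions for illustration, not taken from the listing above.

    #include <stdbool.h>
    #include <hwloc.h>

    /* Sketch only: request I/O-device discovery when io is true, then
     * apply the caller-supplied flags to the (not yet loaded) topology. */
    static int topology_set_flags_sketch(hwloc_topology_t topology,
                                         unsigned long flags, bool io)
    {
        if (io) {
    #if HWLOC_API_VERSION < 0x20000
            /* hwloc 1.x exposes I/O discovery as a topology flag. */
            flags |= HWLOC_TOPOLOGY_FLAG_IO_DEVICES;
    #else
            /* hwloc 2.x replaces that flag with per-type object filters. */
            int rc = hwloc_topology_set_io_types_filter(topology,
                         HWLOC_TYPE_FILTER_KEEP_IMPORTANT);
            if (0 != rc) {
                return rc;
            }
    #endif
        }
        return hwloc_topology_set_flags(topology, flags);
    }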