36 #ifndef EM_QUEUE_INLINE_H_
37 #define EM_QUEUE_INLINE_H_
/*
 * Absolute difference of two values: |a - b|.
 * NOTE: function-like macro — each argument may be evaluated twice,
 * so do not pass expressions with side effects (e.g. i++).
 */
#define DIFF_ABS(a, b) ((a) < (b) ? (b) - (a) : (a) - (b))
/*
 * Smaller of the two values, i.e. min(a, b).
 * NOTE: function-like macro — each argument may be evaluated twice,
 * so do not pass expressions with side effects.
 */
#define SMALLEST_NBR(a, b) ((a) < (b) ? (a) : (b))
/*
 * Convert an EM queue handle to an internal queue-table index.
 * NOTE(review): only the name/parameter line of this function is visible
 * in this chunk — return type and body are missing; verify against the
 * full header before relying on this description.
 */
57 queue_hdl2idx(em_queue_t queue)
/*
 * Convert an internal queue-table index into an EM queue handle.
 * The visible code stores this instance's configured device id
 * (em_shm->conf.device_id) into the handle; the remaining handle
 * construction and return lines are missing from this chunk.
 */
68 static inline em_queue_t
69 queue_idx2hdl(
int queue_idx)
/* 'iq' is presumably an internal-queue handle struct/union — TODO confirm */
74 iq.device_id =
em_shm->conf.device_id;
/*
 * Map a 16-bit internal queue id to a queue-table index.
 * NOTE(review): the body of this function is not visible in this chunk.
 */
81 queue_id2idx(uint16_t queue_id)
/*
 * Build an EM queue handle from a 16-bit internal queue id.
 * The visible code stores the queue id and this instance's configured
 * device id (em_shm->conf.device_id) into the handle; the remaining
 * construction/return lines are missing from this chunk.
 */
87 static inline em_queue_t
88 queue_id2hdl(uint16_t queue_id)
92 iq.queue_id = queue_id;
93 iq.device_id =
em_shm->conf.device_id;
/*
 * Return true when 'queue' belongs to another device: the device id
 * embedded in the handle differs from this EM instance's configured
 * device id (em_shm->conf.device_id).
 * NOTE(review): the lines unpacking 'queue' into 'iq' are not visible
 * in this chunk.
 */
105 queue_external(em_queue_t queue)
112 return iq.device_id !=
em_shm->conf.device_id ? true :
false;
/*
 * Look up the internal queue element for an EM queue handle.
 * Visible logic: derive the table index from the handle's queue id,
 * reject handles whose device id is not this instance's or whose index
 * exceeds em_shm->opt.queue.max_num - 1, otherwise take a pointer into
 * em_shm->queue_tbl. The error-path and return lines are missing from
 * this chunk.
 */
117 queue_elem_get(
const em_queue_t queue)
124 queue_idx = queue_id2idx(iq.queue_id);
/* Validate device id and index bounds before indexing the table */
126 if (unlikely(iq.device_id !=
em_shm->conf.device_id ||
127 (
unsigned int)queue_idx >
em_shm->opt.queue.max_num - 1))
130 queue_elem = &
em_shm->queue_tbl.queue_elem[queue_idx];
/*
 * Return the EM queue handle stored in a queue element.
 * NOTE(review): the function name line and the NULL-branch return value
 * are missing from this chunk — presumably an 'undefined' handle is
 * returned when q_elem is NULL; confirm in the full source.
 */
135 static inline em_queue_t
140 if (unlikely(q_elem == NULL))
143 return (em_queue_t)(uintptr_t)q_elem->
queue;
/*
 * Convert a list node pointer back to its containing queue element.
 * Returns NULL for a NULL list_node. The line computing q_elem (likely a
 * container-of style offset calculation — TODO confirm) is missing from
 * this chunk.
 */
147 list_node_to_queue_elem(
const list_node_t *
const list_node)
152 return likely(list_node != NULL) ? q_elem : NULL;
/*
 * Map an EM scheduled queue type to the corresponding ODP schedule sync
 * mode, written through the out-parameter. The visible assignments cover
 * ODP_SCHED_SYNC_ATOMIC / _PARALLEL / _ORDERED; the case labels,
 * default handling and return value are missing from this chunk, so the
 * exact EM-type-to-sync pairing cannot be confirmed from here.
 */
167 odp_schedule_sync_t *odp_schedule_sync )
169 switch (em_queue_type) {
171 *odp_schedule_sync = ODP_SCHED_SYNC_ATOMIC;
174 *odp_schedule_sync = ODP_SCHED_SYNC_PARALLEL;
177 *odp_schedule_sync = ODP_SCHED_SYNC_ORDERED;
/*
 * Reverse mapping: ODP schedule sync mode -> EM scheduled queue type.
 * Only the case labels (ATOMIC / PARALLEL / ORDERED) are visible here;
 * the values produced per case, the remaining parameters and the
 * default/return handling are missing from this chunk.
 */
185 scheduled_queue_type_odp2em(odp_schedule_sync_t odp_schedule_sync,
188 switch (odp_schedule_sync) {
189 case ODP_SCHED_SYNC_ATOMIC:
192 case ODP_SCHED_SYNC_PARALLEL:
195 case ODP_SCHED_SYNC_ORDERED:
/*
 * Dequeue a single event from the queue's underlying ODP queue and
 * convert it to an EM event via event_odp2em().
 * NOTE(review): the function name line, the code resolving 'odp_queue',
 * and the empty-queue return value are missing from this chunk —
 * presumably an 'undefined' event is returned when odp_queue_deq()
 * yields ODP_EVENT_INVALID; confirm in the full source.
 */
204 static inline em_event_t
207 odp_queue_t odp_queue;
208 odp_event_t odp_event;
212 odp_event = odp_queue_deq(odp_queue);
213 if (odp_event == ODP_EVENT_INVALID)
216 em_event = event_odp2em(odp_event);
/*
 * Dequeue up to 'num' events with odp_queue_deq_multi(), writing them
 * into the caller's 'events[]' array, which is reused in place as the
 * ODP event array via a cast. event_to_hdr_multi() then resolves the
 * headers for the 'ret' events actually dequeued.
 * NOTE(review): the function name/signature start, the code resolving
 * 'odp_queue', and the return lines are missing from this chunk;
 * EVSTATE__DEQUEUE_MULTI suggests event-state tracking of the dequeued
 * events — confirm in the full source.
 */
229 em_event_t events[],
int num)
231 odp_queue_t odp_queue;
235 odp_event_t *
const odp_events = (odp_event_t *)events;
238 ret = odp_queue_deq_multi(odp_queue, odp_events , num);
246 event_to_hdr_multi(events, ev_hdrs, ret);
248 EVSTATE__DEQUEUE_MULTI);