1 /******************************************************************************
3 * Copyright (c) 2019 Intel.
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
9 * http://www.apache.org/licenses/LICENSE-2.0
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
17 *******************************************************************************/
20 * @brief XRAN main functionality module
22 * @ingroup group_source_xran
23 * @author Intel Corporation
32 #include <sys/queue.h>
39 #include <rte_common.h>
41 #include <rte_errno.h>
42 #include <rte_lcore.h>
43 #include <rte_cycles.h>
44 #include <rte_memory.h>
45 #include <rte_memzone.h>
49 #include "xran_fh_o_du.h"
53 #include "xran_up_api.h"
54 #include "xran_cp_api.h"
55 #include "xran_sync_api.h"
56 #include "xran_lib_mlog_tasks_id.h"
57 #include "xran_timer.h"
58 #include "xran_common.h"
59 #include "xran_frame_struct.h"
60 #include "xran_printf.h"
61 #include "xran_app_frag.h"
63 #include "xran_mlog_lnx.h"
/* Integer ceiling division: ceil(X/Y) for non-negative X and positive Y.
 * Every macro argument is parenthesized so that expression arguments
 * (e.g. DIV_ROUND_OFFSET(a + b, c)) expand with the intended precedence;
 * the original expansion "X/Y + ((X%Y)?1:0)" silently mis-evaluated them. */
#define DIV_ROUND_OFFSET(X,Y) ( (X)/(Y) + (((X)%(Y))?1:0) )
#define MAX_NUM_OF_XRAN_CTX (2)
/* Circular increment/decrement over [0, MAX_NUM_OF_XRAN_CTX-1].
 * The ctx argument is parenthesized at every use so that expression
 * arguments expand with the intended precedence. */
#define XranIncrementCtx(ctx) (((ctx) >= (MAX_NUM_OF_XRAN_CTX-1)) ? 0 : ((ctx)+1))
#define XranDecrementCtx(ctx) (((ctx) == 0) ? (MAX_NUM_OF_XRAN_CTX-1) : ((ctx)-1))
#define MAX_NUM_OF_DPDK_TIMERS (10)
/* Circular increment/decrement over [0, MAX_NUM_OF_DPDK_TIMERS-1].
 * The ctx argument is parenthesized at every use so that expression
 * arguments expand with the intended precedence. */
#define DpdkTimerIncrementCtx(ctx) (((ctx) >= (MAX_NUM_OF_DPDK_TIMERS-1)) ? 0 : ((ctx)+1))
#define DpdkTimerDecrementCtx(ctx) (((ctx) == 0) ? (MAX_NUM_OF_DPDK_TIMERS-1) : ((ctx)-1))
/* Difference between Unix seconds to GPS seconds
   GPS epoch: 1980.1.6 00:00:00 (UTC); Unix time epoch: 1970.1.1 00:00:00 UTC
   Value was calculated on Sep. 6, 2019. It needs to be changed if the International
   Earth Rotation and Reference Systems Service (IERS) adds more leap seconds.
   1970.1.1 - 1980.1.6: 3657 days
   3657*24*3600 = 315 964 800 seconds (Unix seconds value at 1980.1.6 00:00:00 (UTC))
   There are 18 leap seconds inserted after 1980.1.6 00:00:00 (UTC), which means
   GPS is 18 larger. 315 964 800 - 18 = 315 964 782 */
/* Unix-to-GPS offset in seconds: 315 964 800 minus 18 leap seconds (see note above). */
#define UNIX_TO_GPS_SECONDS_OFFSET 315964782UL
/* SFN wraps after 1024 frames (10-bit SFN, 3GPP section 9.7.2). */
#define NUM_OF_FRAMES_PER_SFN_PERIOD 1024
/* 10 ms radio frames => 100 frames per second. */
#define NUM_OF_FRAMES_PER_SECOND 100

//#define XRAN_CREATE_RBMAP /**< generate slot map base on symbols */
91 struct xran_timer_ctx {
92 uint32_t tti_to_process;
97 XRAN_IN_PREV_PERIOD = 0,
102 static xran_cc_handle_t pLibInstanceHandles[XRAN_PORTS_NUM][XRAN_MAX_SECTOR_NR] = {NULL};
103 static struct xran_device_ctx g_xran_dev_ctx[XRAN_PORTS_NUM] = { 0 };
105 struct xran_timer_ctx timer_ctx[MAX_NUM_OF_XRAN_CTX];
106 struct xran_timer_ctx cb_timer_ctx[10*MAX_NUM_OF_XRAN_CTX];
109 static struct rte_timer tti_to_phy_timer[10];
110 static struct rte_timer sym_timer;
111 static struct rte_timer dpdk_timer[MAX_NUM_OF_DPDK_TIMERS];
113 uint64_t interval_us = 1000;
115 uint32_t xran_lib_ota_tti = 0; /**< Slot index in a second [0:(1000000/TTI-1)] */
116 uint32_t xran_lib_ota_sym = 0; /**< Symbol index in a slot [0:13] */
117 uint32_t xran_lib_ota_sym_idx = 0; /**< Symbol index in a second [0 : 14*(1000000/TTI)-1]
118 where TTI is TTI interval in microseconds */
119 uint16_t xran_SFN_at_Sec_Start = 0; /**< SFN at current second start */
120 uint16_t xran_max_frame = 1023; /**< value of max frame used. expected to be 99 (old compatibility mode) and 1023 as per section 9.7.2 System Frame Number Calculation */
122 static uint8_t xran_cp_seq_id_num[XRAN_MAX_CELLS_PER_PORT][XRAN_DIR_MAX][XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR]; /* XRAN_MAX_ANTENNA_NR * 2 for PUSCH and PRACH */
123 static uint8_t xran_updl_seq_id_num[XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR];
124 static uint8_t xran_upul_seq_id_num[XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR]; /**< PUSCH, PRACH, SRS for Cat B */
126 static uint8_t xran_section_id_curslot[XRAN_DIR_MAX][XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR * 2+ XRAN_MAX_ANT_ARRAY_ELM_NR];
127 static uint16_t xran_section_id[XRAN_DIR_MAX][XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR * 2+ XRAN_MAX_ANT_ARRAY_ELM_NR];
128 static uint64_t xran_total_tick = 0, xran_used_tick = 0;
129 static uint32_t xran_core_used = 0;
130 static int32_t first_call = 0;
134 extbuf_free_callback(void *addr __rte_unused, void *opaque __rte_unused)
138 static struct rte_mbuf_ext_shared_info share_data[XRAN_N_FE_BUF_LEN][XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR];
139 static struct rte_mbuf_ext_shared_info cp_share_data[XRAN_N_FE_BUF_LEN][XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR];
142 void xran_timer_arm(struct rte_timer *tim, void* arg);
144 int32_t xran_process_tx_sym(void *arg);
146 int32_t xran_process_rx_sym(void *arg,
147 struct rte_mbuf *mbuf,
163 int32_t xran_process_prach_sym(void *arg,
164 struct rte_mbuf *mbuf,
180 int32_t xran_process_srs_sym(void *arg,
181 struct rte_mbuf *mbuf,
198 void tti_ota_cb(struct rte_timer *tim, void *arg);
199 void tti_to_phy_cb(struct rte_timer *tim, void *arg);
200 void xran_timer_arm_ex(struct rte_timer *tim, void* CbFct, void *CbArg, unsigned tim_lcore);
/* Return the SFN at the current second start, 10 bits, [0, 1023].
 * Reads the module-global xran_SFN_at_Sec_Start maintained by
 * xran_updateSfnSecStart(). */
static inline uint16_t xran_getSfnSecStart(void)
    return xran_SFN_at_Sec_Start;
/**
 * Recompute the SFN at the start of the current second and roll the
 * per-second TX/RX byte counters.
 *
 * The SFN is derived from GPS time: Unix seconds are shifted by
 * UNIX_TO_GPS_SECONDS_OFFSET, converted to a frame count at
 * NUM_OF_FRAMES_PER_SECOND frames/s, then reduced modulo
 * (xran_max_frame + 1). Result is stored in xran_SFN_at_Sec_Start.
 */
void xran_updateSfnSecStart(void)
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    struct xran_common_counters * pCnt = &p_xran_dev_ctx->fh_counters;
    uint64_t currentSecond = timing_get_current_second();
    // Assume always positive (wall clock is after the GPS epoch)
    uint64_t gpsSecond = currentSecond - UNIX_TO_GPS_SECONDS_OFFSET;
    uint64_t nFrames = gpsSecond * NUM_OF_FRAMES_PER_SECOND;
    uint16_t sfn = (uint16_t)(nFrames % (xran_max_frame + 1));
    xran_SFN_at_Sec_Start = sfn;
    /* snapshot last second's byte counters, then reset them for the next second */
    pCnt->tx_bytes_per_sec = pCnt->tx_bytes_counter;
    pCnt->rx_bytes_per_sec = pCnt->rx_bytes_counter;
    pCnt->tx_bytes_counter = 0;
    pCnt->rx_bytes_counter = 0;
/* Return the absolute slot index corresponding to the start of the current
 * second: SFN-at-second-start scaled by slots per system frame. */
static inline int32_t xran_getSlotIdxSecond(void)
    int32_t frameIdxSecond = xran_getSfnSecStart();
    int32_t slotIndxSecond = frameIdxSecond * SLOTS_PER_SYSTEMFRAME;
    return slotIndxSecond;
/* Return the global xRAN device context. Only entry 0 of g_xran_dev_ctx is
 * used by this accessor (single-port assumption in this code path). */
struct xran_device_ctx *xran_dev_get_ctx(void)
    return &g_xran_dev_ctx[0];
/* Shortcut accessor for the fronthaul configuration of the global device context. */
static inline struct xran_fh_config *xran_lib_get_ctx_fhcfg(void)
    return (&(xran_dev_get_ctx()->fh_cfg));
/* Subtract a symbol offset from the current OTA symbol index, wrapping modulo
 * numSymTotal, and report via *pInPeriod whether the result falls in the
 * previous, current or next period.
 * NOTE(review): several interior lines (braces/else branches) are elided in
 * this extract; the visible statements imply the wrap-around logic below. */
static inline int32_t XranOffsetSym(int32_t offSym, int32_t otaSym, int32_t numSymTotal, enum xran_in_period* pInPeriod)
    // Suppose the offset is usually small
    if (unlikely(offSym > otaSym))
        /* offset reaches back across the period boundary */
        sym = numSymTotal - offSym + otaSym;
        *pInPeriod = XRAN_IN_PREV_PERIOD;
        sym = otaSym - offSym;
    if (unlikely(sym >= numSymTotal))
        *pInPeriod = XRAN_IN_NEXT_PERIOD;
        *pInPeriod = XRAN_IN_CURR_PERIOD;
/* Beam ID lookup; beamforming is not used in this build, so always 0. */
uint16_t xran_get_beamid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id)
    return (0); // NO BEAMFORMING
/* Return the current xRAN interface state (module-global xran_if_current_state). */
enum xran_if_state xran_get_if_state(void)
    return xran_if_current_state;
/* Decide whether the given (subframe, slot) carries a PRACH occasion for the
 * configured numerology. For FR1 (numerology < 2) the PRACH table is indexed
 * by subframe; for FR2 (numerology 3) by 60 kHz slot index. Returns 1 for a
 * PRACH slot, 0 otherwise.
 * NOTE(review): interior lines are elided in this extract, so the branch
 * bodies below are incomplete. */
int xran_is_prach_slot(uint32_t subframe_id, uint32_t slot_id)
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
    int32_t is_prach_slot = 0;
    if (p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology < 2){
        //for FR1, in 38.211 tab 6.3.3.2-2&3 it is subframe index
        if (pPrachCPConfig->isPRACHslot[subframe_id] == 1){
            if (pPrachCPConfig->nrofPrachInSlot != 1)
            if (p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology == 0)
            else if (slot_id == 1)
    } else if (p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology == 3){
        //for FR2, 38.211 tab 6.3.3.4 it is slot index of 60kHz slot
        slotidx = subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
        if (pPrachCPConfig->nrofPrachInSlot == 2){
            if (pPrachCPConfig->isPRACHslot[slotidx>>1] == 1)
            if ((pPrachCPConfig->isPRACHslot[slotidx>>1] == 1) && ((slotidx % 2) == 1)){
    print_err("Numerology %d not supported", p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology);
    return is_prach_slot;
/* Reset the per-direction/cell/antenna section-id counters. The per-slot
 * tracker is seeded with 255 (an invalid slot id) so the first allocation in
 * any real slot restarts section numbering from 0. */
int xran_init_sectionid(void *pHandle)
    for (dir = 0; dir < XRAN_DIR_MAX; dir++){
        for(cell=0; cell < XRAN_MAX_CELLS_PER_PORT; cell++) {
            for(ant=0; ant < XRAN_MAX_ANTENNA_NR; ant++) {
                xran_section_id[dir][cell][ant] = 0;
                xran_section_id_curslot[dir][cell][ant] = 255;
/* Copy the SRS configuration (symbol mask and eAxC offset) from the FH config
 * into the device context. Always returns XRAN_STATUS_SUCCESS. */
int xran_init_srs(struct xran_fh_config* pConf, struct xran_device_ctx * p_xran_dev_ctx)
    struct xran_srs_config *p_srs = &(p_xran_dev_ctx->srs_cfg);
    p_srs->symbMask = pConf->srs_conf.symbMask;
    p_srs->eAxC_offset = pConf->srs_conf.eAxC_offset;
    print_dbg("SRS sym %d\n", p_srs->symbMask );
    print_dbg("SRS eAxC_offset %d\n", p_srs->eAxC_offset);
    return (XRAN_STATUS_SUCCESS);
/* LTE PRACH initialization currently delegates to the common NR path. */
int xran_init_prach_lte(struct xran_fh_config* pConf, struct xran_device_ctx * p_xran_dev_ctx)
    /* update Rach for LTE */
    return xran_init_prach(pConf, p_xran_dev_ctx);
/**
 * Populate the device context's PRACH C-Plane configuration from the FH
 * config: selects the PRACH table (mmW / sub-6 TDD / sub-6 FDD) by duplex
 * type, derives per-occasion symbol layout and frequency offsets, marks the
 * candidate PRACH slots, and records the first/last PRACH symbol per sector.
 * Returns XRAN_STATUS_SUCCESS.
 * NOTE(review): some interior lines (branch bodies/braces) are elided in
 * this extract.
 */
int xran_init_prach(struct xran_fh_config* pConf, struct xran_device_ctx * p_xran_dev_ctx)
    struct xran_prach_config* pPRACHConfig = &(pConf->prach_conf);
    const xRANPrachConfigTableStruct *pxRANPrachConfigTable;
    uint8_t nNumerology = pConf->frame_conf.nNumerology;
    uint8_t nPrachConfIdx = pPRACHConfig->nPrachConfIdx;
    struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
        /* table selection: mmWave vs sub-6 TDD vs sub-6 FDD */
        pxRANPrachConfigTable = &gxranPrachDataTable_mmw[nPrachConfIdx];
    else if (pConf->frame_conf.nFrameDuplexType == 1)
        pxRANPrachConfigTable = &gxranPrachDataTable_sub6_tdd[nPrachConfIdx];
        pxRANPrachConfigTable = &gxranPrachDataTable_sub6_fdd[nPrachConfIdx];
    uint8_t preambleFmrt = pxRANPrachConfigTable->preambleFmrt[0];
    const xRANPrachPreambleLRAStruct *pxranPreambleforLRA = &gxranPreambleforLRA[preambleFmrt];
    memset(pPrachCPConfig, 0, sizeof(struct xran_prach_cp_config));
        printf("xRAN open PRACH config: Numerology %u ConfIdx %u, preambleFmrt %u startsymb %u, numSymbol %u, occassionsInPrachSlot %u\n", nNumerology, nPrachConfIdx, preambleFmrt, pxRANPrachConfigTable->startingSym, pxRANPrachConfigTable->duration, pxRANPrachConfigTable->occassionsInPrachSlot);
    pPrachCPConfig->filterIdx = XRAN_FILTERINDEX_PRACH_ABC; // 3, PRACH preamble format A1~3, B1~4, C0, C2
    pPrachCPConfig->startSymId = pxRANPrachConfigTable->startingSym;
    pPrachCPConfig->startPrbc = pPRACHConfig->nPrachFreqStart;
    /* short formats (>= A1) span 12 PRBs, long formats 70 */
    pPrachCPConfig->numPrbc = (preambleFmrt >= FORMAT_A1)? 12 : 70;
    pPrachCPConfig->timeOffset = pxranPreambleforLRA->nRaCp;
    pPrachCPConfig->freqOffset = xran_get_freqoffset(pPRACHConfig->nPrachFreqOffset, pPRACHConfig->nPrachSubcSpacing);
    pPrachCPConfig->x = pxRANPrachConfigTable->x;
    pPrachCPConfig->nrofPrachInSlot = pxRANPrachConfigTable->nrofPrachInSlot;
    pPrachCPConfig->y[0] = pxRANPrachConfigTable->y[0];
    pPrachCPConfig->y[1] = pxRANPrachConfigTable->y[1];
    if (preambleFmrt >= FORMAT_A1)
        pPrachCPConfig->numSymbol = pxRANPrachConfigTable->duration;
        pPrachCPConfig->occassionsInPrachSlot = pxRANPrachConfigTable->occassionsInPrachSlot;
        pPrachCPConfig->numSymbol = 1;
        pPrachCPConfig->occassionsInPrachSlot = 1;
    printf("PRACH: x %u y[0] %u, y[1] %u prach slot: %u ..", pPrachCPConfig->x, pPrachCPConfig->y[0], pPrachCPConfig->y[1], pxRANPrachConfigTable->slotNr[0]);
    /* mark every candidate PRACH slot from the config table */
    pPrachCPConfig->isPRACHslot[pxRANPrachConfigTable->slotNr[0]] = 1;
    for (i=1; i < XRAN_PRACH_CANDIDATE_SLOT; i++)
        slotNr = pxRANPrachConfigTable->slotNr[i];
            pPrachCPConfig->isPRACHslot[slotNr] = 1;
            printf(" %u ..", slotNr);
    for (i = 0; i < XRAN_MAX_SECTOR_NR; i++){
        p_xran_dev_ctx->prach_start_symbol[i] = pPrachCPConfig->startSymId;
        p_xran_dev_ctx->prach_last_symbol[i] = pPrachCPConfig->startSymId + pPrachCPConfig->numSymbol * pPrachCPConfig->occassionsInPrachSlot - 1;
    if(pConf->log_level){
        printf("PRACH start symbol %u lastsymbol %u\n", p_xran_dev_ctx->prach_start_symbol[0], p_xran_dev_ctx->prach_last_symbol[0]);
    /* PRACH eAxC ids start after the regular U-plane eAxCs */
    pPrachCPConfig->eAxC_offset = xran_get_num_eAxc(NULL);
    print_dbg("PRACH eAxC_offset %d\n", pPrachCPConfig->eAxC_offset);
    return (XRAN_STATUS_SUCCESS);
/* Allocate the next C-Plane section id for (dir, cc, ant). Numbering restarts
 * at 0 whenever a new slot is observed for that tuple. For PRACH, ant_id
 * starts from the number of regular antennas, hence the widened bound. */
inline uint16_t xran_alloc_sectionid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id)
    if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
        print_err("Invalid CC ID - %d", cc_id);
    if(ant_id >= XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR) { //for PRACH, ant_id starts from num_ant
        print_err("Invalid antenna ID - %d", ant_id);
    /* if new slot has been started,
     * then initializes section id again for new start */
    if(xran_section_id_curslot[dir][cc_id][ant_id] != slot_id) {
        xran_section_id[dir][cc_id][ant_id] = 0;
        xran_section_id_curslot[dir][cc_id][ant_id] = slot_id;
    /* post-increment: return current id, then advance */
    return(xran_section_id[dir][cc_id][ant_id]++);
/* Zero all C-Plane and U-Plane (DL/UL) sequence-id counters for every cell,
 * direction and antenna. Called at (re)initialization. */
int xran_init_seqid(void *pHandle)
    for(cell=0; cell < XRAN_MAX_CELLS_PER_PORT; cell++) {
        for(dir=0; dir < XRAN_DIR_MAX; dir++) {
            for(ant=0; ant < XRAN_MAX_ANTENNA_NR * 2; ant++)
                xran_cp_seq_id_num[cell][dir][ant] = 0;
        for(ant=0; ant < XRAN_MAX_ANTENNA_NR; ant++)
            xran_updl_seq_id_num[cell][ant] = 0;
        for(ant=0; ant < XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR; ant++)
            xran_upul_seq_id_num[cell][ant] = 0;
/* Return the next C-Plane sequence id for (dir, cc, ant); the 8-bit counter
 * post-increments and wraps naturally at 256. Note the storage index order
 * is [cc_id][dir][ant_id]. */
static inline uint8_t xran_get_cp_seqid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id)
    if(dir >= XRAN_DIR_MAX) {
        print_err("Invalid direction - %d", dir);
    if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
        print_err("Invalid CC ID - %d", cc_id);
    if(ant_id >= XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR) {
        print_err("Invalid antenna ID - %d", ant_id);
    return(xran_cp_seq_id_num[cc_id][dir][ant_id]++);
/* Return the next U-Plane DL sequence id for (cc, ant); 8-bit counter,
 * post-increment with natural wrap. */
static inline uint8_t xran_get_updl_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id)
    if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
        print_err("Invalid CC ID - %d", cc_id);
    if(ant_id >= XRAN_MAX_ANTENNA_NR) {
        print_err("Invalid antenna ID - %d", ant_id);
    /* Only U-Plane DL needs to get sequence ID in O-DU */
    return(xran_updl_seq_id_num[cc_id][ant_id]++);
/* Return the address of the U-Plane DL sequence-id counter for (cc, ant),
 * for callers that need to read/patch the counter in place. */
static inline uint8_t *xran_get_updl_seqid_addr(void *pHandle, uint8_t cc_id, uint8_t ant_id)
    if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
        print_err("Invalid CC ID - %d", cc_id);
    if(ant_id >= XRAN_MAX_ANTENNA_NR) {
        print_err("Invalid antenna ID - %d", ant_id);
    /* Only U-Plane DL needs to get sequence ID in O-DU */
    return(&xran_updl_seq_id_num[cc_id][ant_id]);
/* Validate the sequence id of a received U-Plane UL packet against the
 * locally expected value for (cc, ant). On mismatch the expected counter is
 * resynchronized to the received id so the following packet can match. */
static inline int8_t xran_check_upul_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id, uint8_t seq_id)
    if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
        print_err("Invalid CC ID - %d", cc_id);
    if(ant_id >= XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR) {
        print_err("Invalid antenna ID - %d", ant_id);
    /* O-DU needs to check the sequence ID of U-Plane UL from O-RU */
    xran_upul_seq_id_num[cc_id][ant_id]++;
    if(xran_upul_seq_id_num[cc_id][ant_id] == seq_id) { /* expected sequence */
        return (XRAN_STATUS_SUCCESS);
    print_dbg("expected seqid %u received %u, slot %u, ant %u cc %u", xran_upul_seq_id_num[cc_id][ant_id], seq_id, slot_id, ant_id, cc_id);
    xran_upul_seq_id_num[cc_id][ant_id] = seq_id; // for next
529 //////////////////////////////////////////
/* Return the next U-Plane UL sequence id for (cc, ant); 8-bit counter,
 * post-increment with natural wrap. Bound covers PUSCH, PRACH and SRS eAxCs. */
static inline uint8_t xran_get_upul_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id)
    if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
        print_err("Invalid CC ID - %d", cc_id);
    if(ant_id >= XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR) {
        print_err("Invalid antenna ID - %d", ant_id);
    return(xran_upul_seq_id_num[cc_id][ant_id]++);
/* Return the address of the U-Plane UL sequence-id counter for (cc, ant).
 * NOTE(review): the ant_id bound here is XRAN_MAX_ANTENNA_NR * 2, while the
 * sibling xran_get_upul_seqid() also allows the SRS range
 * (+ XRAN_MAX_ANT_ARRAY_ELM_NR) — confirm whether SRS eAxCs should be
 * accepted here too. */
static inline uint8_t *xran_get_upul_seqid_addr(void *pHandle, uint8_t cc_id, uint8_t ant_id)
    if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
        print_err("Invalid CC ID - %d", cc_id);
    if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) {
        print_err("Invalid antenna ID - %d", ant_id);
    return(&xran_upul_seq_id_num[cc_id][ant_id]);
/* Validate the sequence id of a received C-Plane packet against the locally
 * expected value for (dir, cc, ant); resynchronize the counter on mismatch.
 * NOTE(review): bound check uses XRAN_MAX_ANTENNA_NR * 2, narrower than the
 * array's declared extent (+ XRAN_MAX_ANT_ARRAY_ELM_NR) — confirm intent. */
static inline int8_t xran_check_cp_seqid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id, uint8_t seq_id)
    if(dir >= XRAN_DIR_MAX) {
        print_err("Invalid direction - %d", dir);
    if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
        print_err("Invalid CC ID - %d", cc_id);
    if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) {
        print_err("Invalid antenna ID - %d", ant_id);
    xran_cp_seq_id_num[cc_id][dir][ant_id]++;
    if(xran_cp_seq_id_num[cc_id][dir][ant_id] == seq_id) { /* expected sequence */
    /* mismatch: resynchronize to the received id */
    xran_cp_seq_id_num[cc_id][dir][ant_id] = seq_id;
/* Validate the sequence id of a received U-Plane DL packet (O-RU side)
 * against the locally expected value for (cc, ant); resynchronize the
 * counter on mismatch. */
static inline int8_t xran_check_updl_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id, uint8_t seq_id)
    if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
        print_err("Invalid CC ID - %d", cc_id);
    if(ant_id >= XRAN_MAX_ANTENNA_NR) {
        print_err("Invalid antenna ID - %d", ant_id);
    /* O-RU needs to check the sequence ID of U-Plane DL from O-DU */
    xran_updl_seq_id_num[cc_id][ant_id]++;
    if(xran_updl_seq_id_num[cc_id][ant_id] == seq_id) {
        /* expected sequence */
        /*print_dbg("ant %u cc_id %u : slot_id %u : seq_id %u : expected seq_id %u\n",
            ant_id, cc_id, slot_id, seq_id, xran_updl_seq_id_num[cc_id][ant_id]);*/
    /* print_err("ant %u cc_id %u : slot_id %u : seq_id %u : expected seq_id %u\n",
        ant_id, cc_id, slot_id, seq_id, xran_updl_seq_id_num[cc_id][ant_id]);*/
    /* mismatch: resynchronize to the received id */
    xran_updl_seq_id_num[cc_id][ant_id] = seq_id;
/* Convert slot ids between PHY numbering and xRAN spec numbering (5.3.2) by
 * shifting according to the numerology: dir = 0 converts PHY -> xRAN,
 * dir = 1 converts xRAN -> PHY.
 * NOTE(review): the branch structure selecting between the shift amounts is
 * elided in this extract. */
uint32_t xran_slotid_convert(uint16_t slot_id, uint16_t dir) //dir = 0, from PHY slotid to xran spec slotid as defined in 5.3.2, dir=1, from xran slotid to phy slotid
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    uint8_t mu = p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology;
    return (slot_id << (2-mu));
    return (slot_id << (3-mu));
    return (slot_id >> (2-mu));
    return (slot_id >> (3-mu));
/* Static scratch storage reused for every parsed C-Plane packet (single
 * consumer assumed; not thread-safe). */
static struct xran_section_gen_info cpSections[XRAN_MAX_NUM_SECTIONS];
static struct xran_cp_gen_params cpInfo;
/* Parse a received C-Plane packet into the static cpInfo/cpSections scratch. */
int process_cplane(struct rte_mbuf *pkt)
    struct xran_recv_packet_info recv;
    cpInfo.sections = cpSections;
    xran_parse_cp_pkt(pkt, &cpInfo, &recv);
657 //////////////////////////////////////////
/* Per-symbol OTA (over-the-air) callback: at slot boundaries it runs the TTI
 * callback, at symbol 3 it (re)arms the PHY TTI-event timer if the previous
 * event has not completed, then drives TX symbol processing and any
 * registered per-symbol callbacks. Accumulates busy ticks into *used_tick.
 * NOTE(review): some interior lines are elided in this extract. */
void sym_ota_cb(struct rte_timer *tim, void *arg, unsigned long *used_tick)
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
    long t1 = MLogTick(), t2;
    static int32_t ctx = 0;
    /* symbol 0 of a slot => new TTI */
    if(XranGetSymNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT) == 0){
        tti_ota_cb(NULL, arg);
        *used_tick += get_ticks_diff(xran_tick(), t3);
    if(XranGetSymNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT) == 3){
        if(p_xran_dev_ctx->phy_tti_cb_done == 0){
            /* rearm timer to deliver TTI event to PHY */
            p_xran_dev_ctx->phy_tti_cb_done = 0;
            xran_timer_arm_ex(&tti_to_phy_timer[xran_lib_ota_tti % 10], tti_to_phy_cb, (void*)pTCtx, p_xran_dev_ctx->fh_init.io_cfg.timing_core);
            *used_tick += get_ticks_diff(xran_tick(), t3);
    if (xran_process_tx_sym(timer_ctx))
        *used_tick += get_ticks_diff(xran_tick(), t3);
    /* check if there is call back to do something else on this symbol */
    struct cb_elem_entry *cb_elm;
    LIST_FOREACH(cb_elm, &p_xran_dev_ctx->sym_cb_list_head[0][xran_lib_ota_sym], pointers){
        /* each callback gets its own timer from the round-robin pool */
        cb_elm->pSymCallback(&dpdk_timer[ctx], cb_elm->pSymCallbackTag);
        ctx = DpdkTimerIncrementCtx(ctx);
    // This counter is incremented in advance before it is the time for the next symbol
    if(xran_lib_ota_sym >= N_SYM_PER_SLOT){
    MLogTask(PID_SYM_OTA_CB, t1, t2);
/* Per-TTI OTA callback: registers frame/subframe with MLog, publishes the
 * TTI to process into the double-buffered timer context (current index for
 * O-RU, next TTI for O-DU), arms the PHY TTI-event timer on the appropriate
 * lcore, and advances the OTA slot index with wrap at the max slot.
 * NOTE(review): several interior lines (braces/else branches) are elided in
 * this extract. */
void tti_ota_cb(struct rte_timer *tim, void *arg)
    uint32_t frame_id = 0;
    uint32_t subframe_id = 0;
    uint32_t slot_id = 0;
    uint32_t next_tti = 0;
    uint32_t mlogVar[10];
    uint32_t mlogVarCnt = 0;
    uint64_t t1 = MLogTick();
    uint32_t reg_tti = 0;
    uint32_t reg_sfn = 0;
    struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    /* PHY events go to the packet-processing core when configured, else the timing core */
    unsigned tim_lcore = (p_xran_dev_ctx->fh_init.io_cfg.pkt_proc_core) ? p_xran_dev_ctx->pkt_proc_core_id :
        p_xran_dev_ctx->fh_init.io_cfg.timing_core;
    MLogTask(PID_TTI_TIMER, t1, MLogTick());
    /* register the *previous* TTI with MLog (wrap at max slot) */
    if(xran_lib_ota_tti == 0)
        reg_tti = xran_fs_get_max_slot() - 1;
        reg_tti = xran_lib_ota_tti -1;
    MLogIncrementCounter();
    reg_sfn = XranGetFrameNum(reg_tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME)*10 + XranGetSubFrameNum(reg_tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);;
    /* subframe and slot */
    MLogRegisterFrameSubframe(reg_sfn, reg_tti % (SLOTNUM_PER_SUBFRAME));
    slot_id = XranGetSlotNum(xran_lib_ota_tti, SLOTNUM_PER_SUBFRAME);
    subframe_id = XranGetSubFrameNum(xran_lib_ota_tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
    frame_id = XranGetFrameNum(xran_lib_ota_tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
    /* publish the current TTI into the "other" half of the double buffer */
    pTCtx[(xran_lib_ota_tti & 1) ^ 1].tti_to_process = xran_lib_ota_tti;
    mlogVar[mlogVarCnt++] = 0x11111111;
    mlogVar[mlogVarCnt++] = xran_lib_ota_tti;
    mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx;
    mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx / 14;
    mlogVar[mlogVarCnt++] = frame_id;
    mlogVar[mlogVarCnt++] = subframe_id;
    mlogVar[mlogVarCnt++] = slot_id;
    mlogVar[mlogVarCnt++] = 0;
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
    /* O-DU works one TTI ahead; O-RU processes the current TTI */
    if(p_xran_dev_ctx->fh_init.io_cfg.id == ID_O_DU)
        next_tti = xran_lib_ota_tti + 1;
        next_tti = xran_lib_ota_tti;
    if(next_tti>= xran_fs_get_max_slot()){
        print_dbg("[%d]SFN %d sf %d slot %d\n",next_tti, frame_id, subframe_id, slot_id);
    slot_id = XranGetSlotNum(next_tti, SLOTNUM_PER_SUBFRAME);
    subframe_id = XranGetSubFrameNum(next_tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
    frame_id = XranGetFrameNum(next_tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
    print_dbg("[%d]SFN %d sf %d slot %d\n",next_tti, frame_id, subframe_id, slot_id);
    if(p_xran_dev_ctx->fh_init.io_cfg.id == ID_O_DU){
        pTCtx[(xran_lib_ota_tti & 1)].tti_to_process = next_tti;
        pTCtx[(xran_lib_ota_tti & 1)].tti_to_process = pTCtx[(xran_lib_ota_tti & 1)^1].tti_to_process;
    p_xran_dev_ctx->phy_tti_cb_done = 0;
    xran_timer_arm_ex(&tti_to_phy_timer[xran_lib_ota_tti % 10], tti_to_phy_cb, (void*)pTCtx, tim_lcore);
    //slot index is increased to next slot at the beginning of current OTA slot
    if(xran_lib_ota_tti >= xran_fs_get_max_slot()){
        print_dbg("[%d]SFN %d sf %d slot %d\n",xran_lib_ota_tti, frame_id, subframe_id, slot_id);
    MLogTask(PID_TTI_CB, t1, MLogTick());
/* Arm a one-shot DPDK timer on the timing core with the callback passed via
 * arg; the timer context is always &timer_ctx[0]. No-op unless the interface
 * is in XRAN_RUNNING state. */
void xran_timer_arm(struct rte_timer *tim, void* arg)
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    uint64_t t3 = MLogTick();
    if (xran_if_current_state == XRAN_RUNNING){
        rte_timer_cb_t fct = (rte_timer_cb_t)arg;
        rte_timer_reset_sync(tim, 0, SINGLE, p_xran_dev_ctx->fh_init.io_cfg.timing_core, fct, &timer_ctx[0]);
    MLogTask(PID_TIME_ARM_TIMER, t3, MLogTick());
/* Arm a one-shot deadline timer: snapshot the current RX TTI (from the
 * current frame/subframe/slot indices) into a rotating cb_timer_ctx slot and
 * schedule the callback on the packet-processing core when configured,
 * otherwise on the timing core. Only armed while in XRAN_RUNNING state.
 * NOTE(review): some interior lines are elided in this extract. */
void xran_timer_arm_for_deadline(struct rte_timer *tim, void* arg)
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    uint64_t t3 = MLogTick();
    static int timer_cnt = 0;
    unsigned tim_lcore = (p_xran_dev_ctx->fh_init.io_cfg.pkt_proc_core) ? p_xran_dev_ctx->pkt_proc_core_id :
        p_xran_dev_ctx->fh_init.io_cfg.timing_core;
    uint32_t nSubframeIdx;
    xran_get_slot_idx(&nFrameIdx, &nSubframeIdx, &nSlotIdx, &nSecond);
    rx_tti = nFrameIdx*SUBFRAMES_PER_SYSTEMFRAME*SLOTNUM_PER_SUBFRAME
        + nSubframeIdx*SLOTNUM_PER_SUBFRAME
    cb_timer_ctx[timer_cnt].tti_to_process = rx_tti;
    if (xran_if_current_state == XRAN_RUNNING){
        rte_timer_cb_t fct = (rte_timer_cb_t)arg;
        /* rotate through the cb_timer_ctx pool to avoid reuse before expiry */
        rte_timer_reset_sync(tim, 0, SINGLE, tim_lcore, fct, &cb_timer_ctx[timer_cnt++]);
        if (timer_cnt >= 10*MAX_NUM_OF_XRAN_CTX)
    MLogTask(PID_TIME_ARM_TIMER_DEADLINE, t3, MLogTick());
/* Arm a one-shot DPDK timer on an explicit lcore with an explicit callback
 * and argument. No-op unless the interface is in XRAN_RUNNING state. */
void xran_timer_arm_ex(struct rte_timer *tim, void* CbFct, void *CbArg, unsigned tim_lcore)
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    uint64_t t3 = MLogTick();
    if (xran_if_current_state == XRAN_RUNNING){
        rte_timer_cb_t fct = (rte_timer_cb_t)CbFct;
        rte_timer_reset_sync(tim, 0, SINGLE, tim_lcore, fct, CbArg);
    MLogTask(PID_TIME_ARM_TIMER, t3, MLogTick());
854 uint16_t xran_map_ecpriRtcid_to_vf(int32_t dir, int32_t cc_id, int32_t ru_port_id)
859 uint16_t xran_map_ecpriPcid_to_vf(int32_t dir, int32_t cc_id, int32_t ru_port_id)
/**
 * Build and transmit one C-Plane (section type 1) packet per PRB-map element
 * for the given (ru_port, dir, tti, cc): fills header and section fields
 * from the PRB element, handles UL buffer recycling, appends a section
 * extension type 1 with BF weights for Category B when an update is pending,
 * prepends the Ethernet header, sends via the device's C-Plane ring, and
 * records the section in the section database.
 * NOTE(review): several interior lines (braces/else branches/returns) are
 * elided in this extract.
 */
int xran_cp_create_and_send_section(void *pHandle, uint8_t ru_port_id, int dir, int tti, int cc_id,
        struct xran_prb_map *prbMap, enum xran_category category, uint8_t ctx_id)
    struct xran_device_ctx *p_x_ctx = xran_dev_get_ctx();
    struct xran_common_counters *pCnt = &p_x_ctx->fh_counters;
    struct xran_cp_gen_params params;
    struct xran_section_gen_info sect_geninfo[1];
    struct rte_mbuf *mbuf;
    uint32_t nsection = 0;
    struct xran_prb_elm *pPrbMapElem = NULL;
    struct xran_prb_elm *pPrbMapElemPrev = NULL;
    uint32_t slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
    uint32_t subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
    uint32_t frame_id = XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
    frame_id = (frame_id & 0xff); /* ORAN frameId, 8 bits, [0, 255] */
    struct xran_sectionext1_info m_ext1;
    nsection = prbMap->nPrbElm;
    pPrbMapElem = &prbMap->prbMap[0];
    print_dbg("cp[%d:%d:%d] ru_port_id %d dir=%d nsection %d\n",
        frame_id, subframe_id, slot_id, ru_port_id, dir, nsection);
    print_err("prbMap is NULL\n");
    /* one C-Plane packet per PRB-map element */
    for (i=0; i<nsection; i++)
        pPrbMapElem = &prbMap->prbMap[i];
        params.sectionType = XRAN_CP_SECTIONTYPE_1; /* Most DL/UL Radio Channels */
        params.hdr.filterIdx = XRAN_FILTERINDEX_STANDARD;
        params.hdr.frameId = frame_id;
        params.hdr.subframeId = subframe_id;
        params.hdr.slotId = slot_id;
        params.hdr.startSymId = pPrbMapElem->nStartSymb;
        params.hdr.iqWidth = pPrbMapElem->iqWidth; /*xran_get_conf_iqwidth(pHandle);*/
        params.hdr.compMeth = pPrbMapElem->compMethod;
        print_dbg("cp[%d:%d:%d] ru_port_id %d dir=%d\n",
            frame_id, subframe_id, slot_id, ru_port_id, dir);
        seq_id = xran_get_cp_seqid(pHandle, XRAN_DIR_DL, cc_id, ru_port_id);
        sect_geninfo[0].info.type = params.sectionType; // for database
        sect_geninfo[0].info.startSymId = params.hdr.startSymId; // for database
        sect_geninfo[0].info.iqWidth = params.hdr.iqWidth; // for database
        sect_geninfo[0].info.compMeth = params.hdr.compMeth; // for database
        sect_geninfo[0].info.id = i; /*xran_alloc_sectionid(pHandle, dir, cc_id, ru_port_id, slot_id);*/
        if(sect_geninfo[0].info.id > 7)
            print_err("sectinfo->id %d\n", sect_geninfo[0].info.id);
        if (dir == XRAN_DIR_UL) {
            /* UL: tag section descriptors and free any stale control mbufs */
            for (loc_sym = 0; loc_sym < XRAN_NUM_OF_SYMBOL_PER_SLOT; loc_sym++){
                struct xran_section_desc *p_sec_desc = pPrbMapElem->p_sec_desc[loc_sym];
                p_sec_desc->section_id = sect_geninfo[0].info.id;
                if(p_sec_desc->pCtrl) {
                    rte_pktmbuf_free(p_sec_desc->pCtrl);
                    p_sec_desc->pCtrl = NULL;
                    p_sec_desc->pData = NULL;
                print_err("section desc is NULL\n");
        sect_geninfo[0].info.rb = XRAN_RBIND_EVERY;
        sect_geninfo[0].info.startPrbc = pPrbMapElem->nRBStart;
        sect_geninfo[0].info.numPrbc = pPrbMapElem->nRBSize;
        sect_geninfo[0].info.numSymbol = pPrbMapElem->numSymb;
        sect_geninfo[0].info.reMask = 0xfff;
        sect_geninfo[0].info.beamId = pPrbMapElem->nBeamIndex;
        for (loc_sym = 0; loc_sym < XRAN_NUM_OF_SYMBOL_PER_SLOT; loc_sym++){
            struct xran_section_desc *p_sec_desc = pPrbMapElem->p_sec_desc[loc_sym];
            p_sec_desc->section_id = sect_geninfo[0].info.id;
            sect_geninfo[0].info.sec_desc[loc_sym].iq_buffer_offset = p_sec_desc->iq_buffer_offset;
            sect_geninfo[0].info.sec_desc[loc_sym].iq_buffer_len = p_sec_desc->iq_buffer_len;
            print_err("section desc is NULL\n");
        /* symInc: set when this element continues from the previous one's symbols */
        sect_geninfo[0].info.symInc = XRAN_SYMBOLNUMBER_NOTINC;
        pPrbMapElemPrev = &prbMap->prbMap[i-1];
        if (pPrbMapElemPrev->nStartSymb == pPrbMapElem->nStartSymb)
            sect_geninfo[0].info.symInc = XRAN_SYMBOLNUMBER_NOTINC;
            if (pPrbMapElemPrev->numSymb != pPrbMapElem->numSymb)
                print_err("section info error: previous numSymb %d not equal to current numSymb %d\n", pPrbMapElemPrev->numSymb, pPrbMapElem->numSymb);
            sect_geninfo[0].info.symInc = XRAN_SYMBOLNUMBER_INC;
            if (pPrbMapElem->nStartSymb != (pPrbMapElemPrev->nStartSymb + pPrbMapElemPrev->numSymb))
                print_err("section info error: current startSym %d not equal to previous endSymb %d\n", pPrbMapElem->nStartSymb, pPrbMapElemPrev->nStartSymb + pPrbMapElemPrev->numSymb);
        if(category == XRAN_CATEGORY_A){
            /* no extention sections for category */
            sect_geninfo[0].info.ef = 0;
            sect_geninfo[0].exDataSize = 0;
            mbuf = xran_ethdi_mbuf_alloc();
        } else if (category == XRAN_CATEGORY_B) {
            /*add extantion section for BF Weights if update is needed */
            if(pPrbMapElem->bf_weight_update){
                struct rte_mbuf_ext_shared_info * p_share_data = &cp_share_data[tti % XRAN_N_FE_BUF_LEN][cc_id][ru_port_id];
                if (pPrbMapElem->bf_weight.p_ext_start){
                    /* use buffer with BF Weights for mbuf */
                    mbuf = xran_attach_cp_ext_buf(pPrbMapElem->bf_weight.p_ext_start,
                        pPrbMapElem->bf_weight.p_ext_section, pPrbMapElem->bf_weight.ext_section_sz, p_share_data);
                    print_err("Alloc fail!\n");
                memset(&m_ext1, 0, sizeof (struct xran_sectionext1_info));
                m_ext1.bfwNumber = pPrbMapElem->bf_weight.nAntElmTRx;
                m_ext1.bfwiqWidth = pPrbMapElem->iqWidth;
                m_ext1.bfwCompMeth = pPrbMapElem->compMethod;
                m_ext1.p_bfwIQ = (int16_t*)pPrbMapElem->bf_weight.p_ext_section;
                m_ext1.bfwIQ_sz = pPrbMapElem->bf_weight.ext_section_sz;
                sect_geninfo[0].exData[0].type = XRAN_CP_SECTIONEXTCMD_1;
                sect_geninfo[0].exData[0].len = sizeof(m_ext1);
                sect_geninfo[0].exData[0].data = &m_ext1;
                sect_geninfo[0].info.ef = 1;
                sect_geninfo[0].exDataSize = 1;
                mbuf = xran_ethdi_mbuf_alloc();
                sect_geninfo[0].info.ef = 0;
                sect_geninfo[0].exDataSize = 0;
            print_err("Unsupported Category %d\n", category);
        if(unlikely(mbuf == NULL)) {
            print_err("Alloc fail!\n");
        params.numSections = 1;//nsection;
        params.sections = sect_geninfo;
        ret = xran_prepare_ctrl_pkt(mbuf, &params, cc_id, ru_port_id, seq_id);
            print_err("Fail to build control plane packet - [%d:%d:%d] dir=%d\n",
                frame_id, subframe_id, slot_id, dir);
            /* add in the ethernet header */
            struct rte_ether_hdr *const h = (void *)rte_pktmbuf_prepend(mbuf, sizeof(*h));
            pCnt->tx_bytes_counter += rte_pktmbuf_pkt_len(mbuf);
            p_x_ctx->send_cpmbuf2ring(mbuf, ETHER_TYPE_ECPRI, xran_map_ecpriRtcid_to_vf(dir, cc_id, ru_port_id));
            /*for(i=0; i<nsection; i++)*/
            xran_cp_add_section_info(pHandle,
                dir, cc_id, ru_port_id,
                &sect_geninfo[0].info);
/* DL C-Plane TX timer callback: for the TTI published by tti_ota_cb, and for
 * every (antenna, CC) pair, resets the section database context and — when
 * the slot is a DL slot and a PRB map is present — builds and sends the DL
 * C-Plane sections. Skipped until first_call is set and only when C-Plane is
 * enabled. NOTE(review): some interior lines are elided in this extract. */
void tx_cp_dl_cb(struct rte_timer *tim, void *arg)
    long t1 = MLogTick();
    uint32_t slot_id, subframe_id, frame_id;
    uint8_t ant_id, num_eAxc, num_CCPorts;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
    pHandle = NULL; // TODO: temp implemantation
    num_eAxc = xran_get_num_eAxc(pHandle);
    num_CCPorts = xran_get_num_cc(pHandle);
    if(first_call && p_xran_dev_ctx->enableCP) {
        /* TTI published by tti_ota_cb into the "other" double-buffer half */
        tti = pTCtx[(xran_lib_ota_tti & 1) ^ 1].tti_to_process;
        buf_id = tti % XRAN_N_FE_BUF_LEN;
        slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
        subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
        frame_id = XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
        /* Wrap around to next second */
        frame_id = (frame_id + NUM_OF_FRAMES_PER_SECOND) & 0x3ff;
        ctx_id = XranGetSlotNum(tti, SLOTS_PER_SYSTEMFRAME) % XRAN_MAX_SECTIONDB_CTX;
        print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);
        for(ant_id = 0; ant_id < num_eAxc; ++ant_id) {
            for(cc_id = 0; cc_id < num_CCPorts; cc_id++ ) {
                /* start new section information list */
                xran_cp_reset_section_info(pHandle, XRAN_DIR_DL, cc_id, ant_id, ctx_id);
                if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_DL) == 1) {
                    if(p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData){
                        num_list = xran_cp_create_and_send_section(pHandle, ant_id, XRAN_DIR_DL, tti, cc_id,
                            (struct xran_prb_map *)p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData,
                            p_xran_dev_ctx->fh_cfg.ru_conf.xranCat, ctx_id);
                        print_err("[%d]SFN %d sf %d slot %d: ant_id %d cc_id %d \n", tti, frame_id, subframe_id, slot_id, ant_id, cc_id);
                } /* if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_DL) == 1) */
            } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
        } /* for(ant_id = 0; ant_id < num_eAxc; ++ant_id) */
    MLogTask(PID_CP_DL_CB, t1, MLogTick());
/**
 * @brief Timer callback fired at the half-slot UL receive deadline.
 *
 * Notifies the PHY (per component carrier) that the first half of the
 * current TTI's UL symbols should be available, unless packet-driven
 * callbacks already fired for this slot (tracker == 0 check).
 *
 * NOTE(review): lines are missing in this extraction (e.g. the early
 * return after the xran2phy_mem_ready test and closing braces); comments
 * reflect apparent intent only.
 *
 * @param tim  expired DPDK timer (unused directly)
 * @param arg  struct xran_timer_ctx carrying the TTI to report
 */
1105 void rx_ul_deadline_half_cb(struct rte_timer *tim, void *arg)
1107     long t1 = MLogTick();
1108     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1109     xran_status_t status;
1110     /* half of RX for current TTI as measured against current OTA time */
1114     uint32_t nSubframeIdx;
/* dead code kept from an older TTI-derivation path: */
1118     /*xran_get_slot_idx(&nFrameIdx, &nSubframeIdx, &nSlotIdx, &nSecond);
1119     rx_tti = nFrameIdx*SUBFRAMES_PER_SYSTEMFRAME*SLOTNUM_PER_SUBFRAME
1120            + nSubframeIdx*SLOTNUM_PER_SUBFRAME
1123     struct xran_timer_ctx* p_timer_ctx = (struct xran_timer_ctx*)arg;
1124     rx_tti = p_timer_ctx->tti_to_process;
/* skip notification until PHY shared memory is ready */
1126     if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
1129     for(cc_id = 0; cc_id < xran_get_num_cc(p_xran_dev_ctx); cc_id++) {
/* only fire if no packet-driven callback was already delivered for this buffer slot */
1130         if(p_xran_dev_ctx->rx_packet_callback_tracker[rx_tti % XRAN_N_FE_BUF_LEN][cc_id] == 0){
1131             struct xran_cb_tag *pTag = p_xran_dev_ctx->pCallbackTag[cc_id];
1132             pTag->cellId = cc_id;
1133             pTag->slotiId = rx_tti;
1134             pTag->symbol  = 0; /* last 7 sym means full slot of Symb */
1135             status = XRAN_STATUS_SUCCESS;
1136             if(p_xran_dev_ctx->pCallback[cc_id])
1137                 p_xran_dev_ctx->pCallback[cc_id](p_xran_dev_ctx->pCallbackTag[cc_id], status);
/* reset tracker for the next use of this buffer index */
1139             p_xran_dev_ctx->rx_packet_callback_tracker[rx_tti % XRAN_N_FE_BUF_LEN][cc_id] = 0;
1142     MLogTask(PID_UP_UL_HALF_DEAD_LINE_CB, t1, MLogTick());
/**
 * @brief Timer callback fired at the full-slot UL receive deadline.
 *
 * Reports end of RX for the *previous* TTI (rx_tti -= 1 relative to the
 * current OTA-derived TTI) to the PHY: per-CC UL symbol callback, then
 * optional PRACH and SRS callbacks, all tagged with symbol 7 = full slot.
 *
 * NOTE(review): extraction dropped lines here (declarations, an #else-like
 * alternate rx_tti path around the xran_fs_get_max_slot_SFN() wrap, early
 * return, braces); comments reflect apparent intent only.
 *
 * @param tim  expired DPDK timer (unused directly)
 * @param arg  timer context (unused in the visible path)
 */
1145 void rx_ul_deadline_full_cb(struct rte_timer *tim, void *arg)
1147     long t1 = MLogTick();
1148     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1149     xran_status_t status = 0;
1150     int32_t rx_tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
1153     uint32_t nSubframeIdx;
1157     xran_get_slot_idx(&nFrameIdx, &nSubframeIdx, &nSlotIdx, &nSecond);
1158     rx_tti = nFrameIdx*SUBFRAMES_PER_SYSTEMFRAME*SLOTNUM_PER_SUBFRAME
1159            + nSubframeIdx*SLOTNUM_PER_SUBFRAME
/* wrap-around: at TTI 0 report the last slot of the SFN period instead */
1163         rx_tti = (xran_fs_get_max_slot_SFN()-1);
1165         rx_tti -= 1; /* end of RX for prev TTI as measured against current OTA time */
/* skip notification until PHY shared memory is ready */
1167     if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
1171     for(cc_id = 0; cc_id < xran_get_num_cc(p_xran_dev_ctx); cc_id++) {
1172         struct xran_cb_tag *pTag = p_xran_dev_ctx->pCallbackTag[cc_id];
1173         pTag->cellId = cc_id;
1174         pTag->slotiId = rx_tti;
1175         pTag->symbol  = 7; /* last 7 sym means full slot of Symb */
1176         status = XRAN_STATUS_SUCCESS;
1177         if(p_xran_dev_ctx->pCallback[cc_id])
1178             p_xran_dev_ctx->pCallback[cc_id](p_xran_dev_ctx->pCallbackTag[cc_id], status);
/* PRACH completion uses its own tag/callback pair */
1180         if(p_xran_dev_ctx->pPrachCallback[cc_id]){
1181             struct xran_cb_tag *pTag = p_xran_dev_ctx->pPrachCallbackTag[cc_id];
1182             pTag->cellId = cc_id;
1183             pTag->slotiId = rx_tti;
1184             pTag->symbol  = 7; /* last 7 sym means full slot of Symb */
1185             p_xran_dev_ctx->pPrachCallback[cc_id](p_xran_dev_ctx->pPrachCallbackTag[cc_id], status);
/* SRS completion likewise has a dedicated tag/callback pair */
1188         if(p_xran_dev_ctx->pSrsCallback[cc_id]){
1189             struct xran_cb_tag *pTag = p_xran_dev_ctx->pSrsCallbackTag[cc_id];
1190             pTag->cellId = cc_id;
1191             pTag->slotiId = rx_tti;
1192             pTag->symbol  = 7; /* last 7 sym means full slot of Symb */
1193             p_xran_dev_ctx->pSrsCallback[cc_id](p_xran_dev_ctx->pSrsCallbackTag[cc_id], status);
1197     MLogTask(PID_UP_UL_FULL_DEAD_LINE_CB, t1, MLogTick());
/**
 * @brief DPDK rte_timer callback that builds and transmits UL C-Plane
 *        messages (and PRACH C-Plane when enabled) for the scheduled TTI.
 *
 * Fix applied: the calls to generate_cpmsg_prach()/send_cpmsg() contained
 * mojibake — the token `&params` had been HTML-entity-decoded into `¶ms`
 * (`&para` -> U+00B6). Restored the address-of expression; no other code
 * token changed.
 *
 * NOTE(review): this region of the file is decimated — some original lines
 * (declarations, braces, error path for mbuf alloc) are missing between the
 * numbered lines below; comments describe apparent intent only.
 *
 * @param tim  expired DPDK timer (unused directly)
 * @param arg  struct xran_timer_ctx pair; the entry for the *other* OTA
 *             parity slot carries the TTI to process
 */
1201 void tx_cp_ul_cb(struct rte_timer *tim, void *arg)
1203     long t1 = MLogTick();
1206     uint32_t slot_id, subframe_id, frame_id;
1208     int ant_id, prach_port_id;
1210     uint8_t num_eAxc, num_CCPorts;
1216     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1217     struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
1218     struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
1220     pHandle = NULL;     // TODO: temp implemantation
/* CAT A uses DL eAxC count for UL; otherwise a dedicated UL eAxC count */
1222     if(xran_get_ru_category(pHandle) == XRAN_CATEGORY_A)
1223         num_eAxc    = xran_get_num_eAxc(pHandle);
1225         num_eAxc    = xran_get_num_eAxcUl(pHandle);
1227     num_CCPorts = xran_get_num_cc(pHandle);
/* pick the TTI queued for the opposite OTA parity slot */
1228     tti = pTCtx[(xran_lib_ota_tti & 1) ^ 1].tti_to_process;
1229     buf_id = tti % XRAN_N_FE_BUF_LEN;
1230     slot_id     = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
1231     subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME,  SUBFRAMES_PER_SYSTEMFRAME);
1232     frame_id    = XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
1234         //Wrap around to next second
1235         frame_id = (frame_id + NUM_OF_FRAMES_PER_SECOND) & 0x3ff;
/* section DB context index cycles over XRAN_MAX_SECTIONDB_CTX slots */
1237     ctx_id      = XranGetSlotNum(tti, SLOTS_PER_SYSTEMFRAME) % XRAN_MAX_SECTIONDB_CTX;
1239     if(first_call && p_xran_dev_ctx->enableCP) {
1241         print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);
1243         for(ant_id = 0; ant_id < num_eAxc; ++ant_id) {
1244             for(cc_id = 0; cc_id < num_CCPorts; cc_id++) {
/* UL C-Plane goes out for UL and special (SP) slots */
1245                 if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_UL) == 1 ||
1246                     xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_SP) == 1 ){
1247                     /* start new section information list */
1248                     xran_cp_reset_section_info(pHandle, XRAN_DIR_UL, cc_id, ant_id, ctx_id);
1249                     num_list = xran_cp_create_and_send_section(pHandle, ant_id, XRAN_DIR_UL, tti, cc_id,
1250                         (struct xran_prb_map *)p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData,
1251                         p_xran_dev_ctx->fh_cfg.ru_conf.xranCat, ctx_id);
1252                 } /* if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_UL) == 1 */
1253             } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
1254         } /* for(ant_id = 0; ant_id < num_eAxc; ++ant_id) */
1256         if(p_xran_dev_ctx->enablePrach) {
1257             uint32_t is_prach_slot = xran_is_prach_slot(subframe_id, slot_id);
/* PRACH occasion: frame matches configured periodicity x/y and slot is a PRACH slot */
1258             if(((frame_id % pPrachCPConfig->x) == pPrachCPConfig->y[0]) && (is_prach_slot==1)) { //is prach slot
1259                 for(ant_id = 0; ant_id < num_eAxc; ++ant_id) {
1260                     for(cc_id = 0; cc_id < num_CCPorts; cc_id++) {
1261                         struct xran_cp_gen_params params;
1262                         struct xran_section_gen_info sect_geninfo[8];
1263                         struct rte_mbuf *mbuf = xran_ethdi_mbuf_alloc();
/* PRACH eAxC ids sit directly above the regular UL eAxC range */
1264                         prach_port_id = ant_id + num_eAxc;
1265                         /* start new section information list */
1266                         xran_cp_reset_section_info(pHandle, XRAN_DIR_UL, cc_id, prach_port_id, ctx_id);
1268                         beam_id = xran_get_beamid(pHandle, XRAN_DIR_UL, cc_id, prach_port_id, slot_id);
1269                         ret = generate_cpmsg_prach(pHandle, &params, sect_geninfo, mbuf, p_xran_dev_ctx,
1270                                     frame_id, subframe_id, slot_id,
1271                                     beam_id, cc_id, prach_port_id,
1272                                     xran_get_cp_seqid(pHandle, XRAN_DIR_UL, cc_id, prach_port_id));
1273                         if (ret == XRAN_STATUS_SUCCESS)
1274                             send_cpmsg(pHandle, mbuf, &params, sect_geninfo,
1275                                 cc_id, prach_port_id, xran_get_cp_seqid(pHandle, XRAN_DIR_UL, cc_id, prach_port_id));
1280     } /* if(p_xran_dev_ctx->enableCP) */
1282     MLogTask(PID_CP_UL_CB, t1, MLogTick());
/**
 * @brief Timer callback at the UL U-Plane full-slot boundary.
 *
 * Visible body only records an MLog task span (note it logs under
 * PID_TTI_CB_TO_PHY, the same id used by tti_to_phy_cb — presumably
 * intentional reuse; TODO confirm). Braces were dropped by extraction.
 *
 * @param tim  expired DPDK timer (unused)
 * @param arg  timer context (unused in the visible path)
 */
1285 void ul_up_full_slot_cb(struct rte_timer *tim, void *arg)
1287     long t1 = MLogTick();
1289     MLogTask(PID_TTI_CB_TO_PHY, t1, MLogTick());
/**
 * @brief Timer callback that delivers the per-TTI notification to the PHY.
 *
 * Marks phy_tti_cb_done, then either invokes the registered TTI callback
 * or decrements a skip counter (SkipTti) that suppresses a number of
 * notifications. A second branch (visible below) appears to handle the
 * first-call / frame-wrap case — NOTE(review): the surrounding control
 * flow (else branches, returns, braces) is missing from this extraction,
 * so the exact branch structure cannot be confirmed here.
 *
 * @param tim  expired DPDK timer (unused directly)
 * @param arg  timer context (unused in the visible path)
 */
1292 void tti_to_phy_cb(struct rte_timer *tim, void *arg)
1294     long t1 = MLogTick();
1295     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1297     p_xran_dev_ctx->phy_tti_cb_done = 1; /* DPDK called CB */
1299     if(p_xran_dev_ctx->ttiCb[XRAN_CB_TTI]){
/* SkipTti > 0 means "swallow this many TTI callbacks" */
1300         if(p_xran_dev_ctx->SkipTti[XRAN_CB_TTI] <= 0){
1301             p_xran_dev_ctx->ttiCb[XRAN_CB_TTI](p_xran_dev_ctx->TtiCbParam[XRAN_CB_TTI]);
1303             p_xran_dev_ctx->SkipTti[XRAN_CB_TTI]--;
1307         if(p_xran_dev_ctx->ttiCb[XRAN_CB_TTI]){
1308             int32_t tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
1309             uint32_t slot_id     = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
1310             uint32_t subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME,  SUBFRAMES_PER_SYSTEMFRAME);
1311             uint32_t frame_id = XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
/* detect the very last slot of the system-frame period */
1312             if((frame_id == xran_max_frame)&&(subframe_id==9)&&(slot_id == SLOTNUM_PER_SUBFRAME-1)) {  //(tti == xran_fs_get_max_slot()-1)
1318     MLogTask(PID_TTI_CB_TO_PHY, t1, MLogTick());
/**
 * @brief Main timing thread of the xRAN library.
 *
 * Pins itself to the configured timing core with SCHED_FIFO priority,
 * waits for a top-of-second boundary, derives per-symbol trigger offsets
 * for C-Plane DL/UL and U-Plane DL/UL from the configured T1a/Ta3/Ta4
 * windows, registers the per-symbol callbacks (tx_cp_dl_cb, tx_cp_ul_cb,
 * rx_ul_deadline_{half,full}_cb), then runs the symbol-tick polling loop
 * until the library is stopped, finally tearing down all callback list
 * entries.
 *
 * NOTE(review): this extraction dropped many lines (do{ openers, braces,
 * error paths, loop body around poll_next_tick); comments describe
 * apparent intent only and should be verified against the full source.
 *
 * @param args  unused thread argument (pthread-style entry)
 * @return      thread exit status (return statement not visible here)
 */
1321 int xran_timing_source_thread(void *args)
1325     int32_t do_reset = 0;
1328     int32_t result1,i,j;
1329     uint32_t delay_cp_dl;
1330     uint32_t delay_cp_ul;
1332     uint32_t delay_up_ul;
1333     uint32_t delay_cp2up;
1338     struct sched_param sched_param;
1339     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1340     uint64_t tWake = 0, tWakePrev = 0, tUsed = 0;
1341     struct cb_elem_entry * cb_elm = NULL;
1343     /* ToS = Top of Second start +- 1.5us */
1348     xran_core_used = rte_lcore_id();
1349     printf("%s [CPU %2d] [PID: %6d]\n", __FUNCTION__,  rte_lcore_id(), getpid());
1351     memset(&sched_param, 0, sizeof(struct sched_param));
1353     /* set main thread affinity mask to CPU2 */
1354     sched_param.sched_priority = XRAN_THREAD_DEFAULT_PRIO;
1357     CPU_SET(p_xran_dev_ctx->fh_init.io_cfg.timing_core, &cpuset);
/* note: assignment-in-condition is intentional here (affinity failure check) */
1358     if (result1 = pthread_setaffinity_np(pthread_self(), sizeof(cpu_set_t), &cpuset))
1360         printf("pthread_setaffinity_np failed: coreId = 2, result1 = %d\n",result1);
1362     if ((result1 = pthread_setschedparam(pthread_self(), SCHED_FIFO, &sched_param)))
1364         printf("priority is not changed: coreId = 2, result1 = %d\n",result1);
1367     if (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
/* busy-wait until just after a top-of-second (tv_nsec <= 1500ns) */
1369             timespec_get(&ts, TIME_UTC);
1370         }while (ts.tv_nsec >1500);
1371         struct tm * ptm = gmtime(&ts.tv_sec);
1373             strftime(buff, sizeof buff, "%D %T", ptm);
1374             printf("O-DU: thread_run start time: %s.%09ld UTC [%ld]\n", buff, ts.tv_nsec, interval_us);
/* O-DU transmit windows: C-Plane leads the TTI by T1a_max, U-Plane by T1a_max_up */
1377         delay_cp_dl = interval_us - p_xran_dev_ctx->fh_init.T1a_max_cp_dl;
1378         delay_cp_ul = interval_us - p_xran_dev_ctx->fh_init.T1a_max_cp_ul;
1379         delay_up    = p_xran_dev_ctx->fh_init.T1a_max_up;
1380         delay_up_ul = p_xran_dev_ctx->fh_init.Ta4_max;
1382         delay_cp2up = delay_up-delay_cp_dl;
/* convert microsecond delays into symbol indices within the slot */
1384         sym_cp_dl = delay_cp_dl*1000/(interval_us*1000/N_SYM_PER_SLOT)+1;
1385         sym_cp_ul = delay_cp_ul*1000/(interval_us*1000/N_SYM_PER_SLOT)+1;
1386         sym_up_ul = delay_up_ul*1000/(interval_us*1000/N_SYM_PER_SLOT);
1387         p_xran_dev_ctx->sym_up = sym_up = -(delay_up*1000/(interval_us*1000/N_SYM_PER_SLOT));
1388         p_xran_dev_ctx->sym_up_ul = sym_up_ul = (delay_up_ul*1000/(interval_us*1000/N_SYM_PER_SLOT)+1);
1390         printf("Start C-plane DL %d us after TTI  [trigger on sym %d]\n", delay_cp_dl, sym_cp_dl);
1391         printf("Start C-plane UL %d us after TTI  [trigger on sym %d]\n", delay_cp_ul, sym_cp_ul);
1392         printf("Start U-plane DL %d us before OTA [offset  in sym %d]\n", delay_up, sym_up);
1393         printf("Start U-plane UL %d us OTA        [offset  in sym %d]\n", delay_up_ul, sym_up_ul);
1395         printf("C-plane to U-plane delay %d us after TTI\n", delay_cp2up);
1396         printf("Start Sym timer %ld ns\n", TX_TIMER_INTERVAL/N_SYM_PER_SLOT);
/* register DL C-Plane callback on its trigger symbol */
1398         cb_elm = xran_create_cb(xran_timer_arm, tx_cp_dl_cb);
1400             LIST_INSERT_HEAD(&p_xran_dev_ctx->sym_cb_list_head[0][sym_cp_dl],
1404             print_err("cb_elm is NULL\n");
/* register UL C-Plane callback on its trigger symbol */
1409         cb_elm = xran_create_cb(xran_timer_arm, tx_cp_ul_cb);
1411             LIST_INSERT_HEAD(&p_xran_dev_ctx->sym_cb_list_head[0][sym_cp_ul],
1415             print_err("cb_elm is NULL\n");
1420         /* Full slot UL OTA + delay_up_ul */
1421         cb_elm = xran_create_cb(xran_timer_arm_for_deadline, rx_ul_deadline_full_cb);
1423             LIST_INSERT_HEAD(&p_xran_dev_ctx->sym_cb_list_head[0][sym_up_ul],
1427             print_err("cb_elm is NULL\n");
1432         /* Half slot UL OTA + delay_up_ul*/
1433         cb_elm = xran_create_cb(xran_timer_arm_for_deadline, rx_ul_deadline_half_cb);
1435             LIST_INSERT_HEAD(&p_xran_dev_ctx->sym_cb_list_head[0][sym_up_ul + N_SYM_PER_SLOT/2],
1439             print_err("cb_elm is NULL\n");
1443     } else {    // APP_O_RU
1444         /* calcualte when to send UL U-plane */
1445         delay_up = p_xran_dev_ctx->fh_init.Ta3_min;
1446         p_xran_dev_ctx->sym_up = sym_up = delay_up*1000/(interval_us*1000/N_SYM_PER_SLOT)+1;
1447         printf("Start UL U-plane %d us after OTA [offset in sym %d]\n", delay_up, sym_up);
/* O-RU also aligns its start to a top-of-second boundary */
1449             timespec_get(&ts, TIME_UTC);
1450         }while (ts.tv_nsec >1500);
1451         struct tm * ptm = gmtime(&ts.tv_sec);
1453             strftime(buff, sizeof buff, "%D %T", ptm);
1454             printf("RU: thread_run start time: %s.%09ld UTC [%ld]\n", buff, ts.tv_nsec, interval_us);
1458     printf("interval_us %ld\n", interval_us);
/* wait for the nanosecond counter to roll to 0 before starting the loop */
1460         timespec_get(&ts, TIME_UTC);
1461     }while (ts.tv_nsec == 0);
/* ---- main symbol-tick loop (loop construct lines missing here) ---- */
1464         /* Update Usage Stats */
1465         tWake = xran_tick();
1466         xran_used_tick += tUsed;
1469             xran_total_tick += get_ticks_diff(tWake, tWakePrev);
1474         delta = poll_next_tick(interval_us*1000L/N_SYM_PER_SLOT, &tUsed);
1475         if (XRAN_STOPPED == xran_if_current_state)
1478         if (likely(XRAN_RUNNING == xran_if_current_state))
1479             sym_ota_cb(&sym_timer, timer_ctx, &tUsed);
/* teardown: free every registered per-symbol callback element */
1483     for (i = 0; i< XRAN_MAX_SECTOR_NR; i++){
1484         for (j = 0; j< XRAN_NUM_OF_SYMBOL_PER_SLOT; j++){
1485             struct cb_elem_entry *cb_elm;
1486             LIST_FOREACH(cb_elm, &p_xran_dev_ctx->sym_cb_list_head[i][j], pointers){
1488                     LIST_REMOVE(cb_elm, pointers);
1489                     xran_destroy_cb(cb_elm);
1495     printf("Closing timing source thread...\n");
1499 /* Handle ecpri format. */
/**
 * @brief Dispatch a received eCPRI packet to the U-Plane or C-Plane handler.
 *
 * Validates minimum length, reads the eCPRI common header, accounts RX
 * bytes, then switches on message type: IQ data -> process_mbuf(),
 * real-time control -> process_cplane() (O-RU only; an O-DU receiving
 * C-Plane is an error).
 *
 * NOTE(review): case labels for the IQ-data path, return statements and
 * braces are missing from this extraction.
 *
 * @param pkt      received mbuf (ownership conveyed via the returned
 *                 MBUF_FREE/MBUF_KEEP decision — presumed from the
 *                 ret = MBUF_FREE default; confirm against full source)
 * @param rx_time  RX timestamp (unused in the visible path)
 * @return         MBUF_FREE/MBUF_KEEP disposition for the caller
 */
1500 int handle_ecpri_ethertype(struct rte_mbuf *pkt, uint64_t rx_time)
1502     const struct xran_ecpri_hdr *ecpri_hdr;
1504     int32_t ret = MBUF_FREE;
/* drop runts that cannot even hold the eCPRI common header */
1506     if (rte_pktmbuf_data_len(pkt) < sizeof(struct xran_ecpri_hdr)) {
1507         print_err("Packet too short - %d bytes", rte_pktmbuf_data_len(pkt));
1511     /* check eCPRI header. */
1512     ecpri_hdr = rte_pktmbuf_mtod(pkt, struct xran_ecpri_hdr *);
1513     if(ecpri_hdr == NULL){
1514         print_err("ecpri_hdr error\n");
1518     xran_dev_get_ctx()->fh_counters.rx_bytes_counter += rte_pktmbuf_pkt_len(pkt);
1519     switch(ecpri_hdr->cmnhdr.ecpri_mesg_type) {
/* (IQ-data case label dropped by extraction) */
1522             ret = process_mbuf(pkt);
1523 //            MLogTask(PID_PROCESS_UP_PKT, t1, MLogTick());
1526         case ECPRI_RT_CONTROL_DATA:
/* C-Plane is only valid in the O-RU receive direction */
1528             if(xran_dev_get_ctx()->fh_init.io_cfg.id == O_RU) {
1529                 ret = process_cplane(pkt);
1530                 xran_dev_get_ctx()->fh_counters.rx_counter++;
1532                 print_err("O-DU recevied C-Plane message!");
1534 //            MLogTask(PID_PROCESS_CP_PKT, t1, MLogTick());
1537             print_err("Invalid eCPRI message type - %d", ecpri_hdr->cmnhdr.ecpri_mesg_type);
/**
 * @brief Store one received PRACH symbol's IQ payload into the PRACH
 *        RX buffer for the PHY.
 *
 * Two paths: LE byte order performs a byte-swapping copy into the PHY
 * buffer (mbuf can then be freed); BE/network order swaps buffer
 * ownership — the mbuf's IQ pointer is installed in the buffer list and
 * the previous mbuf is freed, so the packet is kept (MBUF_KEEP).
 *
 * NOTE(review): part of the parameter list and several lines (else
 * branches, returns, braces) were dropped by this extraction.
 *
 * @param mbuf           packet owning iq_data_start
 * @param iq_data_start  pointer to the PRACH IQ samples inside mbuf
 * @param mb_free        out: MBUF_FREE or MBUF_KEEP disposition
 */
1543 int xran_process_prach_sym(void *arg,
1544                         struct rte_mbuf *mbuf,
1545                         void *iq_data_start,
1550                         uint8_t subframe_id,
1554                         uint16_t start_prbu,
1561     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1562     uint8_t symb_id_offset;
1564     xran_status_t status;
1565     void *pHandle = NULL;
1566     struct rte_mbuf *mb;
1568     uint16_t iq_sample_size_bits = 16;
/* drop silently until the PHY shared memory is ready */
1570     if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
1573     tti = frame_id * SLOTS_PER_SYSTEMFRAME + subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
/* pack tti+symbol into the status word reported upward */
1575     status = tti << 16 | symb_id;
1577     if(CC_ID < XRAN_MAX_SECTOR_NR && Ant_ID < XRAN_MAX_ANTENNA_NR && symb_id < XRAN_NUM_OF_SYMBOL_PER_SLOT){
1578         symb_id_offset = symb_id - p_xran_dev_ctx->prach_start_symbol[CC_ID]; //make the storing of prach packets to start from 0 for easy of processing within PHY
1579         pos = (char*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pData;
1580         if(pos && iq_data_start && size){
1581             if (p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_CPU_LE_BYTE_ORDER) {
1583                 uint16_t *psrc = (uint16_t *)iq_data_start;
1584                 uint16_t *pdst = (uint16_t *)pos;
1585                 /* network byte (be) order of IQ to CPU byte order (le) */
1586                 for (idx = 0; idx < size/sizeof(int16_t); idx++){
1587                     pdst[idx]  = (psrc[idx]>>8) | (psrc[idx]<<8); //rte_be_to_cpu_16(psrc[idx]);
/* copied out — packet can be released by the caller */
1589                 *mb_free = MBUF_FREE;
/* BE path: zero-copy buffer swap — free old mbuf, keep the new one */
1591                 mb = p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pCtrl;
1593                     rte_pktmbuf_free(mb);
1595                     print_err("mb==NULL\n");
1597                 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pData = iq_data_start;
1598                 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pCtrl = mbuf;
1599                 *mb_free = MBUF_KEEP;
1602             print_err("pos %p iq_data_start %p size %d\n",pos, iq_data_start, size);
1605         print_err("TTI %d(f_%d sf_%d slot_%d) CC %d Ant_ID %d symb_id %d\n",tti, frame_id, subframe_id, slot_id, CC_ID, Ant_ID, symb_id);
/**
 * @brief Store one received SRS symbol's IQ payload into the SRS RX
 *        buffer for the PHY.
 *
 * LE byte order is explicitly unsupported (rte_panic before the swap
 * loop). In BE/network order: if the MTU can carry the whole bandwidth
 * unfragmented, ownership of the mbuf is swapped into the buffer list
 * (MBUF_KEEP); otherwise the fragment's PRBs are memcpy'd and the mbuf
 * is freed (MBUF_FREE).
 *
 * NOTE(review): part of the parameter list and several lines (else
 * branches, returns, braces) were dropped by this extraction.
 *
 * @param mbuf           packet owning iq_data_start
 * @param iq_data_start  pointer to the SRS IQ samples inside mbuf
 * @param mb_free        out: MBUF_FREE or MBUF_KEEP disposition
 */
1611 int32_t xran_process_srs_sym(void *arg,
1612                         struct rte_mbuf *mbuf,
1613                         void *iq_data_start,
1618                         uint8_t subframe_id,
1622                         uint16_t start_prbu,
1629     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1631     xran_status_t status;
1632     struct rte_mbuf *mb = NULL;
1634     uint16_t iq_sample_size_bits = 16;
/* drop silently until the PHY shared memory is ready */
1636     if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
1639     tti = frame_id * SLOTS_PER_SYSTEMFRAME + subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
1641     status = tti << 16 | symb_id;
1643     if(CC_ID < XRAN_MAX_SECTOR_NR && Ant_ID < p_xran_dev_ctx->fh_cfg.nAntElmTRx && symb_id < XRAN_NUM_OF_SYMBOL_PER_SLOT) {
1644         pos = (char*) p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData;
/* offset by starting PRB: N_SC_PER_PRB subcarriers * 2 (I+Q) * bytes/sample */
1645         pos += start_prbu * N_SC_PER_PRB*(iq_sample_size_bits/8)*2;
1646         if(pos && iq_data_start && size){
1647             if (p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_CPU_LE_BYTE_ORDER) {
1649                 uint16_t *psrc = (uint16_t *)iq_data_start;
1650                 uint16_t *pdst = (uint16_t *)pos;
/* LE not supported for SRS: abort before the (unreachable) swap loop */
1651                 rte_panic("XRAN_CPU_LE_BYTE_ORDER is not supported 0x16%lx\n", (long)mb);
1652                 /* network byte (be) order of IQ to CPU byte order (le) */
1653                 for (idx = 0; idx < size/sizeof(int16_t); idx++){
1654                     pdst[idx]  = (psrc[idx]>>8) | (psrc[idx]<<8); //rte_be_to_cpu_16(psrc[idx]);
1656             } else if (likely(p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_NE_BE_BYTE_ORDER)){
1657                 if (likely (p_xran_dev_ctx->fh_init.mtu >=
1658                               p_xran_dev_ctx->fh_cfg.nULRBs * N_SC_PER_PRB*(iq_sample_size_bits/8)*2)) {
1659                     /* no fragmentation */
/* zero-copy buffer swap: free previous mbuf, install the new one */
1660                     mb = p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl;
1662                         rte_pktmbuf_free(mb);
1664                         print_err("mb==NULL\n");
1666                     p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData = iq_data_start;
1667                     p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl = mbuf;
1668                     *mb_free = MBUF_KEEP;
1670                     /* packet can be fragmented copy RBs */
1671                     rte_memcpy(pos, iq_data_start, size);
1672                     *mb_free = MBUF_FREE;
1676             print_err("pos %p iq_data_start %p size %d\n",pos, iq_data_start, size);
1679         print_err("TTI %d(f_%d sf_%d slot_%d) CC %d Ant_ID %d symb_id %d\n",tti, frame_id, subframe_id, slot_id, CC_ID, Ant_ID, symb_id);
/**
 * @brief Validate a received U-Plane packet's eCPRI sequence id.
 *
 * O-DU side checks the UL sequence-id stream, O-RU side the DL stream;
 * a mismatch is counted as a duplicate (Rx_pkt_dupl) and rejected.
 * On success the total received-message counter is bumped.
 *
 * NOTE(review): part of the parameter list, braces and a return on the
 * unknown-device-type path are missing from this extraction.
 *
 * @param seq_id  eCPRI sequence-id header of the packet
 * @return        XRAN_STATUS_SUCCESS or XRAN_STATUS_FAIL
 */
1685 int32_t xran_pkt_validate(void *arg,
1686                         struct rte_mbuf *mbuf,
1687                         void *iq_data_start,
1692                         uint8_t subframe_id,
1695                         struct ecpri_seq_id *seq_id,
1697                         uint16_t start_prbu,
1702     struct xran_device_ctx * pctx = xran_dev_get_ctx();
1703     struct xran_common_counters *pCnt = &pctx->fh_counters;
1705     if(pctx->fh_init.io_cfg.id == O_DU) {
/* O-DU receives UL traffic -> check the UL sequence-id stream */
1706         if(xran_check_upul_seqid(NULL, CC_ID, Ant_ID, slot_id, seq_id->seq_id) != XRAN_STATUS_SUCCESS) {
1707             pCnt->Rx_pkt_dupl++;
1708             return (XRAN_STATUS_FAIL);
1710     }else if(pctx->fh_init.io_cfg.id == O_RU) {
/* O-RU receives DL traffic -> check the DL sequence-id stream */
1711         if(xran_check_updl_seqid(NULL, CC_ID, Ant_ID, slot_id, seq_id->seq_id) != XRAN_STATUS_SUCCESS) {
1712             pCnt->Rx_pkt_dupl++;
1713             return (XRAN_STATUS_FAIL);
1716         print_err("incorrect dev type %d\n", pctx->fh_init.io_cfg.id);
1722     pCnt->Total_msgs_rcvd++;
1724     return XRAN_STATUS_SUCCESS;
/**
 * @brief Store one received U-Plane symbol's IQ payload into the front-haul
 *        RX buffers, using the PRB map section descriptor when sections are
 *        in use.
 *
 * Paths: LE byte order is unsupported (rte_panic); BE/network order with a
 * single PRB element either swaps mbuf ownership into the symbol buffer
 * (unfragmented) or memcpy's the PRBs (fragmented); with multiple PRB
 * elements the mbuf is attached to the per-section descriptor
 * (pData/pCtrl/len/offset) and kept.
 *
 * NOTE(review): part of the parameter list, returns, else branches and
 * braces are missing from this extraction; comments describe apparent
 * intent only.
 *
 * @param mbuf           packet owning iq_data_start
 * @param iq_data_start  pointer to IQ samples inside mbuf
 * @param mb_free        out: MBUF_FREE or MBUF_KEEP disposition
 */
1727 int32_t xran_process_rx_sym(void *arg,
1728                         struct rte_mbuf *mbuf,
1729                         void *iq_data_start,
1734                         uint8_t subframe_id,
1738                         uint16_t start_prbu,
1745     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1747     xran_status_t status;
1748     void *pHandle = NULL;
1749     struct rte_mbuf *mb = NULL;
1750     struct xran_prb_map * pRbMap    = NULL;
1751     struct xran_prb_elm * prbMapElm = NULL;
1753     uint16_t iq_sample_size_bits = 16;
1755     tti = frame_id * SLOTS_PER_SYSTEMFRAME + subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
1757     status = tti << 16 | symb_id;
1759     if(CC_ID < XRAN_MAX_SECTOR_NR && Ant_ID < XRAN_MAX_ANTENNA_NR && symb_id < XRAN_NUM_OF_SYMBOL_PER_SLOT){
1760         pos = (char*) p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData;
1761         pRbMap = (struct xran_prb_map *) p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers->pData;
/* reject section ids outside the PRB map */
1763             prbMapElm = &pRbMap->prbMap[sect_id];
1764             if(sect_id >= pRbMap->nPrbElm) {
1765                 print_err("sect_id %d !=pRbMap->nPrbElm %d\n", sect_id,pRbMap->nPrbElm);
1766                 *mb_free = MBUF_FREE;
1770             print_err("pRbMap==NULL\n");
1771             *mb_free = MBUF_FREE;
/* offset by starting PRB: N_SC_PER_PRB subcarriers * 2 (I+Q) * bytes/sample */
1775         pos += start_prbu * N_SC_PER_PRB*(iq_sample_size_bits/8)*2;
1776         if(pos && iq_data_start && size){
1777             if (p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_CPU_LE_BYTE_ORDER) {
1779                 uint16_t *psrc = (uint16_t *)iq_data_start;
1780                 uint16_t *pdst = (uint16_t *)pos;
/* LE not supported here: abort before the (unreachable) swap loop */
1781                 rte_panic("XRAN_CPU_LE_BYTE_ORDER is not supported 0x16%lx\n", (long)mb);
1782                 /* network byte (be) order of IQ to CPU byte order (le) */
1783                 for (idx = 0; idx < size/sizeof(int16_t); idx++){
1784                     pdst[idx]  = (psrc[idx]>>8) | (psrc[idx]<<8); //rte_be_to_cpu_16(psrc[idx]);
1786             } else if (likely(p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_NE_BE_BYTE_ORDER)){
1787                 if (pRbMap->nPrbElm == 1){
1788                     if (likely (p_xran_dev_ctx->fh_init.mtu >=
1789                               p_xran_dev_ctx->fh_cfg.nULRBs * N_SC_PER_PRB*(iq_sample_size_bits/8)*2))
1791                         /* no fragmentation */
/* zero-copy buffer swap: free previous mbuf, install the new one */
1792                         mb = p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl;
1794                             rte_pktmbuf_free(mb);
1796                             print_err("mb==NULL\n");
1798                         p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData = iq_data_start;
1799                         p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl = mbuf;
1800                         *mb_free = MBUF_KEEP;
1802                         /* packet can be fragmented copy RBs */
1803                         rte_memcpy(pos, iq_data_start, size);
1804                         *mb_free = MBUF_FREE;
/* multi-section PRB map: park the mbuf on the per-section descriptor */
1807                     prbMapElm = &pRbMap->prbMap[sect_id];
1808                     struct xran_section_desc *p_sec_desc =  prbMapElm->p_sec_desc[symb_id];
1810                         mb = p_sec_desc->pCtrl;
1812                             rte_pktmbuf_free(mb);
1814                         p_sec_desc->pData         = iq_data_start;
1815                         p_sec_desc->pCtrl         = mbuf;
1816                         p_sec_desc->iq_buffer_len = size;
1817                         p_sec_desc->iq_buffer_offset = RTE_PTR_DIFF(iq_data_start, mbuf);
1819                         print_err("p_sec_desc==NULL\n");
1820                         *mb_free = MBUF_FREE;
1823                     *mb_free = MBUF_KEEP;
1827             print_err("pos %p iq_data_start %p size %d\n",pos, iq_data_start, size);
1830         print_err("TTI %d(f_%d sf_%d slot_%d) CC %d Ant_ID %d symb_id %d\n",tti, frame_id, subframe_id, slot_id, CC_ID, Ant_ID, symb_id);
1836 /* Send burst of packets on an output interface */
/**
 * @brief Enqueue a burst of n queued TX mbufs for the given port onto the
 *        transmit ring, accounting TX bytes per packet.
 *
 * NOTE(review): the device-validity check around the rte_panic, the return
 * value handling after "ret < n" and the return statement are missing from
 * this extraction.
 *
 * @param dev   device context owning tx_mbufs[port]
 * @param n     number of mbufs staged in tx_mbufs[port].m_table
 * @param port  VF/port index used for ring selection
 */
1838 xran_send_burst(struct xran_device_ctx *dev, uint16_t n, uint16_t port)
1840     struct xran_common_counters * pCnt  = NULL;
1841     struct rte_mbuf **m_table;
1849         pCnt = &dev->fh_counters;
1851         rte_panic("incorrect dev\n");
1853     m_table = (struct rte_mbuf **)dev->tx_mbufs[port].m_table;
1855     for(i = 0; i < n; i++){
/* sanity-check each mbuf before handing it to the ring (debug aid) */
1856         rte_mbuf_sanity_check(m_table[i], 0);
1857         /*rte_pktmbuf_dump(stdout, m_table[i], 256);*/
1859         pCnt->tx_bytes_counter += rte_pktmbuf_pkt_len(m_table[i]);
1860         ret += dev->send_upmbuf2ring(m_table[i], ETHER_TYPE_ECPRI, port);
/* some packets were not accepted by the ring */
1863     if (unlikely(ret < n)) {
1864         print_err("ret < n\n");
/**
 * @brief Transmit U-Plane symbol data when C-Plane is disabled
 *        ("CP off" path), plus PRACH and SRS U-Plane on the O-RU side.
 *
 * For each PRB-map element of the symbol: builds a zero-copy packet by
 * attaching the IQ region as an external mbuf buffer (headroom reserved
 * for eCPRI + radio-app + section headers, and compression header when
 * compression is enabled), fills headers via prepare_symbol_ex(), and
 * pushes the packet to the U-Plane TX ring. Fallback paths use
 * send_symbol_ex(), splitting at 136 PRBs when the payload exceeds one
 * packet. PRACH and SRS symbols are sent by the O-RU only.
 *
 * NOTE(review): this extraction dropped many lines (declarations,
 * if/else and #if alternates between the extbuf and send_symbol_ex
 * paths, braces, returns); comments describe apparent intent only and
 * must be verified against the full source.
 *
 * @return int32_t status (return statement not visible in this view)
 */
1870 int32_t xran_process_tx_sym_cp_off(uint8_t ctx_id, uint32_t tti, int32_t cc_id, int32_t ant_id, uint32_t frame_id, uint32_t subframe_id, uint32_t slot_id, uint32_t sym_id,
1875     void        *pHandle = NULL;
1877     char        *p_sec_iq = NULL;
1878     //char        *p_sect_iq = NULL;
1880     void        *send_mb  = NULL;
1882     uint16_t    iq_sample_size_bits = 16; // TODO: make dynamic per
1884     struct xran_prb_map *prb_map = NULL;
1885     uint8_t  num_ant_elm  = 0;
1887     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1888     struct xran_common_counters * pCnt = &p_xran_dev_ctx->fh_counters;
1889     struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
1890     struct xran_srs_config *p_srs_cfg = &(p_xran_dev_ctx->srs_cfg);
1891     num_ant_elm = xran_get_num_ant_elm(pHandle);
1892     enum xran_pkt_dir direction;
1894     struct rte_mbuf *eth_oran_hdr = NULL;
1895     char        *ext_buff = NULL;
1896     uint16_t    ext_buff_len = 0;
1897     struct rte_mbuf *tmp = NULL;
1898     rte_iova_t ext_buff_iova = 0;
/* per-(tti,cc,ant) shared info for the external-buffer free callback */
1900     struct rte_mbuf_ext_shared_info * p_share_data = &share_data[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id];
1902     if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
1903         direction = XRAN_DIR_DL; /* O-DU */
1904         prb_num = p_xran_dev_ctx->fh_cfg.nDLRBs;
1906         direction = XRAN_DIR_UL; /* RU */
1907         prb_num = p_xran_dev_ctx->fh_cfg.nULRBs;
/* only act on slots that carry data in our TX direction (or SP/FDD) */
1910     if(xran_fs_get_slot_type(cc_id, tti, ((p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)? XRAN_SLOT_TYPE_DL : XRAN_SLOT_TYPE_UL)) ==  1
1911             || xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_SP) ==  1
1912             || xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_FDD) ==  1){
1914         if(xran_fs_get_symbol_type(cc_id, tti, sym_id) == ((p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)? XRAN_SYMBOL_TYPE_DL : XRAN_SYMBOL_TYPE_UL)
1915            || xran_fs_get_symbol_type(cc_id, tti, sym_id) == XRAN_SYMBOL_TYPE_FDD){
1917             pos = (char*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1918             mb  = (void*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;
1919             prb_map  = (struct xran_prb_map *) p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers->pData;
/* one packet per PRB-map element */
1924                 for (elmIdx = 0; elmIdx < prb_map->nPrbElm; elmIdx++){
1925                     uint16_t sec_id  = elmIdx;
1926                     struct xran_prb_elm * prb_map_elm = &prb_map->prbMap[elmIdx];
1927                     struct xran_section_desc * p_sec_desc = NULL;
1929                     if(prb_map_elm == NULL){
1930                         rte_panic("p_sec_desc == NULL\n");
1933                     p_sec_desc =  prb_map_elm->p_sec_desc[sym_id];
1936                     p_sec_iq = ((char*)pos + p_sec_desc->iq_buffer_offset);
1938                     /* calculete offset for external buffer */
/* reserve headroom in front of the IQ data for all TX headers */
1939                     ext_buff_len = p_sec_desc->iq_buffer_len;
1940                     ext_buff = p_sec_iq - (RTE_PKTMBUF_HEADROOM +
1941                                     sizeof (struct xran_ecpri_hdr) +
1942                                     sizeof (struct radio_app_common_hdr) +
1943                                     sizeof(struct data_section_hdr));
1945                     ext_buff_len += RTE_PKTMBUF_HEADROOM +
1946                                     sizeof (struct xran_ecpri_hdr) +
1947                                     sizeof (struct radio_app_common_hdr) +
1948                                     sizeof(struct data_section_hdr) + 18;
/* compression adds one more header in front of the section payload */
1950                     if(prb_map_elm->compMethod != XRAN_COMPMETHOD_NONE){
1951                         ext_buff     -= sizeof (struct data_section_compression_hdr);
1952                         ext_buff_len += sizeof (struct data_section_compression_hdr);
1955                     eth_oran_hdr =  rte_pktmbuf_alloc(_eth_mbuf_pool_small);
1957                     if (unlikely (( eth_oran_hdr) == NULL)) {
1958                         rte_panic("Failed rte_pktmbuf_alloc\n");
1961                     p_share_data->free_cb = extbuf_free_callback;
1962                     p_share_data->fcb_opaque = NULL;
1963                     rte_mbuf_ext_refcnt_set(p_share_data, 1);
/* IOVA of the backing mempool object; needed for attach_extbuf */
1965                     ext_buff_iova = rte_mempool_virt2iova(mb);
1966                     if (unlikely (( ext_buff_iova) == 0)) {
1967                         rte_panic("Failed rte_mem_virt2iova \n");
1970                     if (unlikely (( (rte_iova_t)ext_buff_iova) == RTE_BAD_IOVA)) {
1971                         rte_panic("Failed rte_mem_virt2iova RTE_BAD_IOVA \n");
/* attach the IQ region zero-copy as the packet's external data buffer */
1974                     rte_pktmbuf_attach_extbuf(eth_oran_hdr,
1976                                               ext_buff_iova + RTE_PTR_DIFF(ext_buff , mb),
1980                     rte_pktmbuf_reset_headroom(eth_oran_hdr);
1982                     tmp = (struct rte_mbuf *)rte_pktmbuf_prepend(eth_oran_hdr, sizeof(struct rte_ether_hdr));
1983                     if (unlikely (( tmp) == NULL)) {
1984                         rte_panic("Failed rte_pktmbuf_prepend \n");
1986                     send_mb = eth_oran_hdr;
/* sequence id depends on direction: DL from O-DU, UL from O-RU */
1989                     uint8_t seq_id = (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1990                                         xran_get_updl_seqid(pHandle, cc_id, ant_id) :
1991                                         xran_get_upul_seqid(pHandle, cc_id, ant_id);
1995                     /* first all PRBs */
1996                     int32_t num_bytes = prepare_symbol_ex(direction, sec_id,
1998                                       (struct rb_map *)p_sec_iq,
1999                                       prb_map_elm->compMethod,
2000                                       prb_map_elm->iqWidth,
2001                                       p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
2002                                       frame_id, subframe_id, slot_id, sym_id,
2003                                       prb_map_elm->nRBStart, prb_map_elm->nRBSize,
2008                     rte_mbuf_sanity_check((struct rte_mbuf *)send_mb, 0);
2010                     pCnt->tx_bytes_counter += rte_pktmbuf_pkt_len((struct rte_mbuf *)send_mb);
2011                     p_xran_dev_ctx->send_upmbuf2ring((struct rte_mbuf *)send_mb, ETHER_TYPE_ECPRI, xran_map_ecpriPcid_to_vf(direction, cc_id, ant_id));
/* ---- alternate (apparently legacy) send_symbol_ex path follows ---- */
2013                     p_sect_iq = pos + p_sec_desc->iq_buffer_offset;
2014                     prb_num = prb_map_elm->nRBSize;
2016                     if( prb_num > 136 || prb_num == 0) {
2017                         /* first 136 PRBs */
2018                         rte_panic("first 136 PRBs\n");
2019                         send_symbol_ex(direction,
2022                                         (struct rb_map *)p_sect_iq,
2023                                         p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
2024                                         frame_id, subframe_id, slot_id, sym_id,
2027                                         (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
2028                                             xran_get_updl_seqid(pHandle, cc_id, ant_id) :
2029                                             xran_get_upul_seqid(pHandle, cc_id, ant_id));
/* remainder beyond the first 136 PRBs */
2031                         pos += 136 * N_SC_PER_PRB * (iq_sample_size_bits/8)*2;
2033                         send_symbol_ex(direction, sec_id,
2035                                         (struct rb_map *)p_sect_iq,
2036                                         p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
2037                                         frame_id, subframe_id, slot_id, sym_id,
2040                                         (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
2041                                             xran_get_updl_seqid(pHandle, cc_id, ant_id) :
2042                                             xran_get_upul_seqid(pHandle, cc_id, ant_id));
2045                         send_symbol_ex(direction,
2046                                     sec_id, /* xran_alloc_sectionid(pHandle, direction, cc_id, ant_id, slot_id)*/
2047                                     /*(struct rte_mbuf *)mb*/ NULL,
2048                                     (struct rb_map *)p_sect_iq,
2049                                     p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
2050                                     frame_id, subframe_id, slot_id, sym_id,
2051                                     prb_map_elm->nRBStart, prb_map_elm->nRBSize,
2053                                     (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
2054                                         xran_get_updl_seqid(pHandle, cc_id, ant_id) :
2055                                         xran_get_upul_seqid(pHandle, cc_id, ant_id));
2063                 printf("(%d %d %d %d) prb_map == NULL\n", tti % XRAN_N_FE_BUF_LEN, cc_id, ant_id, sym_id);
/* ---- PRACH U-Plane: sent by O-RU only, within the PRACH occasion ---- */
2066             if(p_xran_dev_ctx->enablePrach
2067               && (p_xran_dev_ctx->fh_init.io_cfg.id == O_RU)) {   /* Only RU needs to send PRACH I/Q */
2068                 uint32_t is_prach_slot = xran_is_prach_slot(subframe_id, slot_id);
2069                 if(((frame_id % pPrachCPConfig->x) == pPrachCPConfig->y[0])
2070                         && (is_prach_slot == 1)
2071                         && (sym_id >= p_xran_dev_ctx->prach_start_symbol[cc_id])
2072                         && (sym_id <= p_xran_dev_ctx->prach_last_symbol[cc_id])) {  //is prach slot
2073                         int prach_port_id = ant_id + pPrachCPConfig->eAxC_offset;
2074                         pos = (char*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[0].pData;
2075                         pos += (sym_id - p_xran_dev_ctx->prach_start_symbol[cc_id]) * pPrachCPConfig->numPrbc * N_SC_PER_PRB * 4;
2076                         mb  = NULL;//(void*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[0].pCtrl;
2078                         send_symbol_ex(direction,
2079                                 xran_alloc_sectionid(pHandle, direction, cc_id, prach_port_id, slot_id),
2080                                 (struct rte_mbuf *)mb,
2081                                 (struct rb_map *)pos,
2082                                 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
2083                                 frame_id, subframe_id, slot_id, sym_id,
2084                                 pPrachCPConfig->startPrbc, pPrachCPConfig->numPrbc,
2085                                 cc_id, prach_port_id,
2086                                 xran_get_upul_seqid(pHandle, cc_id, prach_port_id));
2088                 } /* if((frame_id % pPrachCPConfig->x == pPrachCPConfig->y[0]) .... */
2089             } /* if(p_xran_dev_ctx->enablePrach ..... */
/* ---- SRS U-Plane: sent by O-RU only, on configured SRS symbols ---- */
2092             if(p_xran_dev_ctx->enableSrs && (p_xran_dev_ctx->fh_init.io_cfg.id ==  O_RU)){
2093                 if( p_srs_cfg->symbMask & (1 << sym_id) /* is SRS symbol */
2095                     int32_t ant_elm_id = 0;
2097                     for (ant_elm_id = 0; ant_elm_id < num_ant_elm; ant_elm_id++){
2098                         int32_t ant_elm_eAxC_id = ant_elm_id + p_srs_cfg->eAxC_offset;
2100                         pos = (char*) p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_elm_id].sBufferList.pBuffers[sym_id].pData;
2101                         mb  = (void*) p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_elm_id].sBufferList.pBuffers[sym_id].pCtrl;
/* wide bandwidth must be split into two packets at 136 PRBs */
2103                         if( prb_num > 136 || prb_num == 0) {
2104                             uint16_t sec_id  = xran_alloc_sectionid(pHandle, direction, cc_id, ant_elm_id, slot_id);
2105                             /* first 136 PRBs */
2106                             send_symbol_ex(direction,
2109                                     (struct rb_map *)pos,
2110                                     p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
2111                                     frame_id, subframe_id, slot_id, sym_id,
2113                                     cc_id, ant_elm_eAxC_id,
2114                                     (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
2115                                         xran_get_updl_seqid(pHandle, cc_id, ant_elm_eAxC_id) :
2116                                         xran_get_upul_seqid(pHandle, cc_id, ant_elm_eAxC_id));
/* remainder beyond the first 136 PRBs */
2118                             pos += 136 * N_SC_PER_PRB * (iq_sample_size_bits/8)*2;
2120                             send_symbol_ex(direction, sec_id,
2122                                     (struct rb_map *)pos,
2123                                     p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
2124                                     frame_id, subframe_id, slot_id, sym_id,
2126                                     cc_id, ant_elm_eAxC_id,
2127                                     (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
2128                                         xran_get_updl_seqid(pHandle, cc_id, ant_elm_eAxC_id) :
2129                                         xran_get_upul_seqid(pHandle, cc_id, ant_elm_eAxC_id));
2131                             send_symbol_ex(direction,
2132                                     xran_alloc_sectionid(pHandle, direction, cc_id, ant_elm_eAxC_id, slot_id),
2133                                     (struct rte_mbuf *)mb,
2134                                     (struct rb_map *)pos,
2135                                     p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
2136                                     frame_id, subframe_id, slot_id, sym_id,
2138                                     cc_id, ant_elm_eAxC_id,
2139                                     (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
2140                                         xran_get_updl_seqid(pHandle, cc_id, ant_elm_eAxC_id) :
2141                                         xran_get_upul_seqid(pHandle, cc_id, ant_elm_eAxC_id));
2144                     } /* for ant elem */
2147     } /* RU mode or C-Plane is not used */
/**
 * Attach a C-Plane IQ/section payload region to a freshly allocated mbuf as a
 * zero-copy external buffer.
 *
 * The payload pointer is rewound far enough to reserve headroom for the eCPRI
 * header plus the section type 1 radio-app headers, and the length is grown by
 * the same amount (the trailing "+ 18" presumably covers L2 overhead — TODO
 * confirm against the packet layout).
 *
 * NOTE(review): this excerpt has sampling gaps; closing braces of the error
 * branches and some arguments of rte_pktmbuf_attach_extbuf() are not visible.
 *
 * @param p_ext_buff_start base address of the allocation the payload lives in
 * @param p_ext_buff       pointer to the payload (IQ/section data) itself
 * @param ext_buff_len     payload length in bytes
 * @param p_share_data     per-buffer shared info used for extbuf refcounting
 * @return mbuf with the external buffer attached (panics on failure)
 */
2154 xran_attach_cp_ext_buf(int8_t* p_ext_buff_start, int8_t* p_ext_buff, uint16_t ext_buff_len,
2155 struct rte_mbuf_ext_shared_info * p_share_data)
2157 struct rte_mbuf *mb_oran_hdr_ext = NULL;
2158 struct rte_mbuf *tmp = NULL;
2159 int8_t *ext_buff = NULL;
2160 rte_iova_t ext_buff_iova = 0;
/* rewind the payload pointer to make room for headers written in front of it */
2162 ext_buff = p_ext_buff - (RTE_PKTMBUF_HEADROOM +
2163 sizeof(struct xran_ecpri_hdr) +
2164 sizeof(struct xran_cp_radioapp_section1_header) +
2165 sizeof(struct xran_cp_radioapp_section1));
2167 ext_buff_len += (RTE_PKTMBUF_HEADROOM +
2168 sizeof(struct xran_ecpri_hdr) +
2169 sizeof(struct xran_cp_radioapp_section1_header) +
2170 sizeof(struct xran_cp_radioapp_section1)) + 18;
2172 mb_oran_hdr_ext = rte_pktmbuf_alloc(_eth_mbuf_pool_small);
2174 if (unlikely (( mb_oran_hdr_ext) == NULL)) {
2175 rte_panic("Failed rte_pktmbuf_alloc\n");
/* set up extbuf free callback + refcount so DPDK can release the region */
2178 p_share_data->free_cb = extbuf_free_callback;
2179 p_share_data->fcb_opaque = NULL;
2180 rte_mbuf_ext_refcnt_set(p_share_data, 1);
/* translate the allocation base to an IO address for the NIC */
2182 ext_buff_iova = rte_malloc_virt2iova(p_ext_buff_start);
2183 if (unlikely (( ext_buff_iova) == 0)) {
2184 rte_panic("Failed rte_mem_virt2iova \n");
2187 if (unlikely (( (rte_iova_t)ext_buff_iova) == RTE_BAD_IOVA)) {
2188 rte_panic("Failed rte_mem_virt2iova RTE_BAD_IOVA \n");
/* attach at base IOVA + offset of the rewound pointer within the allocation */
2191 rte_pktmbuf_attach_extbuf(mb_oran_hdr_ext,
2193 ext_buff_iova + RTE_PTR_DIFF(ext_buff , p_ext_buff_start),
2197 rte_pktmbuf_reset_headroom(mb_oran_hdr_ext);
2199 return mb_oran_hdr_ext;
/**
 * Attach a U-Plane IQ payload region to a freshly allocated mbuf as a
 * zero-copy external buffer (U-Plane sibling of xran_attach_cp_ext_buf).
 *
 * Reserves headroom for eCPRI + radio-app common + data-section headers, plus
 * the compression header when compMeth is not NONE, then prepends an Ethernet
 * header to the resulting mbuf.
 *
 * NOTE(review): this variant uses rte_mempool_virt2iova() while the C-Plane
 * variant uses rte_malloc_virt2iova() — confirm the allocator of
 * p_ext_buff_start matches the translation function used here.
 *
 * @param p_ext_buff_start base address of the allocation the payload lives in
 * @param p_ext_buff       pointer to the IQ payload itself
 * @param ext_buff_len     payload length in bytes
 * @param p_share_data     per-buffer shared info used for extbuf refcounting
 * @param compMeth         compression method; non-NONE adds udComp header room
 * @return mbuf with ext buffer attached and ether header prepended (panics on failure)
 */
2204 xran_attach_up_ext_buf(int8_t* p_ext_buff_start, int8_t* p_ext_buff, uint16_t ext_buff_len,
2205 struct rte_mbuf_ext_shared_info * p_share_data,
2206 enum xran_compression_method compMeth)
2208 struct rte_mbuf *mb_oran_hdr_ext = NULL;
2209 struct rte_mbuf *tmp = NULL;
2210 int8_t *ext_buff = NULL;
2211 rte_iova_t ext_buff_iova = 0;
/* rewind payload pointer to leave room for U-Plane headers */
2213 ext_buff = p_ext_buff - (RTE_PKTMBUF_HEADROOM +
2214 sizeof(struct xran_ecpri_hdr) +
2215 sizeof(struct radio_app_common_hdr) +
2216 sizeof(struct data_section_hdr));
2218 ext_buff_len += RTE_PKTMBUF_HEADROOM +
2219 sizeof(struct xran_ecpri_hdr) +
2220 sizeof(struct radio_app_common_hdr) +
2221 sizeof(struct data_section_hdr) + 18;
/* compressed IQ carries an extra per-section compression header */
2223 if(compMeth != XRAN_COMPMETHOD_NONE) {
2224 ext_buff -= sizeof (struct data_section_compression_hdr);
2225 ext_buff_len += sizeof (struct data_section_compression_hdr);
2228 mb_oran_hdr_ext = rte_pktmbuf_alloc(_eth_mbuf_pool_small);
2230 if (unlikely (( mb_oran_hdr_ext) == NULL)) {
2231 rte_panic("Failed rte_pktmbuf_alloc\n");
2234 p_share_data->free_cb = extbuf_free_callback;
2235 p_share_data->fcb_opaque = NULL;
2236 rte_mbuf_ext_refcnt_set(p_share_data, 1);
2238 ext_buff_iova = rte_mempool_virt2iova(p_ext_buff_start);
2239 if (unlikely (( ext_buff_iova) == 0)) {
2240 rte_panic("Failed rte_mem_virt2iova \n");
2243 if (unlikely (( (rte_iova_t)ext_buff_iova) == RTE_BAD_IOVA)) {
2244 rte_panic("Failed rte_mem_virt2iova RTE_BAD_IOVA \n");
2247 rte_pktmbuf_attach_extbuf(mb_oran_hdr_ext,
2249 ext_buff_iova + RTE_PTR_DIFF(ext_buff , p_ext_buff_start),
2253 rte_pktmbuf_reset_headroom(mb_oran_hdr_ext);
/* make room for the L2 header in front of the xRAN headers */
2255 tmp = (struct rte_mbuf *)rte_pktmbuf_prepend(mb_oran_hdr_ext, sizeof(struct rte_ether_hdr));
2256 if (unlikely (( tmp) == NULL)) {
2257 rte_panic("Failed rte_pktmbuf_prepend \n");
2260 return mb_oran_hdr_ext;
/**
 * Generate and transmit U-Plane packets for one symbol when C-Plane is
 * enabled: walks the section database populated by C-Plane processing and
 * emits a U-Plane packet per scheduled type-1 section, fragmenting when the
 * payload exceeds the configured MTU.
 *
 * Direction and PRB count are chosen by role: O-DU sends DL, O-RU sends UL.
 *
 * NOTE(review): sampling gaps hide several lines (else branches, some
 * prepare_symbol_ex()/xran_app_fragment_packet() arguments, loop closings);
 * comments below describe only what is visible.
 *
 * @param ctx_id      section DB context index (slot-in-frame based)
 * @param tti         TTI counter used to index the fronthaul buffer ring
 * @param cc_id       component carrier index
 * @param ant_id      eAxC / antenna port index
 * @param frame_id    ORAN frame number (8-bit)
 * @param subframe_id subframe within frame
 * @param slot_id     slot within subframe
 * @param sym_id      symbol within slot being processed
 */
2263 int32_t xran_process_tx_sym_cp_on(uint8_t ctx_id, uint32_t tti, int32_t cc_id, int32_t ant_id, uint32_t frame_id, uint32_t subframe_id,
2264 uint32_t slot_id, uint32_t sym_id)
2268 struct rte_mbuf *eth_oran_hdr = NULL;
2269 char *ext_buff = NULL;
2270 uint16_t ext_buff_len = 0;
2271 struct rte_mbuf *tmp = NULL;
2272 rte_iova_t ext_buff_iova = 0;
2273 void *pHandle = NULL;
2275 char *p_sec_iq = NULL;
2278 uint16_t iq_sample_size_bits = 16; // TODO: make dynamic per
2280 int32_t num_sections = 0;
2282 struct xran_section_info *sectinfo = NULL;
2283 struct xran_device_ctx *p_xran_dev_ctx = xran_dev_get_ctx();
2284 enum xran_pkt_dir direction;
/* per (tti, cc, ant) extbuf shared-info slot for the zero-copy attach below */
2286 struct rte_mbuf_ext_shared_info * p_share_data = &share_data[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id];
/* role decides TX direction and which PRB budget applies */
2288 if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
2289 direction = XRAN_DIR_DL; /* O-DU */
2290 prb_num = p_xran_dev_ctx->fh_cfg.nDLRBs;
2292 direction = XRAN_DIR_UL; /* RU */
2293 prb_num = p_xran_dev_ctx->fh_cfg.nULRBs;
2297 num_sections = xran_cp_getsize_section_info(pHandle, direction, cc_id, ant_id, ctx_id);
2298 /* iterate C-Plane configuration to generate corresponding U-Plane */
2299 while(next < num_sections) {
2300 sectinfo = xran_cp_iterate_section_info(pHandle, direction, cc_id, ant_id, ctx_id, &next);
2302 if(sectinfo == NULL)
2305 if(sectinfo->type != XRAN_CP_SECTIONTYPE_1) { /* only supports type 1 */
2306 print_err("Invalid section type in section DB - %d", sectinfo->type);
2310 /* skip, if not scheduled */
2311 if(sym_id < sectinfo->startSymId || sym_id >= sectinfo->startSymId + sectinfo->numSymbol)
/* with compression the IQ width from the section overrides the 16-bit default */
2314 if(sectinfo->compMeth)
2315 iq_sample_size_bits = sectinfo->iqWidth;
2317 print_dbg(">>> sym %2d [%d] type%d, id %d, startPrbc=%d, numPrbc=%d, numSymbol=%d\n", sym_id, next,
2318 sectinfo->type, sectinfo->id, sectinfo->startPrbc,
2319 sectinfo->numPrbc, sectinfo->numSymbol);
2321 p_xran_dev_ctx->tx_mbufs[0].len = 0;
2322 uint16_t len = p_xran_dev_ctx->tx_mbufs[0].len;
2326 //Added for Klocworks
2327 if (len >= MBUF_TABLE_SIZE)
2328 len = MBUF_TABLE_SIZE - 1;
2330 pos = (char*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
2331 mb = p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;
/* locate this section's IQ slice inside the symbol buffer */
2333 p_sec_iq = ((char*)pos + sectinfo->sec_desc[sym_id].iq_buffer_offset);
2334 ext_buff_len = sectinfo->sec_desc[sym_id].iq_buffer_len;
2336 mb = xran_attach_up_ext_buf((int8_t *)mb, (int8_t *) p_sec_iq,
2337 (uint16_t) ext_buff_len,
2338 p_share_data, (enum xran_compression_method) sectinfo->compMeth);
2339 /* first all PRBs */
2340 prepare_symbol_ex(direction, sectinfo->id,
2342 (struct rb_map *)p_sec_iq,
2345 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
2346 frame_id, subframe_id, slot_id, sym_id,
2347 sectinfo->startPrbc, sectinfo->numPrbc,
2349 xran_get_updl_seqid(pHandle, cc_id, ant_id),
2352 /* if we don't need to do any fragmentation */
2353 if (likely (p_xran_dev_ctx->fh_init.mtu >=
2354 sectinfo->numPrbc * (3*iq_sample_size_bits + 1))) {
2355 /* no fragmentation */
2356 p_xran_dev_ctx->tx_mbufs[0].m_table[len] = mb;
/* fragmentation path: each fragment needs its own eCPRI sequence number */
2360 uint8_t * seq_num = xran_get_updl_seqid_addr(pHandle, cc_id, ant_id);
2364 rte_panic("pointer to seq number is NULL [CC %d Ant %d]\n", cc_id, ant_id);
2366 len2 = xran_app_fragment_packet(mb,
2367 &p_xran_dev_ctx->tx_mbufs[0].m_table[len],
2368 (uint16_t)(MBUF_TABLE_SIZE - len),
2369 p_xran_dev_ctx->fh_init.mtu,
2370 p_xran_dev_ctx->direct_pool,
2371 p_xran_dev_ctx->indirect_pool,
2375 /* Free input packet */
2376 rte_pktmbuf_free(mb);
2378 /* If we fail to fragment the packet */
2379 if (unlikely (len2 < 0)){
2380 print_err("len2= %d\n", len2);
/* prepend an Ethernet header to every fragment produced */
2386 for (i = len; i < len + len2; i ++) {
2388 m = p_xran_dev_ctx->tx_mbufs[0].m_table[i];
2389 struct rte_ether_hdr *eth_hdr = (struct rte_ether_hdr *)
2390 rte_pktmbuf_prepend(m, (uint16_t)sizeof(struct rte_ether_hdr));
2391 if (eth_hdr == NULL) {
2392 rte_panic("No headroom in mbuf.\n");
2399 if (unlikely(len > XRAN_MAX_PKT_BURST_PER_SYM)) {
2400 rte_panic("XRAN_MAX_PKT_BURST_PER_SYM\n");
2403 /* Transmit packets */
2404 xran_send_burst(p_xran_dev_ctx, (uint16_t)len, xran_map_ecpriPcid_to_vf(direction, cc_id, ant_id));
2405 p_xran_dev_ctx->tx_mbufs[0].len = 0;
2407 } /* while(section) */
/**
 * Per-symbol TX entry point driven by the timing source: derives frame /
 * subframe / slot / symbol indices from the OTA symbol counter (with the
 * configured advance/delay offset), logs them via MLog, then dispatches each
 * (antenna, CC) pair to the C-Plane-on or C-Plane-off U-Plane generator.
 *
 * NOTE(review): sampling gaps hide early-return bodies and some braces;
 * descriptions below cover visible lines only.
 *
 * @param arg unused here as far as visible (device context is fetched globally)
 */
2412 int32_t xran_process_tx_sym(void *arg)
2417 uint32_t mlogVar[10];
2418 uint32_t mlogVarCnt = 0;
2420 unsigned long t1 = MLogTick();
2422 void *pHandle = NULL;
2425 uint8_t num_eAxc = 0;
2426 uint8_t num_CCPorts = 0;
2427 uint32_t frame_id = 0;
2428 uint32_t subframe_id = 0;
2429 uint32_t slot_id = 0;
2430 uint32_t sym_id = 0;
2431 uint32_t sym_idx = 0;
2434 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
2435 enum xran_in_period inPeriod;
/* nothing to do until PHY buffers were handed over via fronthaul config */
2437 if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
2440 /* O-RU: send symb after OTA time with delay (UL) */
2441 /* O-DU: send symb in advance of OTA time (DL) */
2442 sym_idx = XranOffsetSym(p_xran_dev_ctx->sym_up, xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT*SLOTNUM_PER_SUBFRAME*1000, &inPeriod);
2444 tti = XranGetTtiNum(sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
2445 slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
2446 subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
/* correct the SFN anchor when the offset crossed a 1-second period boundary */
2448 uint16_t sfnSecStart = xran_getSfnSecStart();
2449 if (unlikely(inPeriod == XRAN_IN_NEXT_PERIOD))
2452 sfnSecStart = (sfnSecStart + NUM_OF_FRAMES_PER_SECOND) & 0x3ff;
2454 else if (unlikely(inPeriod == XRAN_IN_PREV_PERIOD))
2457 if (sfnSecStart >= NUM_OF_FRAMES_PER_SECOND)
2459 sfnSecStart -= NUM_OF_FRAMES_PER_SECOND;
2463 sfnSecStart += NUM_OF_FRAMES_PER_SFN_PERIOD - NUM_OF_FRAMES_PER_SECOND;
2466 frame_id = XranGetFrameNum(tti,sfnSecStart,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
2467 // ORAN frameId, 8 bits, [0, 255]
2468 frame_id = (frame_id & 0xff);
2470 sym_id = XranGetSymNum(sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
/* section DB context rotates with the slot within the system frame */
2471 ctx_id = XranGetSlotNum(tti, SLOTS_PER_SYSTEMFRAME) % XRAN_MAX_SECTIONDB_CTX;
2473 print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);
/* MLog trace record of the timing snapshot for this symbol */
2476 mlogVar[mlogVarCnt++] = 0xAAAAAAAA;
2477 mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx;
2478 mlogVar[mlogVarCnt++] = sym_idx;
2479 mlogVar[mlogVarCnt++] = abs(p_xran_dev_ctx->sym_up);
2480 mlogVar[mlogVarCnt++] = tti;
2481 mlogVar[mlogVarCnt++] = frame_id;
2482 mlogVar[mlogVarCnt++] = subframe_id;
2483 mlogVar[mlogVarCnt++] = slot_id;
2484 mlogVar[mlogVarCnt++] = sym_id;
2485 MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
/* Cat B O-RU uses the UL eAxC count; otherwise the common eAxC count */
2488 if(p_xran_dev_ctx->fh_init.io_cfg.id == O_RU && xran_get_ru_category(pHandle) == XRAN_CATEGORY_B) {
2489 num_eAxc = xran_get_num_eAxcUl(pHandle);
2491 num_eAxc = xran_get_num_eAxc(pHandle);
2494 num_CCPorts = xran_get_num_cc(pHandle);
2496 for(ant_id = 0; ant_id < num_eAxc; ant_id++) {
2497 for(cc_id = 0; cc_id < num_CCPorts; cc_id++) {
2498 if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU && p_xran_dev_ctx->enableCP){
2499 retval = xran_process_tx_sym_cp_on(ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id);
2501 retval = xran_process_tx_sym_cp_off(ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id, (ant_id == (num_eAxc - 1)));
2503 } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
2504 } /* for(ant_id = 0; ant_id < num_eAxc; ant_id++) */
2506 MLogTask(PID_PROCESS_TX_SYM, t1, MLogTick());
/**
 * Worker thread that services DPDK timers: raises itself to SCHED_FIFO
 * real-time priority, then polls rte_rdtsc() and (presumably, in the hidden
 * body of the TSC check) runs rte_timer_manage() every TIMER_RESOLUTION_CYCLES
 * until the library state becomes XRAN_STOPPED — TODO confirm; the loop body
 * is not fully visible in this excerpt.
 *
 * @param args unused as far as visible
 */
2510 int xran_packet_and_dpdk_timer_thread(void *args)
2512 struct xran_ethdi_ctx *const ctx = xran_ethdi_get_ctx();
2514 uint64_t prev_tsc = 0;
2515 uint64_t cur_tsc = rte_rdtsc();
2516 uint64_t diff_tsc = cur_tsc - prev_tsc;
2518 struct sched_param sched_param;
2520 printf("%s [CPU %2d] [PID: %6d]\n", __FUNCTION__, rte_lcore_id(), getpid());
2522 memset(&sched_param, 0, sizeof(struct sched_param));
/* run at the library's default real-time priority; warn but continue on failure */
2523 sched_param.sched_priority = XRAN_THREAD_DEFAULT_PRIO;
2525 if ((res = pthread_setschedparam(pthread_self(), SCHED_FIFO, &sched_param)))
2527 printf("priority is not changed: coreId = %d, result1 = %d\n",rte_lcore_id(), res);
2532 cur_tsc = rte_rdtsc();
2533 diff_tsc = cur_tsc - prev_tsc;
/* rate-limit timer servicing to the configured TSC resolution */
2534 if (diff_tsc > TIMER_RESOLUTION_CYCLES) {
/* exit condition: library was stopped */
2539 if (XRAN_STOPPED == xran_if_current_state)
2543 printf("Closing pkts timer thread...\n");
/**
 * One-time library initialization: zeroes and populates the device context
 * from the caller's xran_fh_init, brings up the DPDK ethernet I/O layer with
 * role-dependent MAC ordering (O-DU vs O-RU), initializes rte timers, mbuf
 * pools, per-sector symbol-callback lists and slot types, and derives the
 * GPS alpha/beta time offsets.
 *
 * @param argc/argv         forwarded EAL-style arguments (usage not visible here)
 * @param p_xran_fh_init    caller-provided init configuration (copied into ctx)
 * @param appName           application name (usage not visible in this excerpt)
 * @param pXranLayerHandle  out: opaque handle = device context pointer
 */
2548 int32_t xran_init(int argc, char *argv[],
2549 struct xran_fh_init *p_xran_fh_init, char *appName, void ** pXranLayerHandle)
2554 struct xran_io_cfg *p_io_cfg = (struct xran_io_cfg *)&p_xran_fh_init->io_cfg;
2555 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
2557 int32_t lcore_id = 0;
2559 int64_t offset_sec, offset_nsec;
2561 memset(p_xran_dev_ctx, 0, sizeof(struct xran_device_ctx));
2564 p_xran_dev_ctx->fh_init = *p_xran_fh_init;
2566 printf(" %s: MTU %d\n", __FUNCTION__, p_xran_dev_ctx->fh_init.mtu);
2568 xran_if_current_state = XRAN_INIT;
2570 memcpy(&(p_xran_dev_ctx->eAxc_id_cfg), &(p_xran_fh_init->eAxCId_conf), sizeof(struct xran_eaxcid_config));
2572 p_xran_dev_ctx->enableCP = p_xran_fh_init->enableCP;
2573 p_xran_dev_ctx->enablePrach = p_xran_fh_init->prachEnable;
2574 p_xran_dev_ctx->enableSrs = p_xran_fh_init->srsEnable;
2575 p_xran_dev_ctx->DynamicSectionEna = p_xran_fh_init->DynamicSectionEna;
2577 /* To make sure to set default functions */
2578 p_xran_dev_ctx->send_upmbuf2ring = NULL;
2579 p_xran_dev_ctx->send_cpmbuf2ring = NULL;
2581 xran_register_ethertype_handler(ETHER_TYPE_ECPRI, handle_ecpri_ethertype);
/* id == 0 means O-DU: local/remote MAC order is swapped per role */
2582 if (p_io_cfg->id == 0)
2583 xran_ethdi_init_dpdk_io(p_xran_fh_init->filePrefix,
2586 (struct rte_ether_addr *)p_xran_fh_init->p_o_du_addr,
2587 (struct rte_ether_addr *)p_xran_fh_init->p_o_ru_addr);
2589 xran_ethdi_init_dpdk_io(p_xran_fh_init->filePrefix,
2592 (struct rte_ether_addr *)p_xran_fh_init->p_o_ru_addr,
2593 (struct rte_ether_addr *)p_xran_fh_init->p_o_du_addr);
2595 for(i = 0; i < 10; i++ )
2596 rte_timer_init(&tti_to_phy_timer[i]);
2598 rte_timer_init(&sym_timer);
2599 for (i = 0; i< MAX_NUM_OF_DPDK_TIMERS; i++)
2600 rte_timer_init(&dpdk_timer[i]);
2602 p_xran_dev_ctx->direct_pool = socket_direct_pool;
2603 p_xran_dev_ctx->indirect_pool = socket_indirect_pool;
/* empty per-(sector, symbol) callback lists */
2605 for (i = 0; i< XRAN_MAX_SECTOR_NR; i++){
2606 for (j = 0; j< XRAN_NUM_OF_SYMBOL_PER_SLOT; j++){
2607 LIST_INIT (&p_xran_dev_ctx->sym_cb_list_head[i][j]);
2612 for (uint32_t nCellIdx = 0; nCellIdx < XRAN_MAX_SECTOR_NR; nCellIdx++){
2613 xran_fs_clear_slot_type(nCellIdx);
2616 *pXranLayerHandle = p_xran_dev_ctx;
/* GPS_Beta is in 10 ms units: split into whole seconds + nanoseconds, add alpha */
2618 if(p_xran_fh_init->GPS_Alpha || p_xran_fh_init->GPS_Beta ){
2619 offset_sec = p_xran_fh_init->GPS_Beta / 100; //resolution of beta is 10ms
2620 offset_nsec = (p_xran_fh_init->GPS_Beta - offset_sec * 100) * 1e7 + p_xran_fh_init->GPS_Alpha;
2621 p_xran_dev_ctx->offset_sec = offset_sec;
2622 p_xran_dev_ctx->offset_nsec = offset_nsec;
2624 p_xran_dev_ctx->offset_sec = 0;
2625 p_xran_dev_ctx->offset_nsec = 0;
/**
 * Allocate one 64-byte-aligned XranSectorHandleInfo per requested component
 * carrier and return the handles to the caller; each handle records its CC
 * index and the owning xRAN port.
 *
 * @param pDevHandle             device context (provides xran_port_id)
 * @param nNumInstances          number of CC handles requested (must be > 0)
 * @param pSectorInstanceHandles out array, one entry per instance
 * @return XRAN_STATUS_SUCCESS, XRAN_STATUS_INVALID_PARAM, or
 *         XRAN_STATUS_RESOURCE on allocation failure
 */
2631 int32_t xran_sector_get_instances (void * pDevHandle, uint16_t nNumInstances,
2632 xran_cc_handle_t * pSectorInstanceHandles)
2634 xran_status_t nStatus = XRAN_STATUS_FAIL;
2635 struct xran_device_ctx *pDev = (struct xran_device_ctx *)pDevHandle;
2636 XranSectorHandleInfo *pCcHandle = NULL;
2639 /* Check for the Valid Parameters */
2640 CHECK_NOT_NULL (pSectorInstanceHandles, XRAN_STATUS_INVALID_PARAM);
2642 if (!nNumInstances) {
2643 print_dbg("Instance is not assigned for this function !!! \n");
2644 return XRAN_STATUS_INVALID_PARAM;
2647 for (i = 0; i < nNumInstances; i++) {
2649 /* Allocate Memory for CC handles */
2650 pCcHandle = (XranSectorHandleInfo *) _mm_malloc( /*"xran_cc_handles",*/ sizeof (XranSectorHandleInfo), 64);
/* NOTE(review): handles allocated in earlier iterations are not freed on
 * this failure path — acceptable only if the caller treats this as fatal */
2652 if(pCcHandle == NULL)
2653 return XRAN_STATUS_RESOURCE;
2655 memset (pCcHandle, 0, (sizeof (XranSectorHandleInfo)));
2657 pCcHandle->nIndex = i;
2658 pCcHandle->nXranPort = pDev->xran_port_id;
2660 printf("%s [%d]: CC %d handle %p\n", __FUNCTION__, pDev->xran_port_id, i, pCcHandle);
/* record the handle both in the library table and the caller's array */
2661 pLibInstanceHandles[pDev->xran_port_id][i] = pSectorInstanceHandles[i] = pCcHandle;
2663 printf("Handle: %p Instance: %p\n",
2664 &pSectorInstanceHandles[i], pSectorInstanceHandles[i]);
2667 return XRAN_STATUS_SUCCESS;
/**
 * Memory-manager init stub: intentionally a no-op because buffers come from
 * DPDK mbuf pools created in xran_bm_init(); parameters are accepted for API
 * compatibility only.
 */
2670 int32_t xran_mm_init (void * pHandle, uint64_t nMemorySize,
2671 uint32_t nMemorySegmentSize)
2673 /* we use mbuf from dpdk memory */
/**
 * Create a per-CC rte_pktmbuf pool for one buffer class. Element size is the
 * caller's payload size plus room for Ethernet + eCPRI + radio-app +
 * data-section headers (+256 slack); panics if the element would exceed the
 * 16-bit mbuf data-room limit or if pool creation fails.
 *
 * @param pHandle           CC handle (XranSectorHandleInfo) the pool belongs to
 * @param pPoolIndex        out: index of the newly created pool in the handle
 * @param nNumberOfBuffers  number of mbufs in the pool
 * @param nBufferSize       usable payload bytes per buffer
 */
2677 int32_t xran_bm_init (void * pHandle, uint32_t * pPoolIndex, uint32_t nNumberOfBuffers, uint32_t nBufferSize)
2679 XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
2680 uint32_t nAllocBufferSize;
2682 char pool_name[RTE_MEMPOOL_NAMESIZE];
/* unique pool name per (port, CC, pool index) */
2684 snprintf(pool_name, RTE_MEMPOOL_NAMESIZE, "ru_%d_cc_%d_idx_%d",
2685 pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex);
2687 nAllocBufferSize = nBufferSize + sizeof(struct rte_ether_hdr) +
2688 sizeof (struct xran_ecpri_hdr) +
2689 sizeof (struct radio_app_common_hdr) +
2690 sizeof(struct data_section_hdr) + 256;
/* mbuf data room size is uint16_t, so the padded element must fit in 16 bits */
2692 if(nAllocBufferSize >= UINT16_MAX) {
2693 rte_panic("nAllocBufferSize is failed [ handle %p %d %d ] [nPoolIndex %d] nNumberOfBuffers %d nBufferSize %d nAllocBufferSize %d\n",
2694 pXranCc, pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex, nNumberOfBuffers, nBufferSize, nAllocBufferSize);
2698 printf("%s: [ handle %p %d %d ] [nPoolIndex %d] nNumberOfBuffers %d nBufferSize %d\n", pool_name,
2699 pXranCc, pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex, nNumberOfBuffers, nBufferSize);
2701 pXranCc->p_bufferPool[pXranCc->nBufferPoolIndex] = rte_pktmbuf_pool_create(pool_name, nNumberOfBuffers,
2702 MBUF_CACHE, 0, nAllocBufferSize, rte_socket_id());
2704 if(pXranCc->p_bufferPool[pXranCc->nBufferPoolIndex] == NULL){
2705 rte_panic("rte_pktmbuf_pool_create failed [ handle %p %d %d ] [nPoolIndex %d] nNumberOfBuffers %d nBufferSize %d errno %s\n",
2706 pXranCc, pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex, nNumberOfBuffers, nBufferSize, rte_strerror(rte_errno));
2710 pXranCc->bufferPoolElmSz[pXranCc->nBufferPoolIndex] = nBufferSize;
2711 pXranCc->bufferPoolNumElm[pXranCc->nBufferPoolIndex] = nNumberOfBuffers;
2713 printf("CC:[ handle %p ru %d cc_idx %d ] [nPoolIndex %d] mb pool %p \n",
2714 pXranCc, pXranCc->nXranPort, pXranCc->nIndex,
2715 pXranCc->nBufferPoolIndex, pXranCc->p_bufferPool[pXranCc->nBufferPoolIndex]);
/* hand back the pool slot used, then advance to the next free slot */
2717 *pPoolIndex = pXranCc->nBufferPoolIndex++;
/**
 * Allocate one buffer from a CC's mbuf pool and return both the mbuf (ppCtrl)
 * and a pointer to the IQ payload area (ppData) that lies past the reserved
 * Ethernet + eCPRI + radio-app + data-section headers.
 *
 * @param pHandle    CC handle owning the pool
 * @param nPoolIndex pool slot previously returned by xran_bm_init()
 * @param ppData     out: pointer to the IQ data region inside the mbuf
 * @param ppCtrl     out: the rte_mbuf itself (used later to free the buffer)
 */
2722 int32_t xran_bm_allocate_buffer(void * pHandle, uint32_t nPoolIndex, void **ppData, void **ppCtrl)
2724 XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
2728 struct rte_mbuf * mb = rte_pktmbuf_alloc(pXranCc->p_bufferPool[nPoolIndex]);
/* reserve full element size, then make room for the Ethernet header in front */
2731 char * start = rte_pktmbuf_append(mb, pXranCc->bufferPoolElmSz[nPoolIndex]);
2732 char * ethhdr = rte_pktmbuf_prepend(mb, sizeof(struct rte_ether_hdr));
2734 if(start && ethhdr){
2735 char * iq_offset = rte_pktmbuf_mtod(mb, char * );
/* skip the xRAN protocol headers to reach the IQ payload area */
2737 iq_offset = iq_offset + sizeof(struct rte_ether_hdr) +
2738 sizeof (struct xran_ecpri_hdr) +
2739 sizeof (struct radio_app_common_hdr) +
2740 sizeof(struct data_section_hdr);
2742 if (0) /* if compression */
2743 iq_offset += sizeof (struct data_section_compression_hdr);
2745 *ppData = (void *)iq_offset;
2746 *ppCtrl = (void *)mb;
2748 print_err("[nPoolIndex %d] start ethhdr failed \n", nPoolIndex );
2752 print_err("[nPoolIndex %d] mb alloc failed \n", nPoolIndex );
2756 if (*ppData == NULL){
2757 print_err("[nPoolIndex %d] rte_pktmbuf_append for %d failed \n", nPoolIndex, pXranCc->bufferPoolElmSz[nPoolIndex]);
/**
 * Return a buffer allocated by xran_bm_allocate_buffer() to its pool.
 * Only pCtrl (the rte_mbuf) is used; pData is accepted for API symmetry.
 */
2764 int32_t xran_bm_free_buffer(void * pHandle, void *pData, void *pCtrl)
2766 XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
2769 rte_pktmbuf_free(pCtrl);
/**
 * Register the PHY's fronthaul TX/RX IQ and PRB-map buffer lists for one CC,
 * plus the completion callback. For every (buffer slot, antenna) pair the
 * default per-symbol buffer list is installed first and then overridden by the
 * caller's list when one is supplied; finally xran2phy_mem_ready is raised so
 * the TX/RX paths start running.
 *
 * @param pHandle       CC handle (selects the CC index i)
 * @param pSrcBuffer    TX IQ buffer lists per [antenna][slot buffer]
 * @param pSrcCpBuffer  TX PRB-map (C-Plane) buffer lists
 * @param pDstBuffer    RX IQ buffer lists
 * @param pDstCpBuffer  RX PRB-map buffer lists
 * @param pCallback     notification callback invoked by the RX/TX machinery
 * @return XRAN_STATUS_SUCCESS or XRAN_STATUS_FAIL on bad handle/callback
 */
2774 int32_t xran_5g_fronthault_config (void * pHandle,
2775 struct xran_buffer_list *pSrcBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
2776 struct xran_buffer_list *pSrcCpBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
2777 struct xran_buffer_list *pDstBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
2778 struct xran_buffer_list *pDstCpBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
2779 xran_transport_callback_fn pCallback,
2782 XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
2784 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
2786 print_dbg("%s\n",__FUNCTION__);
2790 printf("Handle is NULL!\n");
2791 return XRAN_STATUS_FAIL;
2794 if (pCallback == NULL)
2796 printf ("no callback\n");
2797 return XRAN_STATUS_FAIL;
2800 i = pXranCc->nIndex;
2802 for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
2804 for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
/* TX IQ buffers */
2807 p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].bValid = 0;
2808 p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
2809 p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
2810 p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
2811 p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
2812 p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulTxBuffers[j][i][z][0];
2814 if(pSrcBuffer[z][j])
2815 p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList = *pSrcBuffer[z][j];
/* TX PRB-map buffers */
2818 p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
2819 p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
2820 p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
2821 p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
2822 p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
2823 p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulTxPrbMapBuffers[j][i][z][0];
2825 if(pSrcCpBuffer[z][j])
2826 p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList = *pSrcCpBuffer[z][j];
/* RX IQ buffers */
2830 p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].bValid = 0;
2831 p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
2832 p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
2833 p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
2834 p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
2835 p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulRxBuffers[j][i][z][0];
2837 if(pDstBuffer[z][j])
2838 p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList = *pDstBuffer[z][j];
/* RX PRB-map buffers */
2841 p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
2842 p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
2843 p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
2844 p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
2845 p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
2846 p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulRxPrbMapBuffers[j][i][z][0];
2848 if(pDstCpBuffer[z][j])
2849 p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList = *pDstCpBuffer[z][j];
2854 p_xran_dev_ctx->pCallback[i] = pCallback;
2855 p_xran_dev_ctx->pCallbackTag[i] = pCallbackTag;
/* signal TX/RX machinery that PHY buffers are in place */
2857 p_xran_dev_ctx->xran2phy_mem_ready = 1;
2859 return XRAN_STATUS_SUCCESS;
/**
 * Register the PHY's PRACH RX buffer lists and completion callback for one CC.
 * Mirrors xran_5g_fronthault_config() but only for the PRACH destination
 * buffers: defaults are installed and then overridden by the caller's lists.
 *
 * @param pHandle    CC handle (selects CC index i)
 * @param pDstBuffer PRACH RX buffer lists per [antenna][slot buffer]
 * @param pCallback  PRACH completion callback
 * @return XRAN_STATUS_SUCCESS or XRAN_STATUS_FAIL on bad handle/callback
 */
2862 int32_t xran_5g_prach_req (void * pHandle,
2863 struct xran_buffer_list *pDstBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
2864 xran_transport_callback_fn pCallback,
2867 XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
2869 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
2873 printf("Handle is NULL!\n");
2874 return XRAN_STATUS_FAIL;
2876 if (pCallback == NULL)
2878 printf ("no callback\n");
2879 return XRAN_STATUS_FAIL;
2882 i = pXranCc->nIndex;
2884 for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
2886 for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
2887 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].bValid = 0;
2888 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
2889 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
2890 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
2891 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_MAX_ANTENNA_NR; // ant number.
2892 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFHPrachRxBuffers[j][i][z][0];
2893 if(pDstBuffer[z][j])
2894 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList = *pDstBuffer[z][j];
2898 p_xran_dev_ctx->pPrachCallback[i] = pCallback;
2899 p_xran_dev_ctx->pPrachCallbackTag[i] = pCallbackTag;
2901 return XRAN_STATUS_SUCCESS;
/**
 * Register the PHY's SRS RX buffer lists and completion callback for one CC.
 * Same pattern as xran_5g_prach_req() but sized per antenna-array element
 * (XRAN_MAX_ANT_ARRAY_ELM_NR) instead of per antenna port.
 *
 * @param pHandle    CC handle (selects CC index i)
 * @param pDstBuffer SRS RX buffer lists per [array element][slot buffer]
 * @param pCallback  SRS completion callback
 * @return XRAN_STATUS_SUCCESS or XRAN_STATUS_FAIL on bad handle/callback
 */
2905 int32_t xran_5g_srs_req (void * pHandle,
2906 struct xran_buffer_list *pDstBuffer[XRAN_MAX_ANT_ARRAY_ELM_NR][XRAN_N_FE_BUF_LEN],
2907 xran_transport_callback_fn pCallback,
2910 XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
2912 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
2916 printf("Handle is NULL!\n");
2917 return XRAN_STATUS_FAIL;
2919 if (pCallback == NULL)
2921 printf ("no callback\n");
2922 return XRAN_STATUS_FAIL;
2925 i = pXranCc->nIndex;
2927 for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
2929 for(z = 0; z < XRAN_MAX_ANT_ARRAY_ELM_NR; z++){
2930 p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].bValid = 0;
2931 p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
2932 p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
2933 p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
2934 p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_MAX_ANT_ARRAY_ELM_NR; // ant number.
2935 p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFHSrsRxBuffers[j][i][z][0];
2936 if(pDstBuffer[z][j])
2937 p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList = *pDstBuffer[z][j];
2941 p_xran_dev_ctx->pSrsCallback[i] = pCallback;
2942 p_xran_dev_ctx->pSrsCallbackTag[i] = pCallbackTag;
2944 return XRAN_STATUS_SUCCESS;
/**
 * Report timing-core utilization counters (total ticks, busy ticks, core id);
 * when 'clear' is set the counters are reset (only the total reset is visible
 * in this excerpt — presumably xran_used_tick is cleared too).
 */
2947 uint32_t xran_get_time_stats(uint64_t *total_time, uint64_t *used_time, uint32_t *core_used, uint32_t clear)
2949 *total_time = xran_total_tick;
2950 *used_time = xran_used_tick;
2951 *core_used = xran_core_used;
2955 xran_total_tick = 0;
/**
 * Allocate a cache-line-aligned external buffer from DPDK hugepage memory.
 * Pair with xran_free(); required for buffers later translated via
 * rte_malloc_virt2iova().
 */
2962 void * xran_malloc(size_t buf_len)
2964 return rte_malloc("External buffer", buf_len, RTE_CACHE_LINE_SIZE);
/** Release memory obtained from xran_malloc() back to the DPDK heap. */
2967 void xran_free(void *addr)
2969 return rte_free(addr);
/**
 * Advance a raw buffer pointer past the space reserved for mbuf headroom and
 * the C-Plane (section type 1) headers, then round up to 64-byte alignment —
 * yields the address where C-Plane payload should be placed.
 */
2973 uint8_t *xran_add_cp_hdr_offset(uint8_t *dst)
2975 dst += (RTE_PKTMBUF_HEADROOM +
2976 sizeof(struct xran_ecpri_hdr) +
2977 sizeof(struct xran_cp_radioapp_section1_header) +
2978 sizeof(struct xran_cp_radioapp_section1));
2980 dst = RTE_PTR_ALIGN_CEIL(dst, 64);
/**
 * Advance a raw buffer pointer past mbuf headroom and the U-Plane headers
 * (eCPRI + radio-app common + data-section, plus the compression header when
 * compMethod != NONE), then round up to 64-byte alignment — yields the IQ
 * payload address. U-Plane counterpart of xran_add_cp_hdr_offset().
 */
2985 uint8_t *xran_add_hdr_offset(uint8_t *dst, int16_t compMethod)
2987 dst+= (RTE_PKTMBUF_HEADROOM +
2988 sizeof (struct xran_ecpri_hdr) +
2989 sizeof (struct radio_app_common_hdr) +
2990 sizeof(struct data_section_hdr));
2992 if(compMethod != XRAN_COMPMETHOD_NONE)
2993 dst += sizeof (struct data_section_compression_hdr);
2995 dst = RTE_PTR_ALIGN_CEIL(dst, 64);
/**
 * Apply the runtime fronthaul configuration and start the library's threads:
 * copies xran_fh_config into the device context, validates CC count and
 * IQ/byte order, sets up PRACH (5G NR or LTE) and SRS, initializes the
 * C-Plane section DB / section & sequence ids, derives TTI interval from
 * numerology, programs slot types, wires default mbuf-send hooks, and — when
 * ethernet ports are open — launches the timing thread and optional per-core
 * ring-processing workers.
 *
 * @param pHandle CC/device handle used for numerology and DB init
 * @param pConf   runtime configuration (copied; nCC may be clamped in place)
 */
3000 int32_t xran_open(void *pHandle, struct xran_fh_config* pConf)
3003 uint8_t nNumerology = 0;
3004 int32_t lcore_id = 0;
3005 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
3006 struct xran_fh_config *pFhCfg;
3007 pFhCfg = &(p_xran_dev_ctx->fh_cfg);
3009 memcpy(pFhCfg, pConf, sizeof(struct xran_fh_config));
3011 if(pConf->log_level)
3012 printf(" %s: %s Category %s\n", __FUNCTION__,
3013 (pFhCfg->ru_conf.xranTech == XRAN_RAN_5GNR) ? "5G NR" : "LTE",
3014 (pFhCfg->ru_conf.xranCat == XRAN_CATEGORY_A) ? "A" : "B");
3016 nNumerology = xran_get_conf_numerology(pHandle);
/* clamp (not reject) an over-limit CC count */
3018 if (pConf->nCC > XRAN_MAX_SECTOR_NR)
3020 if(pConf->log_level)
3021 printf("Number of cells %d exceeds max number supported %d!\n", pConf->nCC, XRAN_MAX_SECTOR_NR);
3022 pConf->nCC = XRAN_MAX_SECTOR_NR;
/* only I/Q order + network byte order are supported by this build */
3025 if(pConf->ru_conf.iqOrder != XRAN_I_Q_ORDER
3026 || pConf->ru_conf.byteOrder != XRAN_NE_BE_BYTE_ORDER ){
3028 print_err("Byte order and/or IQ order is not supported [IQ %d byte %d]\n", pConf->ru_conf.iqOrder, pConf->ru_conf.byteOrder);
3029 return XRAN_STATUS_FAIL;
3032 /* setup PRACH configuration for C-Plane */
3033 if(pConf->ru_conf.xranTech == XRAN_RAN_5GNR)
3034 xran_init_prach(pConf, p_xran_dev_ctx);
3035 else if (pConf->ru_conf.xranTech == XRAN_RAN_LTE)
3036 xran_init_prach_lte(pConf, p_xran_dev_ctx);
3038 xran_init_srs(pConf, p_xran_dev_ctx);
3040 xran_cp_init_sectiondb(pHandle);
3041 xran_init_sectionid(pHandle);
3042 xran_init_seqid(pHandle);
3044 if(pConf->ru_conf.xran_max_frame) {
3045 xran_max_frame = pConf->ru_conf.xran_max_frame;
3046 printf("xran_max_frame %d\n", xran_max_frame);
3049 interval_us = xran_fs_get_tti_interval(nNumerology);
3051 if(pConf->log_level){
3052 printf("%s: interval_us=%ld\n", __FUNCTION__, interval_us);
3054 timing_set_numerology(nNumerology);
3056 for(i = 0 ; i <pConf->nCC; i++){
3057 xran_fs_set_slot_type(i, pConf->frame_conf.nFrameDuplexType, pConf->frame_conf.nTddPeriod,
3058 pConf->frame_conf.sSlotConfig);
3061 xran_fs_slot_limit_init(xran_fs_get_tti_interval(nNumerology));
3063 if(xran_ethdi_get_ctx()->io_cfg.bbdev_mode != XRAN_BBDEV_NOT_USED){
3064 p_xran_dev_ctx->bbdev_dec = pConf->bbdev_dec;
3065 p_xran_dev_ctx->bbdev_enc = pConf->bbdev_enc;
3068 /* if send_xpmbuf2ring needs to be changed from default functions,
3069  * then those should be set between xran_init and xran_open */
3070 if(p_xran_dev_ctx->send_cpmbuf2ring == NULL)
3071 p_xran_dev_ctx->send_cpmbuf2ring = xran_ethdi_mbuf_send_cp;
3072 if(p_xran_dev_ctx->send_upmbuf2ring == NULL)
3073 p_xran_dev_ctx->send_upmbuf2ring = xran_ethdi_mbuf_send;
3075 /* Start packet processing thread */
3076 if((uint16_t)xran_ethdi_get_ctx()->io_cfg.port[XRAN_UP_VF] != 0xFFFF &&
3077 (uint16_t)xran_ethdi_get_ctx()->io_cfg.port[XRAN_CP_VF] != 0xFFFF ){
3078 if(/*pConf->log_level*/1){
3079 printf("XRAN_UP_VF: 0x%04x\n", xran_ethdi_get_ctx()->io_cfg.port[XRAN_UP_VF]);
3080 printf("XRAN_CP_VF: 0x%04x\n", xran_ethdi_get_ctx()->io_cfg.port[XRAN_CP_VF]);
3084 if (rte_eal_remote_launch(xran_timing_source_thread, xran_dev_get_ctx(), xran_ethdi_get_ctx()->io_cfg.timing_core))
3085 rte_panic("thread_run() failed to start\n");
3087 /* Start packet processing thread */
3088 if(xran_ethdi_get_ctx()->io_cfg.pkt_proc_core){
3089 /* start pkt workers */
/* pkt_proc_core is a bitmask of worker lcores; walk all CPUs and launch on set bits */
3090 uint64_t nWorkerCore = 1LL;
3091 uint32_t coreNum = sysconf(_SC_NPROCESSORS_CONF);
3092 for (i = 0; i < coreNum; i++) {
3093 if (nWorkerCore & (uint64_t)xran_ethdi_get_ctx()->io_cfg.pkt_proc_core) {
3094 if (rte_eal_remote_launch(ring_processing_thread, NULL, i))
3095 rte_panic("ring_processing_thread() failed to start\n");
3096 xran_ethdi_get_ctx()->pkt_wrk_cfg[i].f = ring_processing_thread;
3097 xran_ethdi_get_ctx()->pkt_wrk_cfg[i].arg = NULL;
3098 xran_ethdi_get_ctx()->pkt_wrk_cfg[i].state = 1;
3099 if(p_xran_dev_ctx->pkt_proc_core_id == 0)
3100 p_xran_dev_ctx->pkt_proc_core_id = i;
3102 nWorkerCore = nWorkerCore << 1;
3105 } else if(pConf->log_level){
3106 printf("Eth port was not open. Processing thread was not started\n");
3112 int32_t xran_start(void *pHandle)
3114 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
3115 if(xran_get_if_state() == XRAN_RUNNING) {
3116 print_err("Already STARTED!!");
3120 if(p_xran_dev_ctx->fh_init.debugStop){
3121 printf("Set debug stop %d, debug stop count %d\n", p_xran_dev_ctx->fh_init.debugStop, p_xran_dev_ctx->fh_init.debugStopCount);
3122 timing_set_debug_stop(p_xran_dev_ctx->fh_init.debugStop, p_xran_dev_ctx->fh_init.debugStopCount);
3125 xran_if_current_state = XRAN_RUNNING;
3129 int32_t xran_stop(void *pHandle)
3131 if(xran_get_if_state() == XRAN_STOPPED) {
3132 print_err("Already STOPPED!!");
3136 xran_if_current_state = XRAN_STOPPED;
3140 int32_t xran_close(void *pHandle)
3142 xran_if_current_state = XRAN_STOPPED;
3143 //TODO: fix memory leak xran_cp_free_sectiondb(pHandle);
3144 //rte_eal_mp_wait_lcore();
3145 //xran_ethdi_ports_stats();
3147 #ifdef RTE_LIBRTE_PDUMP
3148 /* uninitialize packet capture framework */
3154 int32_t xran_mm_destroy (void * pHandle)
3156 if(xran_get_if_state() == XRAN_RUNNING) {
3157 print_err("Please STOP first !!");
3161 /* functionality is not yet implemented */
3165 int32_t xran_reg_sym_cb(void *pHandle, xran_callback_sym_fn symCb, void * symCbParam, uint8_t symb, uint8_t ant)
3167 if(xran_get_if_state() == XRAN_RUNNING) {
3168 print_err("Cannot register callback while running!!\n");
3172 /* functionality is not yet implemented */
3173 print_err("Functionality is not yet implemented !");
3177 int32_t xran_reg_physide_cb(void *pHandle, xran_fh_tti_callback_fn Cb, void *cbParam, int skipTtiNum, enum callback_to_phy_id id)
3179 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
3181 if(xran_get_if_state() == XRAN_RUNNING) {
3182 print_err("Cannot register callback while running!!\n");
3186 p_xran_dev_ctx->ttiCb[id] = Cb;
3187 p_xran_dev_ctx->TtiCbParam[id] = cbParam;
3188 p_xran_dev_ctx->SkipTti[id] = skipTtiNum;
3193 /* send_cpmbuf2ring and send_upmbuf2ring should be set between xran_init and xran_open
3194 * each cb will be set by default duing open if it is set by NULL */
3195 int xran_register_cb_mbuf2ring(xran_ethdi_mbuf_send_fn mbuf_send_cp, xran_ethdi_mbuf_send_fn mbuf_send_up)
3197 struct xran_device_ctx *p_xran_dev_ctx;
3199 if(xran_get_if_state() == XRAN_RUNNING) {
3200 print_err("Cannot register callback while running!!\n");
3204 p_xran_dev_ctx = xran_dev_get_ctx();
3206 p_xran_dev_ctx->send_cpmbuf2ring = mbuf_send_cp;
3207 p_xran_dev_ctx->send_upmbuf2ring = mbuf_send_up;
3213 int32_t xran_get_slot_idx (uint32_t *nFrameIdx, uint32_t *nSubframeIdx, uint32_t *nSlotIdx, uint64_t *nSecond)
3217 tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
3218 *nSlotIdx = (uint32_t)XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
3219 *nSubframeIdx = (uint32_t)XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
3220 *nFrameIdx = (uint32_t)XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
3221 *nSecond = timing_get_current_second();
3228 * @brief Get the configuration of eAxC ID
3230 * @return the pointer of configuration
3232 inline struct xran_eaxcid_config *xran_get_conf_eAxC(void *pHandle)
3234 return (&(xran_dev_get_ctx()->eAxc_id_cfg));
3238 * @brief Get the configuration of the total number of beamforming weights on RU
3240 * @return Configured the number of beamforming weights
3242 inline uint8_t xran_get_conf_num_bfweights(void *pHandle)
3244 return (xran_dev_get_ctx()->fh_init.totalBfWeights);
3248 * @brief Get the configuration of subcarrier spacing for PRACH
3250 * @return subcarrier spacing value for PRACH
3252 inline uint8_t xran_get_conf_prach_scs(void *pHandle)
3254 return (xran_lib_get_ctx_fhcfg()->prach_conf.nPrachSubcSpacing);
3258 * @brief Get the configuration of FFT size for RU
3260 * @return FFT size value for RU
3262 inline uint8_t xran_get_conf_fftsize(void *pHandle)
3264 return (xran_lib_get_ctx_fhcfg()->ru_conf.fftSize);
3268 * @brief Get the configuration of nummerology
3270 * @return Configured numerology
3272 inline uint8_t xran_get_conf_numerology(void *pHandle)
3274 return (xran_lib_get_ctx_fhcfg()->frame_conf.nNumerology);
3278 * @brief Get the configuration of IQ bit width for RU
3280 * @return IQ bit width for RU
3282 inline uint8_t xran_get_conf_iqwidth(void *pHandle)
3284 struct xran_fh_config *pFhCfg;
3286 pFhCfg = xran_lib_get_ctx_fhcfg();
3287 return ((pFhCfg->ru_conf.iqWidth==16)?0:pFhCfg->ru_conf.iqWidth);
3291 * @brief Get the configuration of compression method for RU
3293 * @return Compression method for RU
3295 inline uint8_t xran_get_conf_compmethod(void *pHandle)
3297 return (xran_lib_get_ctx_fhcfg()->ru_conf.compMeth);
3302 * @brief Get the configuration of the number of component carriers
3304 * @return Configured the number of component carriers
3306 inline uint8_t xran_get_num_cc(void *pHandle)
3308 return (xran_lib_get_ctx_fhcfg()->nCC);
3312 * @brief Get the configuration of the number of antenna for UL
3314 * @return Configured the number of antenna
3316 inline uint8_t xran_get_num_eAxc(void *pHandle)
3318 return (xran_lib_get_ctx_fhcfg()->neAxc);
3322 * @brief Get configuration of O-RU (Cat A or Cat B)
3324 * @return Configured the number of antenna
3326 inline enum xran_category xran_get_ru_category(void *pHandle)
3328 return (xran_lib_get_ctx_fhcfg()->ru_conf.xranCat);
3332 * @brief Get the configuration of the number of antenna
3334 * @return Configured the number of antenna
3336 inline uint8_t xran_get_num_eAxcUl(void *pHandle)
3338 return (xran_lib_get_ctx_fhcfg()->neAxcUl);
3342 * @brief Get the configuration of the number of antenna elements
3344 * @return Configured the number of antenna
3346 inline uint8_t xran_get_num_ant_elm(void *pHandle)
3348 return (xran_lib_get_ctx_fhcfg()->nAntElmTRx);
3351 int32_t xran_get_common_counters(void *pXranLayerHandle, struct xran_common_counters *pStats)
3353 struct xran_device_ctx* pDev = (struct xran_device_ctx*)pXranLayerHandle;
3355 if(pStats && pDev) {
3356 *pStats = pDev->fh_counters;
3357 return XRAN_STATUS_SUCCESS;
3359 return XRAN_STATUS_INVALID_PARAM;