1 /******************************************************************************
3 * Copyright (c) 2019 Intel.
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
9 * http://www.apache.org/licenses/LICENSE-2.0
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
17 *******************************************************************************/
/**
 * @brief XRAN main functionality module
 * @ingroup group_source_xran
 * @author Intel Corporation
 */
32 #include <sys/queue.h>
39 #include <rte_common.h>
41 #include <rte_errno.h>
42 #include <rte_lcore.h>
43 #include <rte_cycles.h>
44 #include <rte_memory.h>
45 #include <rte_memzone.h>
49 #include "xran_fh_o_du.h"
53 #include "xran_up_api.h"
54 #include "xran_cp_api.h"
55 #include "xran_sync_api.h"
56 #include "xran_lib_mlog_tasks_id.h"
57 #include "xran_timer.h"
58 #include "xran_common.h"
59 #include "xran_frame_struct.h"
60 #include "xran_printf.h"
61 #include "xran_app_frag.h"
63 #include "xran_mlog_lnx.h"
/* Divide X by Y, rounding any remainder up (ceiling division for
 * non-negative X and Y > 0). Arguments are fully parenthesized so
 * multi-token expressions expand correctly. */
#define DIV_ROUND_OFFSET(X,Y) ( ((X)/(Y)) + (((X)%(Y)) ? 1 : 0) )
/* Distance (in symbols) from offSym to otaSym on a circular timeline of
 * numSymTotal symbols:
 *   - if offSym is ahead of otaSym, wrap: otaSym + numSymTotal - offSym;
 *   - if the plain difference reaches a full wrap, reduce it by numSymTotal;
 *   - otherwise the plain difference.
 * All operands are cast to int32_t (the original mixed in one (uint32_t)
 * cast, silently promoting the subtraction to unsigned) and every argument
 * is parenthesized. */
#define XranOffsetSym(offSym, otaSym, numSymTotal)                              \
    (((int32_t)(offSym) > (int32_t)(otaSym)) ?                                  \
        ((int32_t)(otaSym) + (int32_t)(numSymTotal) - (int32_t)(offSym)) :      \
        ((((int32_t)(otaSym) - (int32_t)(offSym)) >= (int32_t)(numSymTotal)) ?  \
            (((int32_t)(otaSym) - (int32_t)(offSym)) - (int32_t)(numSymTotal)) : \
            ((int32_t)(otaSym) - (int32_t)(offSym))))
/* Number of ping-pong timer contexts. */
#define MAX_NUM_OF_XRAN_CTX (2)
/* Advance/rewind a context index with wrap-around in [0, MAX_NUM_OF_XRAN_CTX).
 * ctx is parenthesized so expressions such as XranIncrementCtx(i + 1) expand
 * correctly. */
#define XranIncrementCtx(ctx) (((ctx) >= (MAX_NUM_OF_XRAN_CTX - 1)) ? 0 : ((ctx) + 1))
#define XranDecrementCtx(ctx) (((ctx) == 0) ? (MAX_NUM_OF_XRAN_CTX - 1) : ((ctx) - 1))
/* Pool size of reusable DPDK timers for per-symbol callbacks. */
#define MAX_NUM_OF_DPDK_TIMERS (10)
/* Advance/rewind a DPDK timer index with wrap-around in
 * [0, MAX_NUM_OF_DPDK_TIMERS); ctx parenthesized for safe expansion. */
#define DpdkTimerIncrementCtx(ctx) (((ctx) >= (MAX_NUM_OF_DPDK_TIMERS - 1)) ? 0 : ((ctx) + 1))
#define DpdkTimerDecrementCtx(ctx) (((ctx) == 0) ? (MAX_NUM_OF_DPDK_TIMERS - 1) : ((ctx) - 1))
/* Difference between Unix seconds and GPS seconds.
   GPS epoch: 1980.1.6 00:00:00 (UTC); Unix time epoch: 1970.1.1 00:00:00 UTC.
   Value was calculated on Sep. 6 2019 and needs to change if the International
   Earth Rotation and Reference Systems Service (IERS) adds more leap seconds.
   1970.1.1 - 1980.1.6: 3657 days
   3657*24*3600 = 315 964 800 seconds (Unix seconds value at 1980.1.6 00:00:00 UTC).
   18 leap seconds have been inserted after 1980.1.6 00:00:00 (UTC), which means
   GPS is 18 larger: 315 964 800 - 18 = 315 964 782. */
/* Unix-to-GPS epoch offset in seconds; derivation in the comment above. */
90 #define UNIX_TO_GPS_SECONDS_OFFSET 315964782UL
/* 10 ms radio frames per second. */
91 #define NUM_OF_FRAMES_PER_SECOND 100
/* ------------------------------------------------------------------------
 * Module-scope state and forward declarations.
 * NOTE(review): this source copy carries embedded line numbers and has lines
 * elided (numbering gaps) — e.g. the closing "};" of struct xran_timer_ctx is
 * not visible here. Comments below describe only what is visible.
 * ------------------------------------------------------------------------ */
93 //#define XRAN_CREATE_RBMAP /**< generate slot map base on symbols */
/* Per-timer-context bookkeeping: which TTI the context should process. */
96 struct xran_timer_ctx {
97 uint32_t tti_to_process;
/* Per-port/per-sector instance handles returned to the application. */
100 static xran_cc_handle_t pLibInstanceHandles[XRAN_PORTS_NUM][XRAN_MAX_SECTOR_NR] = {NULL};
/* One device context per xRAN port; index 0 is used by xran_dev_get_ctx(). */
101 static struct xran_device_ctx g_xran_dev_ctx[XRAN_PORTS_NUM] = { 0 };
103 struct xran_timer_ctx timer_ctx[MAX_NUM_OF_XRAN_CTX];
/* DPDK timers: TTI-to-PHY delivery, symbol timer and a reusable pool. */
105 static struct rte_timer tti_to_phy_timer[10];
106 static struct rte_timer sym_timer;
107 static struct rte_timer dpdk_timer[MAX_NUM_OF_DPDK_TIMERS];
/* TTI interval in microseconds (default 1 ms / numerology 0). */
109 uint64_t interval_us = 1000;
111 uint32_t xran_lib_ota_tti = 0; /**< Slot index in a second [0:(1000000/TTI-1)] */
112 uint32_t xran_lib_ota_sym = 0; /**< Symbol index in a slot [0:13] */
113 uint32_t xran_lib_ota_sym_idx = 0; /**< Symbol index in a second [0 : 14*(1000000/TTI)-1]
114 where TTI is TTI interval in microseconds */
115 uint16_t xran_SFN_at_Sec_Start = 0; /**< SFN at current second start */
116 uint16_t xran_max_frame = 1023; /**< value of max frame used. expected to be 99 (old compatibility mode) and 1023 as per section 9.7.2 System Frame Number Calculation */
/* Next-sequence-id trackers per cell/direction/eAxC. */
118 static uint8_t xran_cp_seq_id_num[XRAN_MAX_CELLS_PER_PORT][XRAN_DIR_MAX][XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR]; /* XRAN_MAX_ANTENNA_NR * 2 for PUSCH and PRACH */
119 static uint8_t xran_updl_seq_id_num[XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR];
120 static uint8_t xran_upul_seq_id_num[XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR]; /**< PUSCH, PRACH, SRS for Cat B */
/* Section-id allocation state; curslot value 255 marks "no slot seen yet". */
122 static uint8_t xran_section_id_curslot[XRAN_DIR_MAX][XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR * 2+ XRAN_MAX_ANT_ARRAY_ELM_NR];
123 static uint16_t xran_section_id[XRAN_DIR_MAX][XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR * 2+ XRAN_MAX_ANT_ARRAY_ELM_NR];
/* Core-usage accounting ticks. */
124 static uint64_t xran_total_tick = 0, xran_used_tick = 0;
125 static uint32_t xran_core_used = 0;
126 static int32_t first_call = 0;
/* External-buffer free callback for mbuf attach; return type elided in this
 * copy — presumably void per DPDK's rte_mbuf_extbuf_free_callback_t. */
130 extbuf_free_callback(void *addr __rte_unused, void *opaque __rte_unused)
134 static struct rte_mbuf_ext_shared_info share_data[XRAN_N_FE_BUF_LEN];
/* Forward declarations for timer callbacks and RX/TX symbol processing. */
136 void xran_timer_arm(struct rte_timer *tim, void* arg);
138 int32_t xran_process_tx_sym(void *arg);
140 int32_t xran_process_rx_sym(void *arg,
141 struct rte_mbuf *mbuf,
157 int32_t xran_process_prach_sym(void *arg,
158 struct rte_mbuf *mbuf,
174 int32_t xran_process_srs_sym(void *arg,
175 struct rte_mbuf *mbuf,
192 void tti_ota_cb(struct rte_timer *tim, void *arg);
193 void tti_to_phy_cb(struct rte_timer *tim, void *arg);
194 void xran_timer_arm_ex(struct rte_timer *tim, void* CbFct, void *CbArg, unsigned tim_lcore);
196 // Return SFN at current second start, 10 bits, [0, 1023]
197 static inline uint16_t xran_getSfnSecStart(void)
199 return xran_SFN_at_Sec_Start;
201 void xran_updateSfnSecStart(void)
203 uint64_t currentSecond = timing_get_current_second();
204 // Assume always positive
205 uint64_t gpsSecond = currentSecond - UNIX_TO_GPS_SECONDS_OFFSET;
206 uint64_t nFrames = gpsSecond * NUM_OF_FRAMES_PER_SECOND;
207 uint16_t sfn = (uint16_t)(nFrames % (xran_max_frame + 1));
208 xran_SFN_at_Sec_Start = sfn;
210 tx_bytes_per_sec = tx_bytes_counter;
211 rx_bytes_per_sec = rx_bytes_counter;
212 tx_bytes_counter = 0;
213 rx_bytes_counter = 0;
216 static inline int32_t xran_getSlotIdxSecond(void)
218 int32_t frameIdxSecond = xran_getSfnSecStart();
219 int32_t slotIndxSecond = frameIdxSecond * SLOTS_PER_SYSTEMFRAME;
220 return slotIndxSecond;
223 struct xran_device_ctx *xran_dev_get_ctx(void)
225 return &g_xran_dev_ctx[0];
228 static inline struct xran_fh_config *xran_lib_get_ctx_fhcfg(void)
230 return (&(xran_dev_get_ctx()->fh_cfg));
/* Beam ID lookup. Beamforming is not used in this build, so every
 * (direction, cell, antenna, slot) combination maps to beam 0. */
uint16_t xran_get_beamid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id)
{
    return 0; /* NO BEAMFORMING */
}
238 enum xran_if_state xran_get_if_state(void)
240 return xran_if_current_state;
/* Decide whether (subframe_id, slot_id) is a PRACH occasion for the
 * configured numerology, using the PRACH slot bitmap prepared by
 * xran_init_prach(). For numerology < 2 (FR1) the bitmap is indexed by
 * subframe; for numerology 3 (FR2) by 60 kHz slot index.
 * NOTE(review): this copy has lines elided (numbering gaps) — the branches
 * that actually set is_prach_slot are not visible here. */
243 int xran_is_prach_slot(uint32_t subframe_id, uint32_t slot_id)
245 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
246 struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
247 int32_t is_prach_slot = 0;
249 if (p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology < 2){
250 //for FR1, in 38.211 tab 6.3.3.2-2&3 it is subframe index
251 if (pPrachCPConfig->isPRACHslot[subframe_id] == 1){
252 if (pPrachCPConfig->nrofPrachInSlot != 1)
255 if (p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology == 0)
257 else if (slot_id == 1)
261 } else if (p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology == 3){
262 //for FR2, 38.211 tab 6.3.3.4 it is slot index of 60kHz slot
264 slotidx = subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
265 if (pPrachCPConfig->nrofPrachInSlot == 2){
/* Two PRACH occasions per slot: bitmap indexed by 60 kHz slot (slotidx/2). */
266 if (pPrachCPConfig->isPRACHslot[slotidx>>1] == 1)
/* One occasion: additionally require the odd half-slot. */
269 if ((pPrachCPConfig->isPRACHslot[slotidx>>1] == 1) && ((slotidx % 2) == 1)){
274 print_err("Numerology %d not supported", p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology);
275 return is_prach_slot;
/* Reset per-(direction, cell, antenna) section-id counters; 255 in
 * xran_section_id_curslot marks "no slot processed yet" so the first
 * xran_alloc_sectionid() call restarts numbering.
 * NOTE(review): loop bound uses XRAN_MAX_ANTENNA_NR although the arrays are
 * sized XRAN_MAX_ANTENNA_NR*2 + XRAN_MAX_ANT_ARRAY_ELM_NR — verify whether
 * the PRACH/SRS tail entries are initialized elsewhere. Return statement not
 * visible in this copy (lines elided). */
278 int xran_init_sectionid(void *pHandle)
282 for (dir = 0; dir < XRAN_DIR_MAX; dir++){
283 for(cell=0; cell < XRAN_MAX_CELLS_PER_PORT; cell++) {
284 for(ant=0; ant < XRAN_MAX_ANTENNA_NR; ant++) {
285 xran_section_id[dir][cell][ant] = 0;
286 xran_section_id_curslot[dir][cell][ant] = 255;
/* Copy the SRS settings (symbol mask, eAxC offset) from the fronthaul
 * configuration into the device context. Always succeeds. */
294 int xran_init_srs(struct xran_fh_config* pConf, struct xran_device_ctx * p_xran_dev_ctx)
296 struct xran_srs_config *p_srs = &(p_xran_dev_ctx->srs_cfg);
299 p_srs->symbMask = pConf->srs_conf.symbMask;
300 p_srs->eAxC_offset = pConf->srs_conf.eAxC_offset;
301 print_dbg("SRS sym %d\n", p_srs->symbMask );
302 print_dbg("SRS eAxC_offset %d\n", p_srs->eAxC_offset);
304 return (XRAN_STATUS_SUCCESS);
/* Build the PRACH C-Plane configuration for the device from the PRACH
 * configuration index: select the 3GPP config table (mmW / sub-6 TDD /
 * sub-6 FDD), derive per-occasion parameters, and mark PRACH-capable slots
 * in isPRACHslot[]. Returns XRAN_STATUS_SUCCESS.
 * NOTE(review): lines elided in this copy (e.g. the leading nNumerology
 * check selecting the mmW table, loop braces); comments cover visible code. */
308 int xran_init_prach(struct xran_fh_config* pConf, struct xran_device_ctx * p_xran_dev_ctx)
312 struct xran_prach_config* pPRACHConfig = &(pConf->prach_conf);
313 const xRANPrachConfigTableStruct *pxRANPrachConfigTable;
314 uint8_t nNumerology = pConf->frame_conf.nNumerology;
315 uint8_t nPrachConfIdx = pPRACHConfig->nPrachConfIdx;
316 struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
319 pxRANPrachConfigTable = &gxranPrachDataTable_mmw[nPrachConfIdx];
320 else if (pConf->frame_conf.nFrameDuplexType == 1)
321 pxRANPrachConfigTable = &gxranPrachDataTable_sub6_tdd[nPrachConfIdx];
323 pxRANPrachConfigTable = &gxranPrachDataTable_sub6_fdd[nPrachConfIdx];
325 uint8_t preambleFmrt = pxRANPrachConfigTable->preambleFmrt[0];
326 const xRANPrachPreambleLRAStruct *pxranPreambleforLRA = &gxranPreambleforLRA[preambleFmrt];
327 memset(pPrachCPConfig, 0, sizeof(struct xran_prach_cp_config));
329 printf("xRAN open PRACH config: Numerology %u ConfIdx %u, preambleFmrt %u startsymb %u, numSymbol %u, occassionsInPrachSlot %u\n", nNumerology, nPrachConfIdx, preambleFmrt, pxRANPrachConfigTable->startingSym, pxRANPrachConfigTable->duration, pxRANPrachConfigTable->occassionsInPrachSlot);
331 pPrachCPConfig->filterIdx = XRAN_FILTERINDEX_PRACH_ABC; // 3, PRACH preamble format A1~3, B1~4, C0, C2
332 pPrachCPConfig->startSymId = pxRANPrachConfigTable->startingSym;
333 pPrachCPConfig->startPrbc = pPRACHConfig->nPrachFreqStart;
/* Short formats (>= A1) occupy 12 PRBs, long formats 70. */
334 pPrachCPConfig->numPrbc = (preambleFmrt >= FORMAT_A1)? 12 : 70;
335 pPrachCPConfig->timeOffset = pxranPreambleforLRA->nRaCp;
336 pPrachCPConfig->freqOffset = xran_get_freqoffset(pPRACHConfig->nPrachFreqOffset, pPRACHConfig->nPrachSubcSpacing);
337 pPrachCPConfig->x = pxRANPrachConfigTable->x;
338 pPrachCPConfig->nrofPrachInSlot = pxRANPrachConfigTable->nrofPrachInSlot;
339 pPrachCPConfig->y[0] = pxRANPrachConfigTable->y[0];
340 pPrachCPConfig->y[1] = pxRANPrachConfigTable->y[1];
341 if (preambleFmrt >= FORMAT_A1)
343 pPrachCPConfig->numSymbol = pxRANPrachConfigTable->duration;
344 pPrachCPConfig->occassionsInPrachSlot = pxRANPrachConfigTable->occassionsInPrachSlot;
348 pPrachCPConfig->numSymbol = 1;
349 pPrachCPConfig->occassionsInPrachSlot = 1;
353 printf("PRACH: x %u y[0] %u, y[1] %u prach slot: %u ..", pPrachCPConfig->x, pPrachCPConfig->y[0], pPrachCPConfig->y[1], pxRANPrachConfigTable->slotNr[0]);
354 pPrachCPConfig->isPRACHslot[pxRANPrachConfigTable->slotNr[0]] = 1;
355 for (i=1; i < XRAN_PRACH_CANDIDATE_SLOT; i++)
357 slotNr = pxRANPrachConfigTable->slotNr[i];
359 pPrachCPConfig->isPRACHslot[slotNr] = 1;
361 printf(" %u ..", slotNr);
/* Record first/last PRACH symbol per sector for U-Plane processing. */
365 for (i = 0; i < XRAN_MAX_SECTOR_NR; i++){
366 p_xran_dev_ctx->prach_start_symbol[i] = pPrachCPConfig->startSymId;
367 p_xran_dev_ctx->prach_last_symbol[i] = pPrachCPConfig->startSymId + pPrachCPConfig->numSymbol * pPrachCPConfig->occassionsInPrachSlot - 1;
369 if(pConf->log_level){
370 printf("PRACH start symbol %u lastsymbol %u\n", p_xran_dev_ctx->prach_start_symbol[0], p_xran_dev_ctx->prach_last_symbol[0]);
/* PRACH eAxC ids start right after the regular antenna eAxC ids. */
373 pPrachCPConfig->eAxC_offset = xran_get_num_eAxc(NULL);
374 print_dbg("PRACH eAxC_offset %d\n", pPrachCPConfig->eAxC_offset);
376 return (XRAN_STATUS_SUCCESS);
/* Allocate the next C-Plane section id for (dir, cc, ant); numbering restarts
 * from 0 whenever a new slot is seen for that tuple. Error-branch return
 * values are elided in this copy. */
379 inline uint16_t xran_alloc_sectionid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id)
381 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
382 print_err("Invalid CC ID - %d", cc_id);
385 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR) { //for PRACH, ant_id starts from num_ant
386 print_err("Invalid antenna ID - %d", ant_id);
390 /* if new slot has been started,
391 * then initializes section id again for new start */
392 if(xran_section_id_curslot[dir][cc_id][ant_id] != slot_id) {
393 xran_section_id[dir][cc_id][ant_id] = 0;
394 xran_section_id_curslot[dir][cc_id][ant_id] = slot_id;
/* Post-increment: returns the current id, then advances the counter. */
397 return(xran_section_id[dir][cc_id][ant_id]++);
/* Zero all C-Plane and U-Plane (DL/UL) sequence-id counters for every cell.
 * NOTE(review): the C-Plane loop bound (XRAN_MAX_ANTENNA_NR * 2) is smaller
 * than the array's last dimension (+ XRAN_MAX_ANT_ARRAY_ELM_NR) — the SRS
 * tail of xran_cp_seq_id_num is not cleared here; verify intent. Return
 * statement elided in this copy. */
400 int xran_init_seqid(void *pHandle)
404 for(cell=0; cell < XRAN_MAX_CELLS_PER_PORT; cell++) {
405 for(dir=0; dir < XRAN_DIR_MAX; dir++) {
406 for(ant=0; ant < XRAN_MAX_ANTENNA_NR * 2; ant++)
407 xran_cp_seq_id_num[cell][dir][ant] = 0;
409 for(ant=0; ant < XRAN_MAX_ANTENNA_NR; ant++)
410 xran_updl_seq_id_num[cell][ant] = 0;
411 for(ant=0; ant < XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR; ant++)
412 xran_upul_seq_id_num[cell][ant] = 0;
/* Return the next C-Plane sequence id for (dir, cc, ant) and advance the
 * counter (uint8_t wraps naturally at 256). Error-branch return values are
 * elided in this copy. */
418 static inline uint8_t xran_get_cp_seqid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id)
420 if(dir >= XRAN_DIR_MAX) {
421 print_err("Invalid direction - %d", dir);
424 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
425 print_err("Invalid CC ID - %d", cc_id);
428 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR) {
429 print_err("Invalid antenna ID - %d", ant_id);
433 return(xran_cp_seq_id_num[cc_id][dir][ant_id]++);
/* Return the next U-Plane DL sequence id for (cc, ant) and advance the
 * counter. Only the O-DU generates DL U-Plane sequence ids. Error-branch
 * return values are elided in this copy. */
435 static inline uint8_t xran_get_updl_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id)
437 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
438 print_err("Invalid CC ID - %d", cc_id);
441 if(ant_id >= XRAN_MAX_ANTENNA_NR) {
442 print_err("Invalid antenna ID - %d", ant_id);
446 /* Only U-Plane DL needs to get sequence ID in O-DU */
447 return(xran_updl_seq_id_num[cc_id][ant_id]++);
/* Return the address of the U-Plane DL sequence-id counter for (cc, ant)
 * so the caller can read/update it in place. Error-branch return values
 * (presumably NULL) are elided in this copy. */
449 static inline uint8_t *xran_get_updl_seqid_addr(void *pHandle, uint8_t cc_id, uint8_t ant_id)
451 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
452 print_err("Invalid CC ID - %d", cc_id);
455 if(ant_id >= XRAN_MAX_ANTENNA_NR) {
456 print_err("Invalid antenna ID - %d", ant_id);
460 /* Only U-Plane DL needs to get sequence ID in O-DU */
461 return(&xran_updl_seq_id_num[cc_id][ant_id]);
/* Validate the UL U-Plane sequence id received from the O-RU: advance the
 * expected counter and compare. On mismatch, log and resynchronize the
 * counter to the received id. Error-branch returns and the final mismatch
 * return are elided in this copy. */
463 static inline int8_t xran_check_upul_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id, uint8_t seq_id)
466 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
467 print_err("Invalid CC ID - %d", cc_id);
471 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR) {
472 print_err("Invalid antenna ID - %d", ant_id);
476 /* O-DU needs to check the sequence ID of U-Plane UL from O-RU */
477 xran_upul_seq_id_num[cc_id][ant_id]++;
478 if(xran_upul_seq_id_num[cc_id][ant_id] == seq_id) { /* expected sequence */
479 return (XRAN_STATUS_SUCCESS);
481 print_err("expected seqid %u received %u, slot %u, ant %u cc %u", xran_upul_seq_id_num[cc_id][ant_id], seq_id, slot_id, ant_id, cc_id);
482 xran_upul_seq_id_num[cc_id][ant_id] = seq_id; // for next
487 //////////////////////////////////////////
/* Return the next U-Plane UL sequence id for (cc, ant) and advance the
 * counter. Error-branch return values are elided in this copy. */
489 static inline uint8_t xran_get_upul_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id)
491 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
492 print_err("Invalid CC ID - %d", cc_id);
495 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR) {
496 print_err("Invalid antenna ID - %d", ant_id);
500 return(xran_upul_seq_id_num[cc_id][ant_id]++);
/* Return the address of the U-Plane UL sequence-id counter for (cc, ant).
 * NOTE(review): bound check uses XRAN_MAX_ANTENNA_NR * 2, but the array's
 * last dimension is XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR and
 * xran_get_upul_seqid() accepts the larger range — SRS eAxC ids would be
 * rejected here; confirm whether that is intended. */
502 static inline uint8_t *xran_get_upul_seqid_addr(void *pHandle, uint8_t cc_id, uint8_t ant_id)
504 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
505 print_err("Invalid CC ID - %d", cc_id);
508 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) {
509 print_err("Invalid antenna ID - %d", ant_id);
513 return(&xran_upul_seq_id_num[cc_id][ant_id]);
/* Validate a received C-Plane sequence id: advance the expected counter,
 * compare, and resynchronize on mismatch.
 * NOTE(review): antenna bound here is XRAN_MAX_ANTENNA_NR * 2 while
 * xran_get_cp_seqid() allows + XRAN_MAX_ANT_ARRAY_ELM_NR — confirm the
 * narrower check is intended. Success/mismatch return statements are
 * elided in this copy. */
515 static inline int8_t xran_check_cp_seqid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id, uint8_t seq_id)
517 if(dir >= XRAN_DIR_MAX) {
518 print_err("Invalid direction - %d", dir);
521 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
522 print_err("Invalid CC ID - %d", cc_id);
525 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) {
526 print_err("Invalid antenna ID - %d", ant_id);
530 xran_cp_seq_id_num[cc_id][dir][ant_id]++;
531 if(xran_cp_seq_id_num[cc_id][dir][ant_id] == seq_id) { /* expected sequence */
535 xran_cp_seq_id_num[cc_id][dir][ant_id] = seq_id;
/* Validate a received DL U-Plane sequence id (O-RU side): advance the
 * expected counter, compare, and resynchronize on mismatch. Return
 * statements are elided in this copy. */
539 static inline int8_t xran_check_updl_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id, uint8_t seq_id)
541 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
542 print_err("Invalid CC ID - %d", cc_id);
546 if(ant_id >= XRAN_MAX_ANTENNA_NR) {
547 print_err("Invalid antenna ID - %d", ant_id);
551 /* O-RU needs to check the sequence ID of U-Plane DL from O-DU */
552 xran_updl_seq_id_num[cc_id][ant_id]++;
553 if(xran_updl_seq_id_num[cc_id][ant_id] == seq_id) {
554 /* expected sequence */
555 /*print_dbg("ant %u cc_id %u : slot_id %u : seq_id %u : expected seq_id %u\n",
556 ant_id, cc_id, slot_id, seq_id, xran_updl_seq_id_num[cc_id][ant_id]);*/
559 /* print_err("ant %u cc_id %u : slot_id %u : seq_id %u : expected seq_id %u\n",
560 ant_id, cc_id, slot_id, seq_id, xran_updl_seq_id_num[cc_id][ant_id]);*/
562 xran_updl_seq_id_num[cc_id][ant_id] = seq_id;
/* Scratch storage and entry point for parsing a received C-Plane packet.
 * NOTE(review): cpSections/cpInfo are shared static buffers — not safe for
 * concurrent callers; return statement elided in this copy. */
569 static struct xran_section_gen_info cpSections[XRAN_MAX_NUM_SECTIONS];
570 static struct xran_cp_gen_params cpInfo;
571 int process_cplane(struct rte_mbuf *pkt)
573 struct xran_recv_packet_info recv;
575 cpInfo.sections = cpSections;
576 xran_parse_cp_pkt(pkt, &cpInfo, &recv);
580 //////////////////////////////////////////
/* Per-symbol over-the-air callback: at symbol 0 of a slot run the TTI
 * callback; at symbol 3 re-arm the TTI-to-PHY timer if the previous delivery
 * completed; always run TX symbol processing and any registered per-symbol
 * callbacks; finally advance the OTA symbol counters. Accumulates busy time
 * into *used_tick. NOTE(review): several lines elided in this copy
 * (t3 initialization, symbol-counter increments, closing braces). */
582 void sym_ota_cb(struct rte_timer *tim, void *arg, unsigned long *used_tick)
584 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
585 struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
586 long t1 = MLogTick(), t2;
588 static int32_t ctx = 0;
590 if(XranGetSymNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT) == 0){
592 tti_ota_cb(NULL, arg);
593 *used_tick += get_ticks_diff(xran_tick(), t3);
596 if(XranGetSymNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT) == 3){
597 if(p_xran_dev_ctx->phy_tti_cb_done == 0){
598 /* rearm timer to deliver TTI event to PHY */
600 p_xran_dev_ctx->phy_tti_cb_done = 0;
601 xran_timer_arm_ex(&tti_to_phy_timer[xran_lib_ota_tti % 10], tti_to_phy_cb, (void*)pTCtx, p_xran_dev_ctx->fh_init.io_cfg.timing_core);
602 *used_tick += get_ticks_diff(xran_tick(), t3);
607 if (xran_process_tx_sym(timer_ctx))
609 *used_tick += get_ticks_diff(xran_tick(), t3);
612 /* check if there is call back to do something else on this symbol */
614 struct cb_elem_entry *cb_elm;
615 LIST_FOREACH(cb_elm, &p_xran_dev_ctx->sym_cb_list_head[0][xran_lib_ota_sym], pointers){
617 cb_elm->pSymCallback(&dpdk_timer[ctx], cb_elm->pSymCallbackTag);
618 ctx = DpdkTimerIncrementCtx(ctx);
622 // This counter is incremented in advance before it is the time for the next symbol
624 if(xran_lib_ota_sym >= N_SYM_PER_SLOT){
629 MLogTask(PID_SYM_OTA_CB, t1, t2);
/* Per-TTI over-the-air callback: log the previous TTI to MLog, compute
 * frame/subframe/slot for the current TTI, publish the TTI to the ping-pong
 * timer contexts (O-DU works one TTI ahead: next_tti = tti + 1), arm the
 * TTI-to-PHY timer, and advance/wrap the OTA TTI counter.
 * NOTE(review): lines elided in this copy (wrap-around assignments after the
 * max-slot checks, increment of xran_lib_ota_tti, closing braces). */
632 void tti_ota_cb(struct rte_timer *tim, void *arg)
634 uint32_t frame_id = 0;
635 uint32_t subframe_id = 0;
636 uint32_t slot_id = 0;
637 uint32_t next_tti = 0;
639 uint32_t mlogVar[10];
640 uint32_t mlogVarCnt = 0;
641 uint64_t t1 = MLogTick();
643 uint32_t reg_tti = 0;
644 uint32_t reg_sfn = 0;
645 struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
646 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
648 MLogTask(PID_TTI_TIMER, t1, MLogTick());
/* Register the TTI that just completed (previous slot, wrapping at 0). */
651 if(xran_lib_ota_tti == 0)
652 reg_tti = xran_fs_get_max_slot() - 1;
654 reg_tti = xran_lib_ota_tti -1;
655 MLogIncrementCounter();
656 reg_sfn = XranGetFrameNum(reg_tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME)*10 + XranGetSubFrameNum(reg_tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);;
657 /* subframe and slot */
658 MLogRegisterFrameSubframe(reg_sfn, reg_tti % (SLOTNUM_PER_SUBFRAME));
661 slot_id = XranGetSlotNum(xran_lib_ota_tti, SLOTNUM_PER_SUBFRAME);
662 subframe_id = XranGetSubFrameNum(xran_lib_ota_tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
663 frame_id = XranGetFrameNum(xran_lib_ota_tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
/* Publish current TTI into the "other" ping-pong context slot. */
665 pTCtx[(xran_lib_ota_tti & 1) ^ 1].tti_to_process = xran_lib_ota_tti;
667 mlogVar[mlogVarCnt++] = 0x11111111;
668 mlogVar[mlogVarCnt++] = xran_lib_ota_tti;
669 mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx;
670 mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx / 14;
671 mlogVar[mlogVarCnt++] = frame_id;
672 mlogVar[mlogVarCnt++] = subframe_id;
673 mlogVar[mlogVarCnt++] = slot_id;
674 mlogVar[mlogVarCnt++] = 0;
675 MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
/* O-DU (LLS-CU role) prepares the NEXT TTI; O-RU works on the current one. */
677 if(p_xran_dev_ctx->fh_init.io_cfg.id == ID_LLS_CU)
678 next_tti = xran_lib_ota_tti + 1;
680 next_tti = xran_lib_ota_tti;
682 if(next_tti>= xran_fs_get_max_slot()){
683 print_dbg("[%d]SFN %d sf %d slot %d\n",next_tti, frame_id, subframe_id, slot_id);
687 slot_id = XranGetSlotNum(next_tti, SLOTNUM_PER_SUBFRAME);
688 subframe_id = XranGetSubFrameNum(next_tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
689 frame_id = XranGetFrameNum(next_tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
691 print_dbg("[%d]SFN %d sf %d slot %d\n",next_tti, frame_id, subframe_id, slot_id);
693 if(p_xran_dev_ctx->fh_init.io_cfg.id == ID_LLS_CU){
694 pTCtx[(xran_lib_ota_tti & 1)].tti_to_process = next_tti;
696 pTCtx[(xran_lib_ota_tti & 1)].tti_to_process = pTCtx[(xran_lib_ota_tti & 1)^1].tti_to_process;
699 p_xran_dev_ctx->phy_tti_cb_done = 0;
700 xran_timer_arm_ex(&tti_to_phy_timer[xran_lib_ota_tti % 10], tti_to_phy_cb, (void*)pTCtx, p_xran_dev_ctx->fh_init.io_cfg.timing_core);
702 //slot index is increased to next slot at the beginning of current OTA slot
704 if(xran_lib_ota_tti >= xran_fs_get_max_slot()){
705 print_dbg("[%d]SFN %d sf %d slot %d\n",xran_lib_ota_tti, frame_id, subframe_id, slot_id);
708 MLogTask(PID_TTI_CB, t1, MLogTick());
/* One-shot arm of a DPDK timer on the timing core; arg carries the callback
 * function pointer and timer_ctx[0] is passed as its argument. Only armed
 * while the interface is in XRAN_RUNNING state. */
711 void xran_timer_arm(struct rte_timer *tim, void* arg)
713 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
714 uint64_t t3 = MLogTick();
716 if (xran_if_current_state == XRAN_RUNNING){
717 rte_timer_cb_t fct = (rte_timer_cb_t)arg;
719 rte_timer_reset_sync(tim, 0, SINGLE, p_xran_dev_ctx->fh_init.io_cfg.timing_core, fct, &timer_ctx[0]);
721 MLogTask(PID_TIME_ARM_TIMER, t3, MLogTick());
/* One-shot arm of a DPDK timer with explicit callback, argument and target
 * lcore; only while the interface is XRAN_RUNNING.
 * NOTE(review): p_xran_dev_ctx appears unused in the visible lines. */
724 void xran_timer_arm_ex(struct rte_timer *tim, void* CbFct, void *CbArg, unsigned tim_lcore)
726 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
727 uint64_t t3 = MLogTick();
729 if (xran_if_current_state == XRAN_RUNNING){
730 rte_timer_cb_t fct = (rte_timer_cb_t)CbFct;
732 rte_timer_reset_sync(tim, 0, SINGLE, tim_lcore, fct, CbArg);
734 MLogTask(PID_TIME_ARM_TIMER, t3, MLogTick());
/* Build and transmit one C-Plane (section type 1) packet per PRB-map element
 * for the given eAxC/direction/TTI/cell, then record the section info in the
 * section database. For Category B, appends an extension-1 section carrying
 * beamforming weights when the element requests an update.
 * NOTE(review): lines elided in this copy (return paths, closing braces);
 * also two mojibake artifacts are preserved below: "¶ms" was "&params"
 * and "§_geninfo" was "&sect_geninfo" before HTML-entity corruption. */
737 int xran_cp_create_and_send_section(void *pHandle, uint8_t ru_port_id, int dir, int tti, int cc_id,
738 struct xran_prb_map *prbMap, enum xran_category category, uint8_t ctx_id)
740 struct xran_device_ctx *p_x_ctx = xran_dev_get_ctx();
741 struct xran_cp_gen_params params;
742 struct xran_section_gen_info sect_geninfo[1];
743 struct rte_mbuf *mbuf;
745 uint32_t i, j, loc_sym;
746 uint32_t nsection = 0;
747 struct xran_prb_elm *pPrbMapElem = NULL;
748 struct xran_prb_elm *pPrbMapElemPrev = NULL;
749 uint32_t slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
750 uint32_t subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
751 uint32_t frame_id = XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
753 frame_id = (frame_id & 0xff); /* ORAN frameId, 8 bits, [0, 255] */
756 struct xran_sectionext1_info m_ext1;
759 nsection = prbMap->nPrbElm;
760 pPrbMapElem = &prbMap->prbMap[0];
762 print_dbg("cp[%d:%d:%d] ru_port_id %d dir=%d nsection %d\n",
763 frame_id, subframe_id, slot_id, ru_port_id, dir, nsection);
766 print_err("prbMap is NULL\n");
/* One C-Plane packet per PRB-map element. */
769 for (i=0; i<nsection; i++)
771 pPrbMapElem = &prbMap->prbMap[i];
773 params.sectionType = XRAN_CP_SECTIONTYPE_1; /* Most DL/UL Radio Channels */
774 params.hdr.filterIdx = XRAN_FILTERINDEX_STANDARD;
775 params.hdr.frameId = frame_id;
776 params.hdr.subframeId = subframe_id;
777 params.hdr.slotId = slot_id;
778 params.hdr.startSymId = pPrbMapElem->nStartSymb;
779 params.hdr.iqWidth = pPrbMapElem->iqWidth; /*xran_get_conf_iqwidth(pHandle);*/
780 params.hdr.compMeth = pPrbMapElem->compMethod;
782 print_dbg("cp[%d:%d:%d] ru_port_id %d dir=%d\n",
783 frame_id, subframe_id, slot_id, ru_port_id, dir);
/* NOTE(review): seq id is always drawn from the DL pool regardless of dir. */
785 seq_id = xran_get_cp_seqid(pHandle, XRAN_DIR_DL, cc_id, ru_port_id);
787 sect_geninfo[0].info.type = params.sectionType; // for database
788 sect_geninfo[0].info.startSymId = params.hdr.startSymId; // for database
789 sect_geninfo[0].info.iqWidth = params.hdr.iqWidth; // for database
790 sect_geninfo[0].info.compMeth = params.hdr.compMeth; // for database
791 sect_geninfo[0].info.id = i; /*xran_alloc_sectionid(pHandle, dir, cc_id, ru_port_id, slot_id);*/
793 if(sect_geninfo[0].info.id > 7)
794 print_err("sectinfo->id %d\n", sect_geninfo[0].info.id);
/* UL: free previously attached control buffers per symbol descriptor. */
796 if (dir == XRAN_DIR_UL) {
797 for (loc_sym = 0; loc_sym < XRAN_NUM_OF_SYMBOL_PER_SLOT; loc_sym++){
798 struct xran_section_desc *p_sec_desc = pPrbMapElem->p_sec_desc[loc_sym];
800 p_sec_desc->section_id = sect_geninfo[0].info.id;
801 if(p_sec_desc->pCtrl) {
802 rte_pktmbuf_free(p_sec_desc->pCtrl);
803 p_sec_desc->pCtrl = NULL;
804 p_sec_desc->pData = NULL;
807 print_err("section desc is NULL\n");
812 sect_geninfo[0].info.rb = XRAN_RBIND_EVERY;
813 sect_geninfo[0].info.startPrbc = pPrbMapElem->nRBStart;
814 sect_geninfo[0].info.numPrbc = pPrbMapElem->nRBSize;
815 sect_geninfo[0].info.numSymbol = pPrbMapElem->numSymb;
816 sect_geninfo[0].info.reMask = 0xfff;
817 sect_geninfo[0].info.beamId = pPrbMapElem->nBeamIndex;
819 for (loc_sym = 0; loc_sym < XRAN_NUM_OF_SYMBOL_PER_SLOT; loc_sym++){
820 struct xran_section_desc *p_sec_desc = pPrbMapElem->p_sec_desc[loc_sym];
822 p_sec_desc->section_id = sect_geninfo[0].info.id;
824 sect_geninfo[0].info.sec_desc[loc_sym].iq_buffer_offset = p_sec_desc->iq_buffer_offset;
825 sect_geninfo[0].info.sec_desc[loc_sym].iq_buffer_len = p_sec_desc->iq_buffer_len;
827 print_err("section desc is NULL\n");
/* symInc: first element never increments; later elements increment only
 * when they do not share the previous element's start symbol. */
832 sect_geninfo[0].info.symInc = XRAN_SYMBOLNUMBER_NOTINC;
835 pPrbMapElemPrev = &prbMap->prbMap[i-1];
836 if (pPrbMapElemPrev->nStartSymb == pPrbMapElem->nStartSymb)
838 sect_geninfo[0].info.symInc = XRAN_SYMBOLNUMBER_NOTINC;
839 if (pPrbMapElemPrev->numSymb != pPrbMapElem->numSymb)
840 print_err("section info error: previous numSymb %d not equal to current numSymb %d\n", pPrbMapElemPrev->numSymb, pPrbMapElem->numSymb);
844 sect_geninfo[0].info.symInc = XRAN_SYMBOLNUMBER_INC;
845 if (pPrbMapElem->nStartSymb != (pPrbMapElemPrev->nStartSymb + pPrbMapElemPrev->numSymb))
846 print_err("section info error: current startSym %d not equal to previous endSymb %d\n", pPrbMapElem->nStartSymb, pPrbMapElemPrev->nStartSymb + pPrbMapElemPrev->numSymb);
/* Category A: no extension sections; Category B: ext-1 with BF weights. */
850 if(category == XRAN_CATEGORY_A){
851 /* no extention sections for category */
852 sect_geninfo[0].info.ef = 0;
853 sect_geninfo[0].exDataSize = 0;
854 } else if (category == XRAN_CATEGORY_B) {
855 /*add extantion section for BF Weights if update is needed */
856 if(pPrbMapElem->bf_weight_update){
857 memset(&m_ext1, 0, sizeof (struct xran_sectionext1_info));
858 m_ext1.bfwNumber = pPrbMapElem->bf_weight.nAntElmTRx;
859 m_ext1.bfwiqWidth = pPrbMapElem->iqWidth;
860 m_ext1.bfwCompMeth = pPrbMapElem->compMethod;
861 m_ext1.p_bfwIQ = (int16_t*)pPrbMapElem->bf_weight.p_ext_section;
862 m_ext1.bfwIQ_sz = pPrbMapElem->bf_weight.ext_section_sz;
864 sect_geninfo[0].exData[0].type = XRAN_CP_SECTIONEXTCMD_1;
865 sect_geninfo[0].exData[0].len = sizeof(m_ext1);
866 sect_geninfo[0].exData[0].data = &m_ext1;
868 sect_geninfo[0].info.ef = 1;
869 sect_geninfo[0].exDataSize = 1;
871 sect_geninfo[0].info.ef = 0;
872 sect_geninfo[0].exDataSize = 0;
875 print_err("Unsupported Category %d\n", category);
879 params.numSections = 1;//nsection;
880 params.sections = sect_geninfo;
882 mbuf = xran_ethdi_mbuf_alloc();
883 if(unlikely(mbuf == NULL)) {
884 print_err("Alloc fail!\n");
888 ret = xran_prepare_ctrl_pkt(mbuf, ¶ms, cc_id, ru_port_id, seq_id);
890 print_err("Fail to build control plane packet - [%d:%d:%d] dir=%d\n",
891 frame_id, subframe_id, slot_id, dir);
893 /* add in the ethernet header */
894 struct ether_hdr *const h = (void *)rte_pktmbuf_prepend(mbuf, sizeof(*h));
896 tx_bytes_counter += rte_pktmbuf_pkt_len(mbuf);
897 p_x_ctx->send_cpmbuf2ring(mbuf, ETHER_TYPE_ECPRI);
899 /*for(i=0; i<nsection; i++)*/
900 xran_cp_add_section_info(pHandle,
901 dir, cc_id, ru_port_id,
903 §_geninfo[0].info);
/* Timer callback: generate and send DL C-Plane messages for the TTI staged
 * in the ping-pong timer context, for every eAxC and component carrier whose
 * slot is a DL slot. Runs only after first_call and when C-Plane is enabled.
 * NOTE(review): lines elided in this copy (declarations of tti/buf_id/cc_id
 * etc., closing braces). */
910 void tx_cp_dl_cb(struct rte_timer *tim, void *arg)
912 long t1 = MLogTick();
915 uint32_t slot_id, subframe_id, frame_id;
918 uint8_t ant_id, num_eAxc, num_CCPorts;
921 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
922 struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
924 pHandle = NULL; // TODO: temp implemantation
925 num_eAxc = xran_get_num_eAxc(pHandle);
926 num_CCPorts = xran_get_num_cc(pHandle);
928 if(first_call && p_xran_dev_ctx->enableCP) {
/* Take the TTI staged by tti_ota_cb() in the other ping-pong slot. */
930 tti = pTCtx[(xran_lib_ota_tti & 1) ^ 1].tti_to_process;
931 buf_id = tti % XRAN_N_FE_BUF_LEN;
933 slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
934 subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
935 frame_id = XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
937 /* Wrap around to next second */
938 frame_id = (frame_id + NUM_OF_FRAMES_PER_SECOND) & 0x3ff;
941 ctx_id = XranGetSlotNum(tti, SLOTS_PER_SYSTEMFRAME) % XRAN_MAX_SECTIONDB_CTX;
943 print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);
944 for(ant_id = 0; ant_id < num_eAxc; ++ant_id) {
945 for(cc_id = 0; cc_id < num_CCPorts; cc_id++ ) {
946 /* start new section information list */
947 xran_cp_reset_section_info(pHandle, XRAN_DIR_DL, cc_id, ant_id, ctx_id);
948 if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_DL) == 1) {
949 if(p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData){
950 num_list = xran_cp_create_and_send_section(pHandle, ant_id, XRAN_DIR_DL, tti, cc_id,
951 (struct xran_prb_map *)p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData,
952 p_xran_dev_ctx->fh_cfg.ru_conf.xranCat, ctx_id);
954 print_err("[%d]SFN %d sf %d slot %d: ant_id %d cc_id %d \n", tti, frame_id, subframe_id, slot_id, ant_id, cc_id);
956 } /* if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_DL) == 1) */
957 } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
958 } /* for(ant_id = 0; ant_id < num_eAxc; ++ant_id) */
959 MLogTask(PID_CP_DL_CB, t1, MLogTick());
/* Half-slot UL receive deadline: if the application has not yet been
 * notified for this TTI, invoke the per-CC RX callback with symbol=0
 * (first half of the slot). Skips work until xran2phy memory is ready.
 * NOTE(review): lines elided in this copy (slot-index term of rx_tti,
 * tracker update, closing braces). */
963 void rx_ul_deadline_half_cb(struct rte_timer *tim, void *arg)
965 long t1 = MLogTick();
966 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
967 xran_status_t status;
968 /* half of RX for current TTI as measured against current OTA time */
969 int32_t rx_tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
972 uint32_t nSubframeIdx;
976 uint32_t nXranTime = xran_get_slot_idx(&nFrameIdx, &nSubframeIdx, &nSlotIdx, &nSecond);
977 rx_tti = nFrameIdx*SUBFRAMES_PER_SYSTEMFRAME*SLOTNUM_PER_SUBFRAME
978 + nSubframeIdx*SLOTNUM_PER_SUBFRAME
981 if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
984 for(cc_id = 0; cc_id < xran_get_num_cc(p_xran_dev_ctx); cc_id++) {
985 if(p_xran_dev_ctx->rx_packet_callback_tracker[rx_tti % XRAN_N_FE_BUF_LEN][cc_id] == 0){
986 struct xran_cb_tag *pTag = p_xran_dev_ctx->pCallbackTag[cc_id];
987 pTag->slotiId = rx_tti;
988 pTag->symbol = 0; /* last 7 sym means full slot of Symb */
989 status = XRAN_STATUS_SUCCESS;
990 if(p_xran_dev_ctx->pCallback[cc_id])
991 p_xran_dev_ctx->pCallback[cc_id](p_xran_dev_ctx->pCallbackTag[cc_id], status);
993 p_xran_dev_ctx->rx_packet_callback_tracker[rx_tti % XRAN_N_FE_BUF_LEN][cc_id] = 0;
996 MLogTask(PID_UP_UL_HALF_DEAD_LINE_CB, t1, MLogTick());
/* Full-slot UL receive deadline: notify the application (symbol=7, i.e.
 * the full slot is available) and fire the PRACH callback for the TTI that
 * just ended (rx_tti is decremented, wrapping at slot 0).
 * NOTE(review): lines elided in this copy (slot-index term of rx_tti, the
 * rx_tti==0 check before the wrap assignment, closing braces). */
999 void rx_ul_deadline_full_cb(struct rte_timer *tim, void *arg)
1001 long t1 = MLogTick();
1002 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1003 xran_status_t status = 0;
1004 int32_t rx_tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
1007 uint32_t nSubframeIdx;
1011 uint32_t nXranTime = xran_get_slot_idx(&nFrameIdx, &nSubframeIdx, &nSlotIdx, &nSecond);
1012 rx_tti = nFrameIdx*SUBFRAMES_PER_SYSTEMFRAME*SLOTNUM_PER_SUBFRAME
1013 + nSubframeIdx*SLOTNUM_PER_SUBFRAME
1017 rx_tti = (xran_fs_get_max_slot_SFN()-1);
1019 rx_tti -= 1; /* end of RX for prev TTI as measured against current OTA time */
1021 if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
1025 for(cc_id = 0; cc_id < xran_get_num_cc(p_xran_dev_ctx); cc_id++) {
1026 struct xran_cb_tag *pTag = p_xran_dev_ctx->pCallbackTag[cc_id];
1027 pTag->slotiId = rx_tti;
1028 pTag->symbol = 7; /* last 7 sym means full slot of Symb */
1029 status = XRAN_STATUS_SUCCESS;
1030 if(p_xran_dev_ctx->pCallback[cc_id])
1031 p_xran_dev_ctx->pCallback[cc_id](p_xran_dev_ctx->pCallbackTag[cc_id], status);
1033 if(p_xran_dev_ctx->pPrachCallback[cc_id]){
1034 struct xran_cb_tag *pTag = p_xran_dev_ctx->pPrachCallbackTag[cc_id];
1035 pTag->slotiId = rx_tti;
1036 pTag->symbol = 7; /* last 7 sym means full slot of Symb */
1037 p_xran_dev_ctx->pPrachCallback[cc_id](p_xran_dev_ctx->pPrachCallbackTag[cc_id], status);
1041 MLogTask(PID_UP_UL_FULL_DEAD_LINE_CB, t1, MLogTick());
/*
 * Timer callback: generate and transmit UL C-Plane messages for the TTI
 * scheduled in the timer context. For every eAxC/CC pair in an UL or
 * special slot it resets the section-info DB for the new context and sends
 * section messages; when PRACH is enabled and this is a PRACH occasion it
 * additionally builds and sends the PRACH C-Plane message per antenna.
 *
 * FIX(review): two occurrences of the mojibake token `¶ms` restored to
 * `&params` (the address of the local `struct xran_cp_gen_params params`,
 * declared a few lines above); the garbled form does not compile.
 * NOTE(review): embedded source line numbers are non-contiguous, so some
 * statements/braces between them are not visible in this excerpt.
 */
1045 void tx_cp_ul_cb(struct rte_timer *tim, void *arg)
1047 long t1 = MLogTick();
1050 uint32_t slot_id, subframe_id, frame_id;
1052 int ant_id, prach_port_id;
1054 uint8_t num_eAxc, num_CCPorts;
1060 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1061 struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
1062 struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
1064 pHandle = NULL; // TODO: temp implemantation
/* Category A RUs use the DL eAxC count for UL; Category B has a separate UL count. */
1066 if(xran_get_ru_category(pHandle) == XRAN_CATEGORY_A)
1067 num_eAxc = xran_get_num_eAxc(pHandle);
1069 num_eAxc = xran_get_num_eAxcUl(pHandle);
1071 num_CCPorts = xran_get_num_cc(pHandle);
/* Process the TTI queued for the opposite parity of the current OTA TTI. */
1072 tti = pTCtx[(xran_lib_ota_tti & 1) ^ 1].tti_to_process;
1073 buf_id = tti % XRAN_N_FE_BUF_LEN;
1074 slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
1075 subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
1076 frame_id = XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
1078 //Wrap around to next second
1079 frame_id = (frame_id + NUM_OF_FRAMES_PER_SECOND) & 0x3ff;
1081 ctx_id = XranGetSlotNum(tti, SLOTS_PER_SYSTEMFRAME) % XRAN_MAX_SECTIONDB_CTX;
1083 if(first_call && p_xran_dev_ctx->enableCP) {
1085 print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);
1087 for(ant_id = 0; ant_id < num_eAxc; ++ant_id) {
1088 for(cc_id = 0; cc_id < num_CCPorts; cc_id++) {
1089 if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_UL) == 1 ||
1090 xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_SP) == 1 ){
1091 /* start new section information list */
1092 xran_cp_reset_section_info(pHandle, XRAN_DIR_UL, cc_id, ant_id, ctx_id);
1093 num_list = xran_cp_create_and_send_section(pHandle, ant_id, XRAN_DIR_UL, tti, cc_id,
1094 (struct xran_prb_map *)p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData,
1095 p_xran_dev_ctx->fh_cfg.ru_conf.xranCat, ctx_id);
1096 } /* if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_UL) == 1 */
1097 } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
1098 } /* for(ant_id = 0; ant_id < num_eAxc; ++ant_id) */
1100 if(p_xran_dev_ctx->enablePrach) {
1101 uint32_t is_prach_slot = xran_is_prach_slot(subframe_id, slot_id);
1102 if(((frame_id % pPrachCPConfig->x) == pPrachCPConfig->y[0]) && (is_prach_slot==1)) { //is prach slot
1103 for(ant_id = 0; ant_id < num_eAxc; ++ant_id) {
1104 for(cc_id = 0; cc_id < num_CCPorts; cc_id++) {
1105 struct xran_cp_gen_params params;
1106 struct xran_section_gen_info sect_geninfo[8];
1107 struct rte_mbuf *mbuf = xran_ethdi_mbuf_alloc();
/* PRACH uses dedicated eAxC ports located right after the regular UL ports. */
1108 prach_port_id = ant_id + num_eAxc;
1109 /* start new section information list */
1110 xran_cp_reset_section_info(pHandle, XRAN_DIR_UL, cc_id, prach_port_id, ctx_id);
1112 beam_id = xran_get_beamid(pHandle, XRAN_DIR_UL, cc_id, prach_port_id, slot_id);
1113 ret = generate_cpmsg_prach(pHandle, &params, sect_geninfo, mbuf, p_xran_dev_ctx,
1114 frame_id, subframe_id, slot_id,
1115 beam_id, cc_id, prach_port_id,
1116 xran_get_cp_seqid(pHandle, XRAN_DIR_UL, cc_id, prach_port_id));
1117 if (ret == XRAN_STATUS_SUCCESS)
1118 send_cpmsg(pHandle, mbuf, &params, sect_geninfo,
1119 cc_id, prach_port_id, xran_get_cp_seqid(pHandle, XRAN_DIR_UL, cc_id, prach_port_id));
1124 } /* if(p_xran_dev_ctx->enableCP) */
1126 MLogTask(PID_CP_UL_CB, t1, MLogTick());
/*
 * Timer callback for the full-slot UL U-Plane point. In this excerpt the
 * body only brackets an MLog task measurement; any work between the two
 * MLog calls is not visible (interior lines missing).
 */
1129 void ul_up_full_slot_cb(struct rte_timer *tim, void *arg)
1131 long t1 = MLogTick();
1133 MLogTask(PID_TTI_CB_TO_PHY, t1, MLogTick());
/*
 * Timer callback: deliver the per-TTI notification to the PHY.
 * Marks phy_tti_cb_done and invokes the registered XRAN_CB_TTI callback
 * unless SkipTti asks to suppress (and decrement) this occurrence.
 * The second branch re-derives frame/subframe/slot to detect the final
 * slot of the SFN period (handling not visible in this excerpt).
 */
1136 void tti_to_phy_cb(struct rte_timer *tim, void *arg)
1138 long t1 = MLogTick();
1139 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1141 p_xran_dev_ctx->phy_tti_cb_done = 1; /* DPDK called CB */
1143 if(p_xran_dev_ctx->ttiCb[XRAN_CB_TTI]){
1144 if(p_xran_dev_ctx->SkipTti[XRAN_CB_TTI] <= 0){
1145 p_xran_dev_ctx->ttiCb[XRAN_CB_TTI](p_xran_dev_ctx->TtiCbParam[XRAN_CB_TTI]);
/* Skip this TTI and consume one unit of the skip budget. */
1147 p_xran_dev_ctx->SkipTti[XRAN_CB_TTI]--;
1151 if(p_xran_dev_ctx->ttiCb[XRAN_CB_TTI]){
1152 int32_t tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
1153 uint32_t slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
1154 uint32_t subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
1155 uint32_t frame_id = XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
1156 if((frame_id == xran_max_frame)&&(subframe_id==9)&&(slot_id == SLOTNUM_PER_SUBFRAME-1)) { //(tti == xran_fs_get_max_slot()-1)
1162 MLogTask(PID_TTI_CB_TO_PHY, t1, MLogTick());
/*
 * Timing source thread: pins itself to the configured timing core with
 * real-time priority, waits for a top-of-second alignment, derives the
 * C-Plane/U-Plane trigger symbols from the configured T1a/Ta3/Ta4 windows,
 * registers the per-symbol callback list, then runs the symbol-tick poll
 * loop until the interface is stopped, finally tearing down the callback
 * lists.
 * NOTE(review): this excerpt is missing many interior lines (do/while
 * openers, else branches, loop body braces) — control flow shown is partial.
 */
1165 int xran_timing_source_thread(void *args)
1169 int32_t do_reset = 0;
1172 int32_t result1,i,j;
1173 uint32_t delay_cp_dl;
1174 uint32_t delay_cp_ul;
1176 uint32_t delay_up_ul;
1177 uint32_t delay_cp2up;
1182 struct sched_param sched_param;
1183 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1184 uint64_t tWake = 0, tWakePrev = 0, tUsed = 0;
1185 struct cb_elem_entry * cb_elm = NULL;
1187 /* ToS = Top of Second start +- 1.5us */
1192 xran_core_used = rte_lcore_id();
1193 printf("%s [CPU %2d] [PID: %6d]\n", __FUNCTION__, rte_lcore_id(), getpid());
1195 /* set main thread affinity mask to CPU2 */
1196 sched_param.sched_priority = 98;
1199 CPU_SET(p_xran_dev_ctx->fh_init.io_cfg.timing_core, &cpuset);
/* NOTE(review): single-paren assignment-in-condition here vs. the
 * double-paren style two lines below — intentional assignment, but the
 * styles are inconsistent. */
1200 if (result1 = pthread_setaffinity_np(pthread_self(), sizeof(cpu_set_t), &cpuset))
1202 printf("pthread_setaffinity_np failed: coreId = 2, result1 = %d\n",result1);
1204 if ((result1 = pthread_setschedparam(pthread_self(), 1, &sched_param)))
1206 printf("priority is not changed: coreId = 2, result1 = %d\n",result1);
1209 if (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
/* Spin until just after top-of-second (tv_nsec <= 1500 ns). */
1211 timespec_get(&ts, TIME_UTC);
1212 }while (ts.tv_nsec >1500);
1213 struct tm * ptm = gmtime(&ts.tv_sec);
1215 strftime(buff, sizeof buff, "%D %T", ptm);
1216 printf("O-DU: thread_run start time: %s.%09ld UTC [%ld]\n", buff, ts.tv_nsec, interval_us);
/* Convert the O-RAN timing windows (us) into symbol-tick offsets. */
1219 delay_cp_dl = interval_us - p_xran_dev_ctx->fh_init.T1a_max_cp_dl;
1220 delay_cp_ul = interval_us - p_xran_dev_ctx->fh_init.T1a_max_cp_ul;
1221 delay_up = p_xran_dev_ctx->fh_init.T1a_max_up;
1222 delay_up_ul = p_xran_dev_ctx->fh_init.Ta4_max;
1224 delay_cp2up = delay_up-delay_cp_dl;
1226 sym_cp_dl = delay_cp_dl*1000/(interval_us*1000/N_SYM_PER_SLOT)+1;
1227 sym_cp_ul = delay_cp_ul*1000/(interval_us*1000/N_SYM_PER_SLOT)+1;
1228 sym_up_ul = delay_up_ul*1000/(interval_us*1000/N_SYM_PER_SLOT);
1229 p_xran_dev_ctx->sym_up = sym_up = -(delay_up*1000/(interval_us*1000/N_SYM_PER_SLOT));
1230 p_xran_dev_ctx->sym_up_ul = sym_up_ul = (delay_up_ul*1000/(interval_us*1000/N_SYM_PER_SLOT)+1);
1232 printf("Start C-plane DL %d us after TTI [trigger on sym %d]\n", delay_cp_dl, sym_cp_dl);
1233 printf("Start C-plane UL %d us after TTI [trigger on sym %d]\n", delay_cp_ul, sym_cp_ul);
1234 printf("Start U-plane DL %d us before OTA [offset in sym %d]\n", delay_up, sym_up);
1235 printf("Start U-plane UL %d us OTA [offset in sym %d]\n", delay_up_ul, sym_up_ul);
1237 printf("C-plane to U-plane delay %d us after TTI\n", delay_cp2up);
1238 printf("Start Sym timer %ld ns\n", TX_TIMER_INTERVAL/N_SYM_PER_SLOT);
/* Register per-symbol callbacks at the computed trigger symbols. */
1240 cb_elm = xran_create_cb(xran_timer_arm, tx_cp_dl_cb);
1242 LIST_INSERT_HEAD(&p_xran_dev_ctx->sym_cb_list_head[0][sym_cp_dl],
1246 print_err("cb_elm is NULL\n");
1251 cb_elm = xran_create_cb(xran_timer_arm, tx_cp_ul_cb);
1253 LIST_INSERT_HEAD(&p_xran_dev_ctx->sym_cb_list_head[0][sym_cp_ul],
1257 print_err("cb_elm is NULL\n");
1262 /* Full slot UL OTA + delay_up_ul */
1263 cb_elm = xran_create_cb(xran_timer_arm, rx_ul_deadline_full_cb);
1265 LIST_INSERT_HEAD(&p_xran_dev_ctx->sym_cb_list_head[0][sym_up_ul],
1269 print_err("cb_elm is NULL\n");
1274 /* Half slot UL OTA + delay_up_ul*/
1275 cb_elm = xran_create_cb(xran_timer_arm, rx_ul_deadline_half_cb);
1277 LIST_INSERT_HEAD(&p_xran_dev_ctx->sym_cb_list_head[0][sym_up_ul + N_SYM_PER_SLOT/2],
1281 print_err("cb_elm is NULL\n");
1285 } else { // APP_O_RU
1286 /* calculate when to send UL U-plane */
1287 delay_up = p_xran_dev_ctx->fh_init.Ta3_min;
1288 p_xran_dev_ctx->sym_up = sym_up = delay_up*1000/(interval_us*1000/N_SYM_PER_SLOT)+1;
1289 printf("Start UL U-plane %d us after OTA [offset in sym %d]\n", delay_up, sym_up);
1291 timespec_get(&ts, TIME_UTC);
1292 }while (ts.tv_nsec >1500);
1293 struct tm * ptm = gmtime(&ts.tv_sec);
1295 strftime(buff, sizeof buff, "%D %T", ptm);
1296 printf("RU: thread_run start time: %s.%09ld UTC [%ld]\n", buff, ts.tv_nsec, interval_us);
1300 printf("interval_us %ld\n", interval_us);
/* Align the main loop start to a fresh nanosecond boundary. */
1302 timespec_get(&ts, TIME_UTC);
1303 }while (ts.tv_nsec == 0);
1306 /* Update Usage Stats */
1307 tWake = xran_tick();
1308 xran_used_tick += tUsed;
1311 xran_total_tick += get_ticks_diff(tWake, tWakePrev);
1316 delta = poll_next_tick(interval_us*1000L/N_SYM_PER_SLOT, &tUsed);
1317 if (XRAN_STOPPED == xran_if_current_state)
1320 if (likely(XRAN_RUNNING == xran_if_current_state))
1321 sym_ota_cb(&sym_timer, timer_ctx, &tUsed);
/* Teardown: free every registered per-symbol callback element. */
1325 for (i = 0; i< XRAN_MAX_SECTOR_NR; i++){
1326 for (j = 0; j< XRAN_NUM_OF_SYMBOL_PER_SLOT; j++){
1327 struct cb_elem_entry *cb_elm;
1328 LIST_FOREACH(cb_elm, &p_xran_dev_ctx->sym_cb_list_head[i][j], pointers){
1330 LIST_REMOVE(cb_elm, pointers);
1331 xran_destroy_cb(cb_elm);
1337 printf("Closing timing source thread...tx counter %lu, rx counter %lu\n", tx_counter, rx_counter);
1341 /* Handle ecpri format. */
/*
 * Dispatch a received eCPRI packet by message type: IQ data goes to
 * process_mbuf(), real-time control (C-Plane) goes to process_cplane()
 * on an O-RU (an O-DU logs an error instead). Returns MBUF_FREE/MBUF_KEEP
 * semantics via ret (default MBUF_FREE).
 * NOTE(review): some case labels and braces are missing from this excerpt.
 */
1342 int handle_ecpri_ethertype(struct rte_mbuf *pkt, uint64_t rx_time)
1344 const struct xran_ecpri_hdr *ecpri_hdr;
1346 int32_t ret = MBUF_FREE;
/* Reject runts that cannot hold a full eCPRI header. */
1348 if (rte_pktmbuf_data_len(pkt) < sizeof(struct xran_ecpri_hdr)) {
1349 print_err("Packet too short - %d bytes", rte_pktmbuf_data_len(pkt));
1353 /* check eCPRI header. */
1354 ecpri_hdr = rte_pktmbuf_mtod(pkt, struct xran_ecpri_hdr *);
1355 if(ecpri_hdr == NULL){
1356 print_err("ecpri_hdr error\n");
1360 rx_bytes_counter += rte_pktmbuf_pkt_len(pkt);
1361 switch(ecpri_hdr->cmnhdr.ecpri_mesg_type) {
1364 ret = process_mbuf(pkt);
1365 // MLogTask(PID_PROCESS_UP_PKT, t1, MLogTick());
1368 case ECPRI_RT_CONTROL_DATA:
1370 if(xran_dev_get_ctx()->fh_init.io_cfg.id == O_RU) {
1371 ret = process_cplane(pkt);
1373 print_err("O-DU recevied C-Plane message!");
1375 MLogTask(PID_PROCESS_CP_PKT, t1, MLogTick());
1378 print_err("Invalid eCPRI message type - %d", ecpri_hdr->cmnhdr.ecpri_mesg_type);
/*
 * Store one received PRACH symbol into the PHY-facing PRACH RX buffer.
 * Symbols are re-based so storage starts at offset 0 from the configured
 * PRACH start symbol. For little-endian CPU order the IQ payload is
 * byte-swapped and copied (mbuf freed); for the pass-through path the
 * mbuf itself is retained as the buffer (MBUF_KEEP) and the previous
 * control mbuf is released.
 * NOTE(review): parameter list and several branches are incomplete in
 * this excerpt (non-contiguous embedded line numbers).
 */
1384 int xran_process_prach_sym(void *arg,
1385 struct rte_mbuf *mbuf,
1386 void *iq_data_start,
1391 uint8_t subframe_id,
1395 uint16_t start_prbu,
1402 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1403 uint8_t symb_id_offset;
1405 xran_status_t status;
1406 void *pHandle = NULL;
1407 struct rte_mbuf *mb;
1409 uint16_t iq_sample_size_bits = 16;
1411 if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
1414 tti = frame_id * SLOTS_PER_SYSTEMFRAME + subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
/* Pack TTI and symbol id into the status word handed to callbacks. */
1416 status = tti << 16 | symb_id;
1418 if(CC_ID < XRAN_MAX_SECTOR_NR && Ant_ID < XRAN_MAX_ANTENNA_NR && symb_id < XRAN_NUM_OF_SYMBOL_PER_SLOT){
1419 symb_id_offset = symb_id - p_xran_dev_ctx->prach_start_symbol[CC_ID]; //make the storing of prach packets to start from 0 for easy of processing within PHY
1420 pos = (char*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pData;
1421 if(pos && iq_data_start && size){
1422 if (p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_CPU_LE_BYTE_ORDER) {
1424 uint16_t *psrc = (uint16_t *)iq_data_start;
1425 uint16_t *pdst = (uint16_t *)pos;
1426 /* network byte (be) order of IQ to CPU byte order (le) */
1427 for (idx = 0; idx < size/sizeof(int16_t); idx++){
1428 pdst[idx] = (psrc[idx]>>8) | (psrc[idx]<<8); //rte_be_to_cpu_16(psrc[idx]);
1430 *mb_free = MBUF_FREE;
/* Zero-copy path: swap the stored control mbuf for the new one. */
1432 mb = p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pCtrl;
1434 rte_pktmbuf_free(mb);
1436 print_err("mb==NULL\n");
1438 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pData = iq_data_start;
1439 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pCtrl = mbuf;
1440 *mb_free = MBUF_KEEP;
1443 print_err("pos %p iq_data_start %p size %d\n",pos, iq_data_start, size);
1446 print_err("TTI %d(f_%d sf_%d slot_%d) CC %d Ant_ID %d symb_id %d\n",tti, frame_id, subframe_id, slot_id, CC_ID, Ant_ID, symb_id);
/* Commented-out per-symbol PRACH completion tracking (kept for reference). */
1449 /* if (symb_id == p_xran_dev_ctx->prach_last_symbol[CC_ID] ){
1450 p_xran_dev_ctx->rx_packet_prach_tracker[tti % XRAN_N_FE_BUF_LEN][CC_ID][symb_id]++;
1451 if(p_xran_dev_ctx->rx_packet_prach_tracker[tti % XRAN_N_FE_BUF_LEN][CC_ID][symb_id] >= xran_get_num_eAxc(pHandle)){
1452 if(p_xran_dev_ctx->pPrachCallback[0])
1453 p_xran_dev_ctx->pPrachCallback[0](p_xran_dev_ctx->pPrachCallbackTag[0], status);
1454 p_xran_dev_ctx->rx_packet_prach_tracker[tti % XRAN_N_FE_BUF_LEN][CC_ID][symb_id] = 0;
/*
 * Store one received SRS symbol into the SRS RX buffer for the given
 * antenna element. Network byte order is the supported path: when the
 * whole symbol fits in one MTU the mbuf is retained zero-copy (MBUF_KEEP,
 * previous control mbuf freed); fragmented payloads are memcpy'd into the
 * buffer at the start_prbu offset and the mbuf freed. The LE-CPU branch
 * deliberately panics (unsupported).
 * NOTE(review): parameter list and some branches are incomplete in this
 * excerpt.
 */
1461 int32_t xran_process_srs_sym(void *arg,
1462 struct rte_mbuf *mbuf,
1463 void *iq_data_start,
1468 uint8_t subframe_id,
1472 uint16_t start_prbu,
1479 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1481 xran_status_t status;
1482 void *pHandle = NULL;
1483 struct rte_mbuf *mb = NULL;
1485 uint16_t iq_sample_size_bits = 16;
1487 if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
1490 tti = frame_id * SLOTS_PER_SYSTEMFRAME + subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
1492 status = tti << 16 | symb_id;
1494 if(CC_ID < XRAN_MAX_SECTOR_NR && Ant_ID < p_xran_dev_ctx->fh_cfg.nAntElmTRx && symb_id < XRAN_NUM_OF_SYMBOL_PER_SLOT) {
1495 pos = (char*) p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData;
/* Advance to the PRB offset of this section within the symbol buffer. */
1496 pos += start_prbu * N_SC_PER_PRB*(iq_sample_size_bits/8)*2;
1497 if(pos && iq_data_start && size){
1498 if (p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_CPU_LE_BYTE_ORDER) {
1500 uint16_t *psrc = (uint16_t *)iq_data_start;
1501 uint16_t *pdst = (uint16_t *)pos;
1502 rte_panic("XRAN_CPU_LE_BYTE_ORDER is not supported 0x16%lx\n", (long)mb);
1503 /* network byte (be) order of IQ to CPU byte order (le) */
1504 for (idx = 0; idx < size/sizeof(int16_t); idx++){
1505 pdst[idx] = (psrc[idx]>>8) | (psrc[idx]<<8); //rte_be_to_cpu_16(psrc[idx]);
1507 } else if (likely(p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_NE_BE_BYTE_ORDER)){
1508 if (likely (p_xran_dev_ctx->fh_init.mtu >=
1509 p_xran_dev_ctx->fh_cfg.nULRBs * N_SC_PER_PRB*(iq_sample_size_bits/8)*2)) {
1510 /* no fragmentation */
1511 mb = p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl;
1513 rte_pktmbuf_free(mb);
1515 print_err("mb==NULL\n");
1517 p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData = iq_data_start;
1518 p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl = mbuf;
1519 *mb_free = MBUF_KEEP;
1521 /* packet can be fragmented copy RBs */
1522 rte_memcpy(pos, iq_data_start, size);
1523 *mb_free = MBUF_FREE;
1527 print_err("pos %p iq_data_start %p size %d\n",pos, iq_data_start, size);
1530 print_err("TTI %d(f_%d sf_%d slot_%d) CC %d Ant_ID %d symb_id %d\n",tti, frame_id, subframe_id, slot_id, CC_ID, Ant_ID, symb_id);
/*
 * Validate an incoming U-Plane packet's eCPRI sequence id against the
 * expected value for this CC/antenna/slot. An O-DU checks the UL stream,
 * an O-RU checks the DL stream; a mismatch bumps the duplicate-packet
 * counter and fails the packet. On success the total-received counter is
 * incremented.
 */
1536 int32_t xran_pkt_validate(void *arg,
1537 struct rte_mbuf *mbuf,
1538 void *iq_data_start,
1543 uint8_t subframe_id,
1546 struct ecpri_seq_id *seq_id,
1548 uint16_t start_prbu,
1553 struct xran_device_ctx * pctx = xran_dev_get_ctx();
1554 struct xran_common_counters *pCnt = &pctx->fh_counters;
1556 if(pctx->fh_init.io_cfg.id == O_DU) {
1557 if(xran_check_upul_seqid(NULL, CC_ID, Ant_ID, slot_id, seq_id->seq_id) != XRAN_STATUS_SUCCESS) {
1558 pCnt->Rx_pkt_dupl++;
1559 return (XRAN_STATUS_FAIL);
1561 }else if(pctx->fh_init.io_cfg.id == O_RU) {
1562 if(xran_check_updl_seqid(NULL, CC_ID, Ant_ID, slot_id, seq_id->seq_id) != XRAN_STATUS_SUCCESS) {
1563 pCnt->Rx_pkt_dupl++;
1564 return (XRAN_STATUS_FAIL);
/* Neither O-DU nor O-RU: misconfigured device type. */
1567 print_err("incorrect dev type %d\n", pctx->fh_init.io_cfg.id);
1573 pCnt->Total_msgs_rcvd++;
1575 return XRAN_STATUS_SUCCESS;
/*
 * Store one received U-Plane symbol into the front-haul RX buffer using
 * the PRB map for this TTI/CC/antenna. With a single PRB element the mbuf
 * is retained zero-copy for the whole symbol; with multiple elements the
 * per-section descriptor (p_sec_desc) takes ownership of the mbuf and
 * records the IQ offset/length. Fragmented payloads are memcpy'd instead.
 * The LE-CPU branch deliberately panics (unsupported).
 * NOTE(review): parameter list and some else/brace lines are missing from
 * this excerpt.
 */
1578 int32_t xran_process_rx_sym(void *arg,
1579 struct rte_mbuf *mbuf,
1580 void *iq_data_start,
1585 uint8_t subframe_id,
1589 uint16_t start_prbu,
1596 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1598 xran_status_t status;
1599 void *pHandle = NULL;
1600 struct rte_mbuf *mb = NULL;
1601 struct xran_prb_map * pRbMap = NULL;
1602 struct xran_prb_elm * prbMapElm = NULL;
1604 uint16_t iq_sample_size_bits = 16;
1606 tti = frame_id * SLOTS_PER_SYSTEMFRAME + subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
1608 status = tti << 16 | symb_id;
1610 if(CC_ID < XRAN_MAX_SECTOR_NR && Ant_ID < XRAN_MAX_ANTENNA_NR && symb_id < XRAN_NUM_OF_SYMBOL_PER_SLOT){
1611 pos = (char*) p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData;
1612 pRbMap = (struct xran_prb_map *) p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers->pData;
/* Reject section ids outside the PRB map before using them as indices. */
1614 prbMapElm = &pRbMap->prbMap[sect_id];
1615 if(sect_id >= pRbMap->nPrbElm) {
1616 print_err("sect_id %d !=pRbMap->nPrbElm %d\n", sect_id,pRbMap->nPrbElm);
1617 *mb_free = MBUF_FREE;
1621 print_err("pRbMap==NULL\n");
1622 *mb_free = MBUF_FREE;
1626 pos += start_prbu * N_SC_PER_PRB*(iq_sample_size_bits/8)*2;
1627 if(pos && iq_data_start && size){
1628 if (p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_CPU_LE_BYTE_ORDER) {
1630 uint16_t *psrc = (uint16_t *)iq_data_start;
1631 uint16_t *pdst = (uint16_t *)pos;
1632 rte_panic("XRAN_CPU_LE_BYTE_ORDER is not supported 0x16%lx\n", (long)mb);
1633 /* network byte (be) order of IQ to CPU byte order (le) */
1634 for (idx = 0; idx < size/sizeof(int16_t); idx++){
1635 pdst[idx] = (psrc[idx]>>8) | (psrc[idx]<<8); //rte_be_to_cpu_16(psrc[idx]);
1637 } else if (likely(p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_NE_BE_BYTE_ORDER)){
/* MTU/O-DU guard disabled: zero-copy path is always taken here. */
1638 if (/*likely (p_xran_dev_ctx->fh_init.mtu >=
1639 p_xran_dev_ctx->fh_cfg.nULRBs * N_SC_PER_PRB*(iq_sample_size_bits/8)*2)
1640 && p_xran_dev_ctx->fh_init.io_cfg.id == O_DU*/ 1) {
1641 if (pRbMap->nPrbElm == 1){
1642 /* no fragmentation */
1643 mb = p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl;
1645 rte_pktmbuf_free(mb);
1647 print_err("mb==NULL\n");
1649 p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData = iq_data_start;
1650 p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl = mbuf;
1651 *mb_free = MBUF_KEEP;
/* Multi-element PRB map: hand the mbuf to the per-section descriptor. */
1653 prbMapElm = &pRbMap->prbMap[sect_id];
1654 struct xran_section_desc *p_sec_desc = prbMapElm->p_sec_desc[symb_id];
1656 mb = p_sec_desc->pCtrl;
1658 rte_pktmbuf_free(mb);
1660 p_sec_desc->pData = iq_data_start;
1661 p_sec_desc->pCtrl = mbuf;
1662 p_sec_desc->iq_buffer_len = size;
1663 p_sec_desc->iq_buffer_offset = RTE_PTR_DIFF(iq_data_start, mbuf);
1665 print_err("p_sec_desc==NULL\n");
1666 *mb_free = MBUF_FREE;
1669 *mb_free = MBUF_KEEP;
1672 /* packet can be fragmented copy RBs */
1673 rte_memcpy(pos, iq_data_start, size);
1674 *mb_free = MBUF_FREE;
1678 print_err("pos %p iq_data_start %p size %d\n",pos, iq_data_start, size);
1681 print_err("TTI %d(f_%d sf_%d slot_%d) CC %d Ant_ID %d symb_id %d\n",tti, frame_id, subframe_id, slot_id, CC_ID, Ant_ID, symb_id);
1687 /* Send burst of packets on an output interface */
/*
 * Flush the staged TX mbuf table for `port`: sanity-check each mbuf,
 * account its bytes, and enqueue it to the U-Plane TX ring. Logs an error
 * if fewer than n mbufs were accepted by the ring.
 */
1689 xran_send_burst(struct xran_device_ctx *dev, uint16_t n, uint16_t port)
1691 struct rte_mbuf **m_table;
1697 m_table = (struct rte_mbuf **)dev->tx_mbufs[port].m_table;
1699 for(i = 0; i < n; i++){
1700 rte_mbuf_sanity_check(m_table[i], 0);
1701 /*rte_pktmbuf_dump(stdout, m_table[i], 256);*/
1703 tx_bytes_counter += rte_pktmbuf_pkt_len(m_table[i]);
1704 ret += dev->send_upmbuf2ring(m_table[i], ETHER_TYPE_ECPRI);
1708 if (unlikely(ret < n)) {
1709 print_err("ret < n\n");
/*
 * Transmit one U-Plane symbol when C-Plane is DISABLED (static PRB map).
 * For each PRB element of the TX PRB map it either builds a zero-copy
 * packet around the IQ buffer (external mbuf attach) or falls back to
 * send_symbol_ex(), splitting at 136 PRBs when needed. On an O-RU it also
 * sends PRACH IQ during PRACH occasions and SRS IQ on SRS symbols.
 * Direction and PRB count follow the device role (O-DU: DL, O-RU: UL).
 * NOTE(review): many interior lines (braces, some arguments) are missing
 * from this excerpt; control flow shown is partial.
 */
1715 int32_t xran_process_tx_sym_cp_off(uint8_t ctx_id, uint32_t tti, int32_t cc_id, int32_t ant_id, uint32_t frame_id, uint32_t subframe_id, uint32_t slot_id, uint32_t sym_id,
1719 uint64_t t1 = MLogTick();
1721 void *pHandle = NULL;
1723 char *p_sec_iq = NULL;
1724 char *p_sect_iq = NULL;
1727 uint16_t iq_sample_size_bits = 16; // TODO: make dynamic per
1729 struct xran_prb_map *prb_map = NULL;
1730 uint8_t num_ant_elm = 0;
1732 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1733 struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
1734 struct xran_srs_config *p_srs_cfg = &(p_xran_dev_ctx->srs_cfg);
1735 num_ant_elm = xran_get_num_ant_elm(pHandle);
1736 enum xran_pkt_dir direction;
1738 struct rte_mbuf *eth_oran_hdr = NULL;
1739 char *ext_buff = NULL;
1740 uint16_t ext_buff_len = 0;
1741 struct rte_mbuf *tmp = NULL;
1742 rte_iova_t ext_buff_iova = 0;
1744 struct rte_mbuf_ext_shared_info * p_share_data = &share_data[tti % XRAN_N_FE_BUF_LEN];
1746 if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
1747 direction = XRAN_DIR_DL; /* O-DU */
1748 prb_num = p_xran_dev_ctx->fh_cfg.nDLRBs;
1750 direction = XRAN_DIR_UL; /* RU */
1751 prb_num = p_xran_dev_ctx->fh_cfg.nULRBs;
/* Only act on slots/symbols scheduled for this device's TX direction. */
1754 if(xran_fs_get_slot_type(cc_id, tti, ((p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)? XRAN_SLOT_TYPE_DL : XRAN_SLOT_TYPE_UL)) == 1
1755 || xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_SP) == 1
1756 || xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_FDD) == 1){
1758 if(xran_fs_get_symbol_type(cc_id, tti, sym_id) == ((p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)? XRAN_SYMBOL_TYPE_DL : XRAN_SYMBOL_TYPE_UL)
1759 || xran_fs_get_symbol_type(cc_id, tti, sym_id) == XRAN_SYMBOL_TYPE_FDD){
1761 if(iq_sample_size_bits != 16)
1762 print_err("Incorrect iqWidth %d\n", iq_sample_size_bits );
1764 pos = (char*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1765 mb = (void*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;
1766 prb_map = (struct xran_prb_map *) p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers->pData;
1771 for (elmIdx = 0; elmIdx < prb_map->nPrbElm; elmIdx++){
1772 uint16_t sec_id = elmIdx;
1773 struct xran_prb_elm * prb_map_elm = &prb_map->prbMap[elmIdx];
1774 struct xran_section_desc * p_sec_desc = NULL;
1776 if(prb_map_elm == NULL){
1777 rte_panic("p_sec_desc == NULL\n");
1780 p_sec_desc = prb_map_elm->p_sec_desc[sym_id];
1782 if(p_sec_desc == NULL){
1783 rte_panic("p_sec_desc == NULL\n");
1787 p_sec_iq = ((char*)pos + p_sec_desc->iq_buffer_offset);
1789 /* calculate offset for external buffer */
1790 ext_buff_len = p_sec_desc->iq_buffer_len;
/* Reserve headroom in front of the IQ data for the eCPRI/radio-app/
 * section headers so the packet can be built in place. */
1791 ext_buff = p_sec_iq - (RTE_PKTMBUF_HEADROOM +
1792 sizeof (struct xran_ecpri_hdr) +
1793 sizeof (struct radio_app_common_hdr) +
1794 sizeof(struct data_section_hdr));
1796 ext_buff_len += RTE_PKTMBUF_HEADROOM +
1797 sizeof (struct xran_ecpri_hdr) +
1798 sizeof (struct radio_app_common_hdr) +
1799 sizeof(struct data_section_hdr) + 18;
1801 if(prb_map_elm->compMethod != XRAN_COMPMETHOD_NONE){
1802 ext_buff -= sizeof (struct data_section_compression_hdr);
1803 ext_buff_len += sizeof (struct data_section_compression_hdr);
1806 eth_oran_hdr = rte_pktmbuf_alloc(_eth_mbuf_pool_small);
1808 if (unlikely (( eth_oran_hdr) == NULL)) {
1809 rte_panic("Failed rte_pktmbuf_alloc\n");
1812 p_share_data->free_cb = extbuf_free_callback;
1813 p_share_data->fcb_opaque = NULL;
1814 rte_mbuf_ext_refcnt_set(p_share_data, 1);
1816 ext_buff_iova = rte_mempool_virt2iova(mb);
1817 if (unlikely (( ext_buff_iova) == 0)) {
1818 rte_panic("Failed rte_mem_virt2iova \n");
1821 if (unlikely (( (rte_iova_t)ext_buff_iova) == RTE_BAD_IOVA)) {
1822 rte_panic("Failed rte_mem_virt2iova RTE_BAD_IOVA \n");
/* Attach the IQ region as an external buffer (zero-copy TX). */
1825 rte_pktmbuf_attach_extbuf(eth_oran_hdr,
1827 ext_buff_iova + RTE_PTR_DIFF(ext_buff , mb),
1831 rte_pktmbuf_reset_headroom(eth_oran_hdr);
1833 tmp = (struct rte_mbuf *)rte_pktmbuf_prepend(eth_oran_hdr, sizeof(struct ether_hdr));
1834 if (unlikely (( tmp) == NULL)) {
1835 rte_panic("Failed rte_pktmbuf_prepend \n");
1839 /* first all PRBs */
1840 prepare_symbol_ex(direction, sec_id,
1842 (struct rb_map *)p_sec_iq,
1843 prb_map_elm->compMethod,
1844 prb_map_elm->iqWidth,
1845 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1846 frame_id, subframe_id, slot_id, sym_id,
1847 prb_map_elm->nRBStart, prb_map_elm->nRBSize,
1849 (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1850 xran_get_updl_seqid(pHandle, cc_id, ant_id) :
1851 xran_get_upul_seqid(pHandle, cc_id, ant_id),
1854 rte_mbuf_sanity_check((struct rte_mbuf *)mb, 0);
1856 tx_bytes_counter += rte_pktmbuf_pkt_len((struct rte_mbuf *)mb);
1857 p_xran_dev_ctx->send_upmbuf2ring((struct rte_mbuf *)mb, ETHER_TYPE_ECPRI);
/* Fallback path: build and send via send_symbol_ex (copies IQ). */
1859 p_sect_iq = pos + p_sec_desc->iq_buffer_offset;
1860 prb_num = prb_map_elm->nRBSize;
1862 if( prb_num > 136 || prb_num == 0) {
1863 /* first 136 PRBs */
1864 rte_panic("first 136 PRBs\n");
1865 send_symbol_ex(direction,
1868 (struct rb_map *)p_sect_iq,
1869 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1870 frame_id, subframe_id, slot_id, sym_id,
1873 (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1874 xran_get_updl_seqid(pHandle, cc_id, ant_id) :
1875 xran_get_upul_seqid(pHandle, cc_id, ant_id));
/* Remaining PRBs after the first 136. */
1877 pos += 136 * N_SC_PER_PRB * (iq_sample_size_bits/8)*2;
1879 send_symbol_ex(direction, sec_id,
1881 (struct rb_map *)p_sect_iq,
1882 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1883 frame_id, subframe_id, slot_id, sym_id,
1886 (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1887 xran_get_updl_seqid(pHandle, cc_id, ant_id) :
1888 xran_get_upul_seqid(pHandle, cc_id, ant_id));
1891 send_symbol_ex(direction,
1892 sec_id, /* xran_alloc_sectionid(pHandle, direction, cc_id, ant_id, slot_id)*/
1893 /*(struct rte_mbuf *)mb*/ NULL,
1894 (struct rb_map *)p_sect_iq,
1895 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1896 frame_id, subframe_id, slot_id, sym_id,
1897 prb_map_elm->nRBStart, prb_map_elm->nRBSize,
1899 (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1900 xran_get_updl_seqid(pHandle, cc_id, ant_id) :
1901 xran_get_upul_seqid(pHandle, cc_id, ant_id));
1909 printf("(%d %d %d %d) prb_map == NULL\n", tti % XRAN_N_FE_BUF_LEN, cc_id, ant_id, sym_id);
/* O-RU only: send PRACH IQ during configured PRACH occasions. */
1912 if(p_xran_dev_ctx->enablePrach
1913 && (p_xran_dev_ctx->fh_init.io_cfg.id == O_RU)) { /* Only RU needs to send PRACH I/Q */
1914 uint32_t is_prach_slot = xran_is_prach_slot(subframe_id, slot_id);
1915 if(((frame_id % pPrachCPConfig->x) == pPrachCPConfig->y[0])
1916 && (is_prach_slot == 1)
1917 && (sym_id >= p_xran_dev_ctx->prach_start_symbol[cc_id])
1918 && (sym_id <= p_xran_dev_ctx->prach_last_symbol[cc_id])) { //is prach slot
1919 int prach_port_id = ant_id + pPrachCPConfig->eAxC_offset;
1920 pos = (char*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[0].pData;
1921 pos += (sym_id - p_xran_dev_ctx->prach_start_symbol[cc_id]) * pPrachCPConfig->numPrbc * N_SC_PER_PRB * 4;
1922 mb = NULL;//(void*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[0].pCtrl;
1924 send_symbol_ex(direction,
1925 xran_alloc_sectionid(pHandle, direction, cc_id, prach_port_id, slot_id),
1926 (struct rte_mbuf *)mb,
1927 (struct rb_map *)pos,
1928 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1929 frame_id, subframe_id, slot_id, sym_id,
1930 pPrachCPConfig->startPrbc, pPrachCPConfig->numPrbc,
1931 cc_id, prach_port_id,
1932 xran_get_upul_seqid(pHandle, cc_id, prach_port_id));
1934 } /* if((frame_id % pPrachCPConfig->x == pPrachCPConfig->y[0]) .... */
1935 } /* if(p_xran_dev_ctx->enablePrach ..... */
/* O-RU only: send SRS IQ for every antenna element on SRS symbols. */
1938 if(p_xran_dev_ctx->enableSrs && (p_xran_dev_ctx->fh_init.io_cfg.id == O_RU)){
1939 if( p_srs_cfg->symbMask & (1 << sym_id) /* is SRS symbol */
1941 int32_t ant_elm_id = 0;
1943 for (ant_elm_id = 0; ant_elm_id < num_ant_elm; ant_elm_id++){
1944 int32_t ant_elm_eAxC_id = ant_elm_id + p_srs_cfg->eAxC_offset;
1946 pos = (char*) p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_elm_id].sBufferList.pBuffers[sym_id].pData;
1947 mb = (void*) p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_elm_id].sBufferList.pBuffers[sym_id].pCtrl;
1949 if( prb_num > 136 || prb_num == 0) {
1950 uint16_t sec_id = xran_alloc_sectionid(pHandle, direction, cc_id, ant_id, slot_id);
1951 /* first 136 PRBs */
1952 send_symbol_ex(direction,
1955 (struct rb_map *)pos,
1956 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1957 frame_id, subframe_id, slot_id, sym_id,
1959 cc_id, ant_elm_eAxC_id,
1960 (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1961 xran_get_updl_seqid(pHandle, cc_id, ant_elm_eAxC_id) :
1962 xran_get_upul_seqid(pHandle, cc_id, ant_elm_eAxC_id));
1964 pos += 136 * N_SC_PER_PRB * (iq_sample_size_bits/8)*2;
1966 send_symbol_ex(direction, sec_id,
1968 (struct rb_map *)pos,
1969 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1970 frame_id, subframe_id, slot_id, sym_id,
1972 cc_id, ant_elm_eAxC_id,
1973 (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1974 xran_get_updl_seqid(pHandle, cc_id, ant_elm_eAxC_id) :
1975 xran_get_upul_seqid(pHandle, cc_id, ant_elm_eAxC_id));
1977 send_symbol_ex(direction,
1978 xran_alloc_sectionid(pHandle, direction, cc_id, ant_elm_eAxC_id, slot_id),
1979 (struct rte_mbuf *)mb,
1980 (struct rb_map *)pos,
1981 p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1982 frame_id, subframe_id, slot_id, sym_id,
1984 cc_id, ant_elm_eAxC_id,
1985 (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1986 xran_get_updl_seqid(pHandle, cc_id, ant_elm_eAxC_id) :
1987 xran_get_upul_seqid(pHandle, cc_id, ant_elm_eAxC_id));
1990 } /* for ant elem */
1993 } /* RU mode or C-Plane is not used */
/**
 * @brief Transmit U-Plane data for one symbol using the C-Plane section DB
 *        (C-Plane enabled mode).
 *
 * Iterates the per-context C-Plane section database and, for every type-1
 * section scheduled on this symbol, attaches the section's IQ payload to a
 * freshly allocated mbuf as an external buffer (zero-copy), prepares the
 * ORAN headers in the headroom reserved in front of the IQ data, fragments
 * the packet against the configured MTU when it does not fit, and sends
 * the resulting burst.
 *
 * NOTE(review): this excerpt is elided — several flow-control lines
 * (break/continue/return, closing braces) and some declarations (prb_num,
 * pos, mb, next, len2, i, m) are not visible here.
 *
 * @param ctx_id      C-Plane section DB context index
 * @param tti         TTI counter; selects the per-slot front-haul buffers
 * @param cc_id       component carrier index
 * @param ant_id      eAxC antenna index
 * @param frame_id    ORAN frame number (8 bit)
 * @param subframe_id subframe within frame
 * @param slot_id     slot within subframe
 * @param sym_id      symbol within slot
 * @return status code (tail of the function not visible in this view)
 */
int32_t xran_process_tx_sym_cp_on(uint8_t ctx_id, uint32_t tti, int32_t cc_id, int32_t ant_id, uint32_t frame_id, uint32_t subframe_id,
    uint32_t slot_id, uint32_t sym_id)
    uint64_t t1 = MLogTick();

    struct rte_mbuf *eth_oran_hdr = NULL;
    char        *ext_buff = NULL;
    uint16_t    ext_buff_len = 0;
    struct rte_mbuf *tmp = NULL;
    rte_iova_t ext_buff_iova = 0;
    void        *pHandle = NULL;
    char        *p_sec_iq = NULL;
    uint16_t iq_sample_size_bits = 16; // TODO: make dynamic per
    int32_t num_sections = 0;
    struct xran_section_info *sectinfo = NULL;
    struct xran_device_ctx   *p_xran_dev_ctx = xran_dev_get_ctx();
    struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
    struct xran_srs_config *p_srs_cfg = &(p_xran_dev_ctx->srs_cfg);
    enum xran_pkt_dir direction;
    /* shared-info slot reused per TTI: holds the refcount/free callback for
     * the external (non-mbuf-owned) IQ buffer attached below */
    struct rte_mbuf_ext_shared_info * p_share_data = &share_data[tti % XRAN_N_FE_BUF_LEN];

    /* O-DU side transmits DL; otherwise we are an O-RU transmitting UL */
    if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
        direction = XRAN_DIR_DL; /* O-DU */
        prb_num = p_xran_dev_ctx->fh_cfg.nDLRBs;
        direction = XRAN_DIR_UL; /* RU */
        prb_num = p_xran_dev_ctx->fh_cfg.nULRBs;

    num_sections = xran_cp_getsize_section_info(pHandle, direction, cc_id, ant_id, ctx_id);
    /* iterate C-Plane configuration to generate corresponding U-Plane */
    while(next < num_sections) {
        sectinfo = xran_cp_iterate_section_info(pHandle, direction, cc_id, ant_id, ctx_id, &next);

        if(sectinfo == NULL)

        if(sectinfo->type != XRAN_CP_SECTIONTYPE_1) {   /* only supports type 1 */
            print_err("Invalid section type in section DB - %d", sectinfo->type);

        /* skip, if not scheduled */
        if(sym_id < sectinfo->startSymId || sym_id >= sectinfo->startSymId + sectinfo->numSymbol)

        /* when compression is on, the C-Plane iqWidth overrides the default 16 bits */
        if(sectinfo->compMeth)
            iq_sample_size_bits = sectinfo->iqWidth;

        print_dbg(">>> sym %2d [%d] type%d, id %d, startPrbc=%d, numPrbc=%d, numSymbol=%d\n", sym_id, next,
                  sectinfo->type, sectinfo->id, sectinfo->startPrbc,
                  sectinfo->numPrbc, sectinfo->numSymbol);

        p_xran_dev_ctx->tx_mbufs[0].len = 0;
        uint16_t len  = p_xran_dev_ctx->tx_mbufs[0].len;

        //Added for Klocworks
        if (len >= MBUF_TABLE_SIZE)
            len = MBUF_TABLE_SIZE - 1;

        pos = (char*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
        mb  = p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;

        /* IQ payload of this section starts at the per-symbol offset recorded
         * in the section descriptor */
        p_sec_iq = ((char*)pos + sectinfo->sec_desc[sym_id].iq_buffer_offset);

        /* calculate offset for external buffer: back up far enough before the
         * IQ data to leave room for all protocol headers plus mbuf headroom */
        ext_buff_len = sectinfo->sec_desc[sym_id].iq_buffer_len;
        ext_buff  = p_sec_iq - (RTE_PKTMBUF_HEADROOM +
                        sizeof (struct xran_ecpri_hdr) +
                        sizeof (struct radio_app_common_hdr) +
                        sizeof(struct data_section_hdr));

        ext_buff_len += RTE_PKTMBUF_HEADROOM +
                        sizeof (struct xran_ecpri_hdr) +
                        sizeof (struct radio_app_common_hdr) +
                        sizeof(struct data_section_hdr) + 18;

        if(sectinfo->compMeth != XRAN_COMPMETHOD_NONE){
            ext_buff     -= sizeof (struct data_section_compression_hdr);
            ext_buff_len += sizeof (struct data_section_compression_hdr);

        eth_oran_hdr = rte_pktmbuf_alloc(_eth_mbuf_pool_small);

        if (unlikely (( eth_oran_hdr) == NULL)) {
            rte_panic("Failed rte_pktmbuf_alloc\n");

        p_share_data->free_cb = extbuf_free_callback;
        p_share_data->fcb_opaque = NULL;
        rte_mbuf_ext_refcnt_set(p_share_data, 1);

        /* NOTE(review): IOVA is derived from the owning mbuf and offset by the
         * distance to ext_buff below — confirm mb always comes from a mempool */
        ext_buff_iova = rte_mempool_virt2iova(mb);
        if (unlikely (( ext_buff_iova) == 0)) {
            rte_panic("Failed rte_mem_virt2iova \n");

        if (unlikely (( (rte_iova_t)ext_buff_iova) == RTE_BAD_IOVA)) {
            rte_panic("Failed rte_mem_virt2iova RTE_BAD_IOVA \n");

        rte_pktmbuf_attach_extbuf(eth_oran_hdr,
                                  ext_buff_iova + RTE_PTR_DIFF(ext_buff , mb),

        rte_pktmbuf_reset_headroom(eth_oran_hdr);

        tmp = (struct rte_mbuf *)rte_pktmbuf_prepend(eth_oran_hdr, sizeof(struct ether_hdr));
        if (unlikely (( tmp) == NULL)) {
            rte_panic("Failed rte_pktmbuf_prepend \n");

        rte_pktmbuf_refcnt_update(mb, 1); /* make sure eth won't free our mbuf */

        /* first all PRBs */
        prepare_symbol_ex(direction, sectinfo->id,
                          (struct rb_map *)p_sec_iq,
                          p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
                          frame_id, subframe_id, slot_id, sym_id,
                          sectinfo->startPrbc, sectinfo->numPrbc,
                          xran_get_updl_seqid(pHandle, cc_id, ant_id),

        /* if we don't need to do any fragmentation */
        if (likely (p_xran_dev_ctx->fh_init.mtu >=
                    sectinfo->numPrbc * (3*iq_sample_size_bits + 1))) {
            /* no fragmentation */
            p_xran_dev_ctx->tx_mbufs[0].m_table[len] = mb;

            uint8_t * seq_num = xran_get_updl_seqid_addr(pHandle, cc_id, ant_id);
                rte_panic("pointer to seq number is NULL [CC %d Ant %d]\n", cc_id, ant_id);

            len2 = xran_app_fragment_packet(mb,
                                            &p_xran_dev_ctx->tx_mbufs[0].m_table[len],
                                            (uint16_t)(MBUF_TABLE_SIZE - len),
                                            p_xran_dev_ctx->fh_init.mtu,
                                            p_xran_dev_ctx->direct_pool,
                                            p_xran_dev_ctx->indirect_pool,

            /* Free input packet */
            rte_pktmbuf_free(mb);

            /* If we fail to fragment the packet */
            if (unlikely (len2 < 0)){
                print_err("len2= %d\n", len2);

            /* prepend a fresh Ethernet header on every produced fragment */
            for (i = len; i < len + len2; i ++) {

                m = p_xran_dev_ctx->tx_mbufs[0].m_table[i];
                struct ether_hdr *eth_hdr = (struct ether_hdr *)
                    rte_pktmbuf_prepend(m, (uint16_t)sizeof(struct ether_hdr));
                if (eth_hdr == NULL) {
                    rte_panic("No headroom in mbuf.\n");

        if (unlikely(len > XRAN_MAX_PKT_BURST_PER_SYM)) {
            rte_panic("XRAN_MAX_PKT_BURST_PER_SYM\n");

        /* Transmit packets */
        xran_send_burst(p_xran_dev_ctx, (uint16_t)len, 0);
        p_xran_dev_ctx->tx_mbufs[0].len = 0;
    } /* while(section) */
/**
 * @brief Per-symbol TX dispatch, invoked by the timing source once per symbol.
 *
 * Derives the current ORAN timing identifiers (frame/subframe/slot/symbol)
 * from the free-running OTA symbol index adjusted by the configured TX
 * advance/delay (sym_up), then calls either the C-Plane-driven path
 * (xran_process_tx_sym_cp_on) or the static path (xran_process_tx_sym_cp_off)
 * for every (antenna, component-carrier) pair.
 *
 * NOTE(review): this excerpt is elided — declarations of tti/ctx_id/retval/
 * ant_id/cc_id, the early return when memory is not ready, and the final
 * return are not visible here.
 */
int32_t xran_process_tx_sym(void *arg)
    uint32_t mlogVar[10];
    uint32_t mlogVarCnt = 0;
    unsigned long t1 = MLogTick();
    void  *pHandle = NULL;
    uint8_t num_eAxc = 0;
    uint8_t num_CCPorts = 0;
    uint8_t num_ant_elm = 0;
    uint32_t frame_id = 0;
    uint32_t subframe_id = 0;
    uint32_t slot_id = 0;
    uint32_t sym_id = 0;
    uint32_t sym_idx = 0;
    enum xran_pkt_dir direction;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();

    /* nothing to transmit until PHY buffers have been registered */
    if(p_xran_dev_ctx->xran2phy_mem_ready == 0)

    /* O-RU: send symb after OTA time with delay (UL) */
    /* O-DU: send symb in advance of OTA time (DL) */
    sym_idx = XranOffsetSym(p_xran_dev_ctx->sym_up, xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT*SLOTNUM_PER_SUBFRAME*1000);

    tti = XranGetTtiNum(sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
    slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
    subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME,  SUBFRAMES_PER_SYSTEMFRAME);
    frame_id = XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
    // ORAN frameId, 8 bits, [0, 255]
    frame_id = (frame_id & 0xff);

    sym_id = XranGetSymNum(sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
    /* section DB context rotates with the slot number within a system frame */
    ctx_id = XranGetSlotNum(tti, SLOTS_PER_SYSTEMFRAME) % XRAN_MAX_SECTIONDB_CTX;

    print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);

    /* MLog trace of the timing state for this symbol */
    mlogVar[mlogVarCnt++] = 0xAAAAAAAA;
    mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx;
    mlogVar[mlogVarCnt++] = sym_idx;
    mlogVar[mlogVarCnt++] = abs(p_xran_dev_ctx->sym_up);
    mlogVar[mlogVarCnt++] = tti;
    mlogVar[mlogVarCnt++] = frame_id;
    mlogVar[mlogVarCnt++] = subframe_id;
    mlogVar[mlogVarCnt++] = slot_id;
    mlogVar[mlogVarCnt++] = sym_id;
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());

    /* Category B O-RU transmits on its UL eAxC set; otherwise use DL count */
    if(p_xran_dev_ctx->fh_init.io_cfg.id == O_RU && xran_get_ru_category(pHandle) == XRAN_CATEGORY_B) {
        num_eAxc    = xran_get_num_eAxcUl(pHandle);
        num_eAxc    = xran_get_num_eAxc(pHandle);

    num_CCPorts = xran_get_num_cc(pHandle);

    for(ant_id = 0; ant_id < num_eAxc; ant_id++) {
        for(cc_id = 0; cc_id < num_CCPorts; cc_id++) {
            if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU && p_xran_dev_ctx->enableCP){
                retval = xran_process_tx_sym_cp_on(ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id);
                retval = xran_process_tx_sym_cp_off(ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id, (ant_id == (num_eAxc - 1)));
        } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
    } /* for(ant_id = 0; ant_id < num_eAxc; ant_id++) */

    MLogTask(PID_PROCESS_TX_SYM, t1, MLogTick());
/**
 * @brief Worker thread that services DPDK timers at TIMER_RESOLUTION_CYCLES
 *        granularity until the library is stopped.
 *
 * Raises its own scheduling priority (SCHED_FIFO, policy value 1 passed to
 * pthread_setschedparam) and then polls the TSC; the loop body and its
 * rte_timer_manage() call are elided in this excerpt.
 *
 * @param args unused in the visible code
 * @return exit code (return statement not visible in this view)
 */
int xran_packet_and_dpdk_timer_thread(void *args)
    struct xran_ethdi_ctx *const ctx = xran_ethdi_get_ctx();

    uint64_t prev_tsc = 0;
    uint64_t cur_tsc = rte_rdtsc();
    uint64_t diff_tsc = cur_tsc - prev_tsc;
    struct sched_param sched_param;

    printf("%s [CPU %2d] [PID: %6d]\n", __FUNCTION__,  rte_lcore_id(), getpid());

    sched_param.sched_priority = XRAN_THREAD_DEFAULT_PRIO;

    if ((res  = pthread_setschedparam(pthread_self(), 1, &sched_param)))
        /* non-fatal: keep running at default priority */
        printf("priority is not changed: coreId = %d, result1 = %d\n",rte_lcore_id(), res);

        cur_tsc  = rte_rdtsc();
        diff_tsc = cur_tsc - prev_tsc;
        if (diff_tsc > TIMER_RESOLUTION_CYCLES) {

        if (XRAN_STOPPED == xran_if_current_state)

    printf("Closing pkts timer thread...\n");
/**
 * @brief One-time initialization of the xRAN library.
 *
 * Zeroes the device context, copies the caller's init structure, brings up
 * the DPDK I/O layer (source/destination MAC order depends on whether we are
 * O-DU or O-RU), initializes all rte timers, per-symbol callback lists and
 * the GPS alpha/beta timing offset, and hands the device context back to the
 * caller as the opaque layer handle.
 *
 * NOTE(review): this excerpt is elided — loop variable declarations, the
 * CPU-affinity loop body and the final return are not visible here.
 *
 * @param argc/argv        forwarded command-line parameters (visible code does
 *                         not use them directly)
 * @param p_xran_fh_init   init-time configuration, copied into the context
 * @param appName          application name (not used in visible code)
 * @param pXranLayerHandle out: opaque handle (the device context)
 */
int32_t xran_init(int argc, char *argv[],
    struct xran_fh_init *p_xran_fh_init, char *appName, void ** pXranLayerHandle)
    struct xran_io_loop_cfg *p_io_cfg = (struct xran_io_loop_cfg *)&p_xran_fh_init->io_cfg;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();

    /* find a core this process is affinitized to */
    cpu_set_t system_cpuset;
    pthread_getaffinity_np(pthread_self(), sizeof(cpu_set_t), &system_cpuset);
    for (j = 0; j < CPU_SETSIZE; j++)
        if (CPU_ISSET(j, &system_cpuset))

    int32_t lcore_id = j;

    int64_t offset_sec, offset_nsec;

    memset(p_xran_dev_ctx, 0, sizeof(struct xran_device_ctx));

    p_xran_dev_ctx->fh_init = *p_xran_fh_init;

    printf(" %s: MTU %d\n", __FUNCTION__, p_xran_dev_ctx->fh_init.mtu);

    xran_if_current_state = XRAN_INIT;

    memcpy(&(p_xran_dev_ctx->eAxc_id_cfg), &(p_xran_fh_init->eAxCId_conf), sizeof(struct xran_eaxcid_config));

    p_xran_dev_ctx->enableCP    = p_xran_fh_init->enableCP;
    p_xran_dev_ctx->enablePrach = p_xran_fh_init->prachEnable;
    p_xran_dev_ctx->enableSrs   = p_xran_fh_init->srsEnable;
    p_xran_dev_ctx->DynamicSectionEna = p_xran_fh_init->DynamicSectionEna;

    /* To make sure to set default functions */
    p_xran_dev_ctx->send_upmbuf2ring    = NULL;
    p_xran_dev_ctx->send_cpmbuf2ring    = NULL;

    xran_register_ethertype_handler(ETHER_TYPE_ECPRI, handle_ecpri_ethertype);
    /* O-DU (id == 0) sends to the O-RU address and vice versa */
    if (p_io_cfg->id == 0)
        xran_ethdi_init_dpdk_io(p_xran_fh_init->filePrefix,
                           (struct ether_addr *)p_xran_fh_init->p_o_du_addr,
                           (struct ether_addr *)p_xran_fh_init->p_o_ru_addr,
                           p_xran_fh_init->cp_vlan_tag,
                           p_xran_fh_init->up_vlan_tag);
        xran_ethdi_init_dpdk_io(p_xran_fh_init->filePrefix,
                           (struct ether_addr *)p_xran_fh_init->p_o_ru_addr,
                           (struct ether_addr *)p_xran_fh_init->p_o_du_addr,
                           p_xran_fh_init->cp_vlan_tag,
                           p_xran_fh_init->up_vlan_tag);

    /* NOTE(review): hard-coded 10 — presumably matches the tti_to_phy_timer
     * array size declared elsewhere; confirm and use a named constant */
    for(i = 0; i < 10; i++ )
        rte_timer_init(&tti_to_phy_timer[i]);

    rte_timer_init(&sym_timer);
    for (i = 0; i< MAX_NUM_OF_DPDK_TIMERS; i++)
        rte_timer_init(&dpdk_timer[i]);

    p_xran_dev_ctx->direct_pool   = socket_direct_pool;
    p_xran_dev_ctx->indirect_pool = socket_indirect_pool;

    for (i = 0; i< XRAN_MAX_SECTOR_NR; i++){
        for (j = 0; j< XRAN_NUM_OF_SYMBOL_PER_SLOT; j++){
            LIST_INIT (&p_xran_dev_ctx->sym_cb_list_head[i][j]);

    printf("Set debug stop %d, debug stop count %d\n", p_xran_fh_init->debugStop, p_xran_fh_init->debugStopCount);
    timing_set_debug_stop(p_xran_fh_init->debugStop, p_xran_fh_init->debugStopCount);

    for (uint32_t nCellIdx = 0; nCellIdx < XRAN_MAX_SECTOR_NR; nCellIdx++){
        xran_fs_clear_slot_type(nCellIdx);

    *pXranLayerHandle = p_xran_dev_ctx;

    /* GPS alpha/beta: beta carries a 10 ms-resolution offset split into
     * whole seconds plus a nanosecond remainder combined with alpha */
    if(p_xran_fh_init->GPS_Alpha || p_xran_fh_init->GPS_Beta ){
        offset_sec = p_xran_fh_init->GPS_Beta / 100;    //resolution of beta is 10ms
        offset_nsec = (p_xran_fh_init->GPS_Beta - offset_sec * 100) * 1e7 + p_xran_fh_init->GPS_Alpha;
        p_xran_dev_ctx->offset_sec = offset_sec;
        p_xran_dev_ctx->offset_nsec = offset_nsec;
        p_xran_dev_ctx->offset_sec = 0;
        p_xran_dev_ctx->offset_nsec = 0;
/**
 * @brief Allocate per-sector (component carrier) handle structures.
 *
 * Allocates a 64-byte-aligned, zero-initialized XranSectorHandleInfo per
 * requested instance, records its index and port, and publishes it both in
 * the global pLibInstanceHandles table and the caller's output array.
 *
 * @param pDevHandle             device context (provides xran_port_id)
 * @param nNumInstances          number of CC handles requested; must be > 0
 * @param pSectorInstanceHandles out: one handle per instance
 * @return XRAN_STATUS_SUCCESS, XRAN_STATUS_INVALID_PARAM, or
 *         XRAN_STATUS_RESOURCE on allocation failure
 */
int32_t xran_sector_get_instances (void * pDevHandle, uint16_t nNumInstances,
           xran_cc_handle_t * pSectorInstanceHandles)
    xran_status_t nStatus = XRAN_STATUS_FAIL;
    struct xran_device_ctx *pDev = (struct xran_device_ctx *)pDevHandle;
    XranSectorHandleInfo *pCcHandle = NULL;

    /* Check for the Valid Parameters */
    CHECK_NOT_NULL (pSectorInstanceHandles, XRAN_STATUS_INVALID_PARAM);

    if (!nNumInstances) {
        print_dbg("Instance is not assigned for this function !!! \n");
        return XRAN_STATUS_INVALID_PARAM;

    for (i = 0; i < nNumInstances; i++) {

        /* Allocate Memory for CC handles */
        pCcHandle = (XranSectorHandleInfo *) _mm_malloc( /*"xran_cc_handles",*/ sizeof (XranSectorHandleInfo), 64);

        if(pCcHandle == NULL)
            return XRAN_STATUS_RESOURCE;

        memset (pCcHandle, 0, (sizeof (XranSectorHandleInfo)));

        pCcHandle->nIndex    = i;
        pCcHandle->nXranPort = pDev->xran_port_id;

        printf("%s [%d]: CC %d handle %p\n", __FUNCTION__, pDev->xran_port_id, i, pCcHandle);
        pLibInstanceHandles[pDev->xran_port_id][i] = pSectorInstanceHandles[i] = pCcHandle;

        printf("Handle: %p Instance: %p\n",
            &pSectorInstanceHandles[i], pSectorInstanceHandles[i]);

    return XRAN_STATUS_SUCCESS;
/**
 * @brief Memory-management init hook; intentionally a no-op because buffers
 *        come from DPDK mbuf pools (see xran_bm_init).
 *
 * @param pHandle            layer handle (unused)
 * @param nMemorySize        requested total size (unused)
 * @param nMemorySegmentSize requested segment size (unused)
 */
int32_t xran_mm_init (void * pHandle, uint64_t nMemorySize,
            uint32_t nMemorySegmentSize)
    /* we use mbuf from dpdk memory */
/**
 * @brief Create a per-CC mbuf pool for IQ buffers of a given element size.
 *
 * Element size is padded for the Ethernet + eCPRI + radio-app + data-section
 * headers plus 256 bytes of slack; the padded size must fit in 16 bits
 * (mbuf data room limit). On success returns the new pool index through
 * pPoolIndex and bumps the handle's pool counter.
 *
 * @param pHandle         sector handle (XranSectorHandleInfo)
 * @param pPoolIndex      out: index of the created pool
 * @param nNumberOfBuffers mempool element count
 * @param nBufferSize     usable IQ payload size per element
 */
int32_t xran_bm_init (void * pHandle, uint32_t * pPoolIndex, uint32_t nNumberOfBuffers, uint32_t nBufferSize)
    XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
    uint32_t nAllocBufferSize;

    char pool_name[RTE_MEMPOOL_NAMESIZE];

    snprintf(pool_name, RTE_MEMPOOL_NAMESIZE, "ru_%d_cc_%d_idx_%d",
        pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex);

    nAllocBufferSize = nBufferSize + sizeof(struct ether_hdr) +
        sizeof (struct xran_ecpri_hdr) +
        sizeof (struct radio_app_common_hdr) +
        sizeof(struct data_section_hdr) + 256;

    if(nAllocBufferSize >= UINT16_MAX) {
        rte_panic("nAllocBufferSize is failed [ handle %p %d %d ] [nPoolIndex %d] nNumberOfBuffers %d nBufferSize %d nAllocBufferSize %d\n",
                    pXranCc, pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex, nNumberOfBuffers, nBufferSize, nAllocBufferSize);

    printf("%s: [ handle %p %d %d ] [nPoolIndex %d] nNumberOfBuffers %d nBufferSize %d\n", pool_name,
                        pXranCc, pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex, nNumberOfBuffers, nBufferSize);

    pXranCc->p_bufferPool[pXranCc->nBufferPoolIndex] = rte_pktmbuf_pool_create(pool_name, nNumberOfBuffers,
                                                                               MBUF_CACHE, 0, nAllocBufferSize, rte_socket_id());

    if(pXranCc->p_bufferPool[pXranCc->nBufferPoolIndex] == NULL){
        rte_panic("rte_pktmbuf_pool_create failed [ handle %p %d %d ] [nPoolIndex %d] nNumberOfBuffers %d nBufferSize %d errno %s\n",
                    pXranCc, pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex, nNumberOfBuffers, nBufferSize, rte_strerror(rte_errno));

    pXranCc->bufferPoolElmSz[pXranCc->nBufferPoolIndex]  = nBufferSize;
    pXranCc->bufferPoolNumElm[pXranCc->nBufferPoolIndex] = nNumberOfBuffers;

    printf("CC:[ handle %p ru %d cc_idx %d ] [nPoolIndex %d] mb pool %p \n",
                pXranCc, pXranCc->nXranPort, pXranCc->nIndex,
                    pXranCc->nBufferPoolIndex,  pXranCc->p_bufferPool[pXranCc->nBufferPoolIndex]);

    *pPoolIndex = pXranCc->nBufferPoolIndex++;
/**
 * @brief Allocate one IQ buffer from a pool created by xran_bm_init.
 *
 * Allocates an mbuf, appends the pool's element size, prepends room for the
 * Ethernet header, and returns two pointers: ppCtrl (the mbuf itself, used
 * to free the buffer later) and ppData (the IQ payload area, located after
 * all protocol headers).
 *
 * NOTE(review): error-path returns and the success return are elided in
 * this excerpt; on failure *ppData/*ppCtrl may be left unset by the visible
 * code — confirm against the full source.
 */
int32_t xran_bm_allocate_buffer(void * pHandle, uint32_t nPoolIndex, void **ppData,  void **ppCtrl)
    XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;

    struct rte_mbuf * mb =  rte_pktmbuf_alloc(pXranCc->p_bufferPool[nPoolIndex]);

    char * start     = rte_pktmbuf_append(mb, pXranCc->bufferPoolElmSz[nPoolIndex]);
    char * ethhdr    = rte_pktmbuf_prepend(mb, sizeof(struct ether_hdr));

    if(start && ethhdr){
        char * iq_offset = rte_pktmbuf_mtod(mb, char * );
        /* skip all protocol headers so *ppData points straight at IQ samples */
        iq_offset = iq_offset + sizeof(struct ether_hdr) +
                    sizeof (struct xran_ecpri_hdr) +
                    sizeof (struct radio_app_common_hdr) +
                    sizeof(struct data_section_hdr);

        if (0) /* if compression */
            iq_offset += sizeof (struct data_section_compression_hdr);

        *ppData = (void *)iq_offset;
        *ppCtrl = (void *)mb;
        print_err("[nPoolIndex %d] start ethhdr failed \n", nPoolIndex );

        print_err("[nPoolIndex %d] mb alloc failed \n", nPoolIndex );

    if (*ppData ==  NULL){
        print_err("[nPoolIndex %d] rte_pktmbuf_append for %d failed \n", nPoolIndex, pXranCc->bufferPoolElmSz[nPoolIndex]);
/**
 * @brief Return a buffer obtained from xran_bm_allocate_buffer to its pool.
 *
 * @param pHandle sector handle (pXranCc is currently unused in visible code)
 * @param pData   IQ data pointer (unused; the mbuf owns the memory)
 * @param pCtrl   the rte_mbuf returned as ppCtrl by the allocator
 */
int32_t xran_bm_free_buffer(void * pHandle, void *pData, void *pCtrl)
    XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;

    rte_pktmbuf_free(pCtrl);
/**
 * @brief Register the PHY's front-haul TX/RX IQ and PRB-map buffer lists for
 *        one component carrier and install the per-CC transport callback.
 *
 * For every TTI slot and antenna, the caller-provided buffer lists are copied
 * into the device context's IO buffer-control structures; the pBuffers
 * pointer is first seeded with the context's own array and then overwritten
 * by the caller's list (the sBufferList assignment replaces the whole struct).
 *
 * NOTE(review): the NULL check for pHandle and the final return are elided
 * in this excerpt; loop variables i/j/z are declared in elided lines.
 *
 * @return XRAN_STATUS_FAIL on missing handle/callback (success path elided)
 */
int32_t xran_5g_fronthault_config (void * pHandle,
                struct xran_buffer_list *pSrcBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                struct xran_buffer_list *pSrcCpBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                struct xran_buffer_list *pDstBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                struct xran_buffer_list *pDstCpBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                xran_transport_callback_fn pCallback,
    XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
    xran_status_t nStatus = XRAN_STATUS_SUCCESS;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();

    print_dbg("%s\n", __FUNCTION__);

        printf("Handle is NULL!\n");
        return XRAN_STATUS_FAIL;

    if (pCallback == NULL)
        printf ("no callback\n");
        return XRAN_STATUS_FAIL;

    i = pXranCc->nIndex;

    for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
        for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
            /* U-Plane TX (DL IQ towards the RU) */
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulTxBuffers[j][i][z][0];

            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList =   *pSrcBuffer[z][j];

            /* TX PRB map (C-Plane section layout) */
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulTxPrbMapBuffers[j][i][z][0];

            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList =   *pSrcCpBuffer[z][j];

            /* U-Plane RX (UL IQ from the RU) */
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulRxBuffers[j][i][z][0];

            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList =   *pDstBuffer[z][j];

            /* RX PRB map */
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulRxPrbMapBuffers[j][i][z][0];

            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList =   *pDstCpBuffer[z][j];

    p_xran_dev_ctx->pCallback[i]    = pCallback;
    p_xran_dev_ctx->pCallbackTag[i] = pCallbackTag;

    /* unblock the per-symbol TX path (checked in xran_process_tx_sym) */
    p_xran_dev_ctx->xran2phy_mem_ready = 1;
/**
 * @brief Register PRACH RX buffer lists and the PRACH transport callback for
 *        one component carrier.
 *
 * Mirrors xran_5g_fronthault_config for the PRACH path: seeds each IO
 * buffer-control entry and then overwrites its sBufferList with the caller's
 * list for every (TTI slot, antenna) pair.
 *
 * NOTE(review): the pHandle NULL check, loop variable declarations and the
 * final return are elided in this excerpt.
 */
int32_t xran_5g_prach_req (void *  pHandle,
                struct xran_buffer_list *pDstBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                xran_transport_callback_fn pCallback,
    XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
    xran_status_t nStatus = XRAN_STATUS_SUCCESS;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();

        printf("Handle is NULL!\n");
        return XRAN_STATUS_FAIL;

    if (pCallback == NULL)
        printf ("no callback\n");
        return XRAN_STATUS_FAIL;

    i = pXranCc->nIndex;

    for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
        for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_MAX_ANTENNA_NR; // ant number.
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFHPrachRxBuffers[j][i][z][0];
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList =   *pDstBuffer[z][j];

    p_xran_dev_ctx->pPrachCallback[i]    = pCallback;
    p_xran_dev_ctx->pPrachCallbackTag[i] = pCallbackTag;
/**
 * @brief Register SRS RX buffer lists and the SRS transport callback for one
 *        component carrier (Category B antenna-array elements).
 *
 * Same pattern as xran_5g_prach_req but iterating over antenna-array
 * elements (XRAN_MAX_ANT_ARRAY_ELM_NR) instead of eAxC antennas.
 *
 * NOTE(review): the pHandle NULL check, loop variable declarations and the
 * final return are elided in this excerpt.
 */
int32_t xran_5g_srs_req (void *  pHandle,
                struct xran_buffer_list *pDstBuffer[XRAN_MAX_ANT_ARRAY_ELM_NR][XRAN_N_FE_BUF_LEN],
                xran_transport_callback_fn pCallback,
    XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
    xran_status_t nStatus = XRAN_STATUS_SUCCESS;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();

        printf("Handle is NULL!\n");
        return XRAN_STATUS_FAIL;

    if (pCallback == NULL)
        printf ("no callback\n");
        return XRAN_STATUS_FAIL;

    i = pXranCc->nIndex;

    for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
        for(z = 0; z < XRAN_MAX_ANT_ARRAY_ELM_NR; z++){
            p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_MAX_ANT_ARRAY_ELM_NR; // ant number.
            p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFHSrsRxBuffers[j][i][z][0];
            p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList =   *pDstBuffer[z][j];

    p_xran_dev_ctx->pSrsCallback[i]    = pCallback;
    p_xran_dev_ctx->pSrsCallbackTag[i] = pCallbackTag;
/**
 * @brief Report cumulative core-usage tick counters.
 *
 * @param total_time out: total ticks observed
 * @param used_time  out: ticks spent doing work
 * @param core_used  out: core id doing the work
 * @param clear      when nonzero the counters are reset (the conditional and
 *                   the xran_used_tick reset are elided in this excerpt)
 * @return status (return statement not visible in this view)
 */
uint32_t xran_get_time_stats(uint64_t *total_time, uint64_t *used_time, uint32_t *core_used, uint32_t clear)
    *total_time = xran_total_tick;
    *used_time = xran_used_tick;
    *core_used = xran_core_used;

        xran_total_tick = 0;
/**
 * @brief Allocate a cache-line-aligned buffer from the DPDK heap.
 *
 * @param buf_len requested size in bytes
 * @return pointer to the allocation, or NULL on failure (rte_malloc semantics)
 */
void * xran_malloc(size_t buf_len)
    return rte_malloc("External buffer", buf_len, RTE_CACHE_LINE_SIZE);
/**
 * @brief Advance a buffer pointer past mbuf headroom and all ORAN protocol
 *        headers so it points at the (64-byte aligned) IQ payload area.
 *
 * @param dst        start of the raw buffer
 * @param compMethod compression method; any value other than
 *                   XRAN_COMPMETHOD_NONE reserves room for the compression header
 * @return aligned payload pointer (the return statement is elided in this
 *         excerpt; the visible code leaves the result in dst)
 */
uint8_t  *xran_add_hdr_offset(uint8_t  *dst, int16_t compMethod)
    dst+= (RTE_PKTMBUF_HEADROOM +
          sizeof (struct xran_ecpri_hdr) +
          sizeof (struct radio_app_common_hdr) +
          sizeof(struct data_section_hdr));

    if(compMethod != XRAN_COMPMETHOD_NONE)
          dst += sizeof (struct data_section_compression_hdr);

    dst = RTE_PTR_ALIGN_CEIL(dst, 64);
/**
 * @brief Apply the run-time front-haul configuration and start the timing
 *        source thread.
 *
 * Copies the caller's config into the device context, validates IQ/byte
 * order, initializes PRACH/SRS C-Plane setup, the section DB, section and
 * sequence id generators, slot-type tables, and default mbuf-to-ring send
 * functions, then launches xran_timing_source_thread on the configured
 * timing core if the Ethernet VF ports are open.
 *
 * NOTE(review): loop variable declarations and the final return are elided
 * in this excerpt. Must be called after xran_init.
 */
int32_t xran_open(void *pHandle, struct xran_fh_config* pConf)
    uint8_t nNumerology = 0;
    int32_t  lcore_id = 0;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    struct xran_fh_config *pFhCfg;
    pFhCfg = &(p_xran_dev_ctx->fh_cfg);

    memcpy(pFhCfg, pConf, sizeof(struct xran_fh_config));

    if(pConf->log_level)
        printf(" %s: O-RU Category %s\n", __FUNCTION__, (pFhCfg->ru_conf.xranCat == XRAN_CATEGORY_A) ? "A" : "B");

    nNumerology = xran_get_conf_numerology(pHandle);

    /* clamp the CC count to what the library supports */
    if (pConf->nCC > XRAN_MAX_SECTOR_NR)
        if(pConf->log_level)
            printf("Number of cells %d exceeds max number supported %d!\n", pConf->nCC, XRAN_MAX_SECTOR_NR);
        pConf->nCC = XRAN_MAX_SECTOR_NR;

    /* only the default I/Q ordering and network byte order are implemented */
    if(pConf->ru_conf.iqOrder != XRAN_I_Q_ORDER
        || pConf->ru_conf.byteOrder != XRAN_NE_BE_BYTE_ORDER ){

        print_err("Byte order and/or IQ order is not supported [IQ %d byte %d]\n", pConf->ru_conf.iqOrder, pConf->ru_conf.byteOrder);
        return XRAN_STATUS_FAIL;

    /* setup PRACH configuration for C-Plane */
    xran_init_prach(pConf, p_xran_dev_ctx);
    xran_init_srs(pConf, p_xran_dev_ctx);

    xran_cp_init_sectiondb(pHandle);
    xran_init_sectionid(pHandle);
    xran_init_seqid(pHandle);

    if(pConf->ru_conf.xran_max_frame) {
       xran_max_frame = pConf->ru_conf.xran_max_frame;
       printf("xran_max_frame %d\n", xran_max_frame);

    interval_us = xran_fs_get_tti_interval(nNumerology);

    if(pConf->log_level){
        printf("%s: interval_us=%ld\n", __FUNCTION__, interval_us);
    timing_set_numerology(nNumerology);

    for(i = 0 ; i <pConf->nCC; i++){
        xran_fs_set_slot_type(i, pConf->frame_conf.nFrameDuplexType, pConf->frame_conf.nTddPeriod,
            pConf->frame_conf.sSlotConfig);

    xran_fs_slot_limit_init(xran_fs_get_tti_interval(nNumerology));

    if(xran_ethdi_get_ctx()->io_cfg.bbdev_mode != XRAN_BBDEV_NOT_USED){
        p_xran_dev_ctx->bbdev_dec = pConf->bbdev_dec;
        p_xran_dev_ctx->bbdev_enc = pConf->bbdev_enc;

    /* if send_xpmbuf2ring needs to be changed from default functions,
     * then those should be set between xran_init and xran_open */
    if(p_xran_dev_ctx->send_cpmbuf2ring == NULL)
        p_xran_dev_ctx->send_cpmbuf2ring = xran_ethdi_mbuf_send_cp;
    if(p_xran_dev_ctx->send_upmbuf2ring == NULL)
        p_xran_dev_ctx->send_upmbuf2ring = xran_ethdi_mbuf_send;

    /* Start packet processing thread */
    if((uint16_t)xran_ethdi_get_ctx()->io_cfg.port[XRAN_UP_VF] != 0xFFFF &&
        (uint16_t)xran_ethdi_get_ctx()->io_cfg.port[XRAN_CP_VF] != 0xFFFF ){
        if(pConf->log_level){
            print_dbg("XRAN_UP_VF: 0x%04x\n", xran_ethdi_get_ctx()->io_cfg.port[XRAN_UP_VF]);
            print_dbg("XRAN_CP_VF: 0x%04x\n", xran_ethdi_get_ctx()->io_cfg.port[XRAN_CP_VF]);
        if (rte_eal_remote_launch(xran_timing_source_thread, xran_dev_get_ctx(), xran_ethdi_get_ctx()->io_cfg.timing_core))
            rte_panic("thread_run() failed to start\n");
    } else if(pConf->log_level){
        printf("Eth port was not open. Processing thread was not started\n");
/**
 * @brief Move the library to the RUNNING state; no-op if already running.
 *        (Return statements are elided in this excerpt.)
 */
int32_t xran_start(void *pHandle)
    if(xran_get_if_state() == XRAN_RUNNING) {
        print_err("Already STARTED!!");

    xran_if_current_state = XRAN_RUNNING;
/**
 * @brief Move the library to the STOPPED state; no-op if already stopped.
 *        (Return statements are elided in this excerpt.)
 */
int32_t xran_stop(void *pHandle)
    if(xran_get_if_state() == XRAN_STOPPED) {
        print_err("Already STOPPED!!");

    xran_if_current_state = XRAN_STOPPED;
/**
 * @brief Shut the library down: force the STOPPED state and (optionally)
 *        uninitialize the packet-capture framework.
 *
 * Section DB teardown and worker join are deliberately disabled (see the
 * commented-out calls) — known memory leak noted in the TODO below.
 */
int32_t xran_close(void *pHandle)
    xran_if_current_state = XRAN_STOPPED;
    //TODO: fix memory leak xran_cp_free_sectiondb(pHandle);
    //rte_eal_mp_wait_lcore();
    //xran_ethdi_ports_stats();

#ifdef RTE_LIBRTE_PDUMP
    /* uninitialize packet capture framework */
/**
 * @brief Tear down memory management; rejected while the library is running.
 *        (Teardown itself is not yet implemented; returns are elided here.)
 */
int32_t xran_mm_destroy (void * pHandle)
    if(xran_get_if_state() == XRAN_RUNNING) {
        print_err("Please STOP first !!");

    /* functionality is not yet implemented */
/**
 * @brief Register a per-symbol callback. Not implemented yet; rejected while
 *        the library is running. (Returns are elided in this excerpt.)
 */
int32_t xran_reg_sym_cb(void *pHandle, xran_callback_sym_fn symCb, void * symCbParam, uint8_t symb,  uint8_t ant)
    if(xran_get_if_state() == XRAN_RUNNING) {
        print_err("Cannot register callback while running!!\n");

    /* functionality is not yet implemented */
    print_err("Functionality is not yet implemented !");
/**
 * @brief Register a PHY-side TTI callback for the given callback slot.
 *
 * @param Cb         callback to invoke per TTI
 * @param cbParam    opaque argument passed back to Cb
 * @param skipTtiNum number of initial TTIs to skip before invoking Cb
 * @param id         which callback slot to install into
 * @return status (returns are elided in this excerpt); fails while running
 */
int32_t xran_reg_physide_cb(void *pHandle, xran_fh_tti_callback_fn Cb, void *cbParam, int skipTtiNum, enum callback_to_phy_id id)
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();

    if(xran_get_if_state() == XRAN_RUNNING) {
        print_err("Cannot register callback while running!!\n");

    p_xran_dev_ctx->ttiCb[id]      = Cb;
    p_xran_dev_ctx->TtiCbParam[id] = cbParam;
    p_xran_dev_ctx->SkipTti[id]    = skipTtiNum;
/* send_cpmbuf2ring and send_upmbuf2ring should be set between xran_init and xran_open
 * each cb will be set by default duing open if it is set by NULL */
/**
 * @brief Override the functions used to enqueue C-Plane/U-Plane mbufs onto
 *        the TX rings. Must be called between xran_init and xran_open;
 *        rejected while running. (Returns are elided in this excerpt.)
 */
int xran_register_cb_mbuf2ring(xran_ethdi_mbuf_send_fn mbuf_send_cp, xran_ethdi_mbuf_send_fn mbuf_send_up)
    struct xran_device_ctx *p_xran_dev_ctx;

    if(xran_get_if_state() == XRAN_RUNNING) {
        print_err("Cannot register callback while running!!\n");

    p_xran_dev_ctx = xran_dev_get_ctx();

    p_xran_dev_ctx->send_cpmbuf2ring    = mbuf_send_cp;
    p_xran_dev_ctx->send_upmbuf2ring    = mbuf_send_up;
/**
 * @brief Report the current OTA position as frame/subframe/slot indices plus
 *        the current second.
 *
 * Derived from the free-running xran_lib_ota_sym_idx; tti declaration and
 * the return statement are elided in this excerpt.
 */
int32_t xran_get_slot_idx (uint32_t *nFrameIdx, uint32_t *nSubframeIdx,  uint32_t *nSlotIdx, uint64_t *nSecond)
    tti           = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
    *nSlotIdx     = (uint32_t)XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
    *nSubframeIdx = (uint32_t)XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME,  SUBFRAMES_PER_SYSTEMFRAME);
    *nFrameIdx    = (uint32_t)XranGetFrameNum(tti,xran_getSfnSecStart(),SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
    *nSecond      = timing_get_current_second();
2958 * @brief Get the configuration of eAxC ID
2960 * @return the pointer of configuration
2962 inline struct xran_eaxcid_config *xran_get_conf_eAxC(void *pHandle)
2964 return (&(xran_dev_get_ctx()->eAxc_id_cfg));
2968 * @brief Get the configuration of the total number of beamforming weights on RU
2970 * @return Configured the number of beamforming weights
2972 inline uint8_t xran_get_conf_num_bfweights(void *pHandle)
2974 return (xran_dev_get_ctx()->fh_init.totalBfWeights);
2978 * @brief Get the configuration of subcarrier spacing for PRACH
2980 * @return subcarrier spacing value for PRACH
2982 inline uint8_t xran_get_conf_prach_scs(void *pHandle)
2984 return (xran_lib_get_ctx_fhcfg()->prach_conf.nPrachSubcSpacing);
2988 * @brief Get the configuration of FFT size for RU
2990 * @return FFT size value for RU
2992 inline uint8_t xran_get_conf_fftsize(void *pHandle)
2994 return (xran_lib_get_ctx_fhcfg()->ru_conf.fftSize);
2998 * @brief Get the configuration of nummerology
3000 * @return Configured numerology
3002 inline uint8_t xran_get_conf_numerology(void *pHandle)
3004 return (xran_lib_get_ctx_fhcfg()->frame_conf.nNumerology);
3008 * @brief Get the configuration of IQ bit width for RU
3010 * @return IQ bit width for RU
3012 inline uint8_t xran_get_conf_iqwidth(void *pHandle)
3014 struct xran_fh_config *pFhCfg;
3016 pFhCfg = xran_lib_get_ctx_fhcfg();
3017 return ((pFhCfg->ru_conf.iqWidth==16)?0:pFhCfg->ru_conf.iqWidth);
3021 * @brief Get the configuration of compression method for RU
3023 * @return Compression method for RU
3025 inline uint8_t xran_get_conf_compmethod(void *pHandle)
3027 return (xran_lib_get_ctx_fhcfg()->ru_conf.compMeth);
3032 * @brief Get the configuration of the number of component carriers
3034 * @return Configured the number of component carriers
3036 inline uint8_t xran_get_num_cc(void *pHandle)
3038 return (xran_lib_get_ctx_fhcfg()->nCC);
3042 * @brief Get the configuration of the number of antenna for UL
3044 * @return Configured the number of antenna
3046 inline uint8_t xran_get_num_eAxc(void *pHandle)
3048 return (xran_lib_get_ctx_fhcfg()->neAxc);
3052 * @brief Get configuration of O-RU (Cat A or Cat B)
3054 * @return Configured the number of antenna
3056 inline enum xran_category xran_get_ru_category(void *pHandle)
3058 return (xran_lib_get_ctx_fhcfg()->ru_conf.xranCat);
3062 * @brief Get the configuration of the number of antenna
3064 * @return Configured the number of antenna
3066 inline uint8_t xran_get_num_eAxcUl(void *pHandle)
3068 return (xran_lib_get_ctx_fhcfg()->neAxcUl);
3072 * @brief Get the configuration of the number of antenna elements
3074 * @return Configured the number of antenna
3076 inline uint8_t xran_get_num_ant_elm(void *pHandle)
3078 return (xran_lib_get_ctx_fhcfg()->nAntElmTRx);
3081 int32_t xran_get_common_counters(void *pXranLayerHandle, struct xran_common_counters *pStats)
3083 struct xran_device_ctx* pDev = (struct xran_device_ctx*)pXranLayerHandle;
3085 if(pStats && pDev) {
3086 *pStats = pDev->fh_counters;
3087 return XRAN_STATUS_SUCCESS;
3089 return XRAN_STATUS_INVALID_PARAM;