1 /******************************************************************************
3 * Copyright (c) 2019 Intel.
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
9 * http://www.apache.org/licenses/LICENSE-2.0
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
17 *******************************************************************************/
20 * @brief XRAN main functionality module
22 * @ingroup group_source_xran
23 * @author Intel Corporation
39 #include <rte_errno.h>
40 #include <rte_lcore.h>
41 #include <rte_cycles.h>
42 #include <rte_memory.h>
43 #include <rte_memzone.h>
47 #include "xran_fh_o_du.h"
51 #include "xran_up_api.h"
52 #include "xran_cp_api.h"
53 #include "xran_sync_api.h"
54 #include "xran_lib_mlog_tasks_id.h"
55 #include "xran_timer.h"
56 #include "xran_common.h"
57 #include "xran_frame_struct.h"
58 #include "xran_printf.h"
59 #include "xran_app_frag.h"
61 #include "xran_mlog_lnx.h"
/** Integer ceiling division: X/Y rounded up.
 *  Arguments are fully parenthesized so compound expressions can be passed
 *  safely; note each argument is evaluated twice - avoid side effects. */
#define DIV_ROUND_OFFSET(X,Y) ( (X)/(Y) + (((X)%(Y))?1:0) )
/* Compute (otaSym - offSym) modulo numSymTotal, i.e. the symbol position
 * relative to an offset within a cyclic symbol counter:
 *   - if offSym is ahead of otaSym, wrap backwards by adding numSymTotal;
 *   - if the plain difference overruns numSymTotal, wrap it down;
 *   - otherwise the plain difference.
 * NOTE(review): the first branch casts offSym to (uint32_t) while all other
 * operands are (int32_t) - presumably intended to be (int32_t); harmless for
 * in-range symbol values but worth confirming.
 * (Comments kept above the macro: '//' inside a backslash-continued macro
 * would splice the next line into the comment.) */
65 #define XranOffsetSym(offSym, otaSym, numSymTotal) (((int32_t)offSym > (int32_t)otaSym) ? \
66 ((int32_t)otaSym + ((int32_t)numSymTotal) - (uint32_t)offSym) : \
67 (((int32_t)otaSym - (int32_t)offSym) >= numSymTotal) ? \
68 (((int32_t)otaSym - (int32_t)offSym) - numSymTotal) : \
69 ((int32_t)otaSym - (int32_t)offSym))
/** Number of xRAN timer contexts kept in flight (double buffering). */
#define MAX_NUM_OF_XRAN_CTX (2)
/* Advance/rewind a context index with wrap-around in [0, MAX_NUM_OF_XRAN_CTX).
 * Arguments are parenthesized so expressions can be passed; ctx is evaluated
 * twice - avoid side effects. */
#define XranIncrementCtx(ctx) (((ctx) >= (MAX_NUM_OF_XRAN_CTX-1)) ? 0 : ((ctx)+1))
#define XranDecrementCtx(ctx) (((ctx) == 0) ? (MAX_NUM_OF_XRAN_CTX-1) : ((ctx)-1))
/** Size of the round-robin pool of DPDK timers used by symbol callbacks.
 *  NOTE(review): "Tiemer" is a historical typo; the names are kept because
 *  existing callers reference them. */
#define MAX_NUM_OF_DPDK_TIMERS (10)
#define DpdkTiemerIncrementCtx(ctx) (((ctx) >= (MAX_NUM_OF_DPDK_TIMERS-1)) ? 0 : ((ctx)+1))
#define DpdkTiemerDecrementCtx(ctx) (((ctx) == 0) ? (MAX_NUM_OF_DPDK_TIMERS-1) : ((ctx)-1))
80 //#define XRAN_CREATE_RBMAP /**< generate slot map base on symbols */
/* Per-timer bookkeeping passed to timer callbacks as 'arg'. */
83 struct xran_timer_ctx {
    /* TTI number the callback must process (written by tti_ota_cb,
     * consumed by tx_cp_dl_cb / tx_cp_ul_cb). */
84 uint32_t tti_to_process;
/* Per-port/per-sector CC handles returned to the application. */
87 static xran_cc_handle_t pLibInstanceHandles[XRAN_PORTS_NUM][XRAN_MAX_SECTOR_NR] = {NULL};
/* Global device context table; only index 0 is used (see xran_dev_get_ctx). */
88 static struct xran_device_ctx g_xran_dev_ctx[XRAN_PORTS_NUM] = { 0 };
/* Double-buffered timer contexts indexed by (tti & 1). */
90 struct xran_timer_ctx timer_ctx[MAX_NUM_OF_XRAN_CTX];
/* Timers delivering TTI events to PHY; indexed by (tti % 10).
 * NOTE(review): magic 10 here - presumably should share a named constant. */
92 static struct rte_timer tti_to_phy_timer[10];
93 static struct rte_timer sym_timer;
/* Round-robin timer pool driven by DpdkTiemerIncrementCtx(). */
94 static struct rte_timer dpdk_timer[MAX_NUM_OF_DPDK_TIMERS];
/* TTI interval in microseconds. */
96 long interval_us = 1000;
/* Over-the-air time counters, advanced by the timing thread. */
98 uint32_t xran_lib_ota_tti = 0; /* [0:(1000000/TTI-1)] */
99 uint32_t xran_lib_ota_sym = 0; /* [0:1000/TTI-1] */
100 uint32_t xran_lib_ota_sym_idx = 0; /* [0 : 14*(1000000/TTI)-1]
101 where TTI is TTI interval in microseconds */
/* eCPRI sequence-number state per cell/direction/eAxC; the "* 2" halves
 * split normal antennas (low indices) from PRACH ports (high indices). */
103 static uint8_t xran_cp_seq_id_num[XRAN_MAX_CELLS_PER_PORT][XRAN_DIR_MAX][XRAN_MAX_ANTENNA_NR * 2]; //XRAN_MAX_ANTENNA_NR * 2 for PUSCH and PRACH
104 static uint8_t xran_updl_seq_id_num[XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR];
105 static uint8_t xran_upul_seq_id_num[XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR * 2];
/* C-plane section-id allocation state (see xran_alloc_sectionid). */
107 static uint8_t xran_section_id_curslot[XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR * 2];
108 static uint16_t xran_section_id[XRAN_MAX_CELLS_PER_PORT][XRAN_MAX_ANTENNA_NR * 2];
/* Forward declarations for timer arming and per-symbol TX/RX/PRACH
 * processing (full parameter lists of the RX/PRACH handlers continue
 * beyond the lines shown here). */
110 void xran_timer_arm(struct rte_timer *tim, void* arg);
112 int xran_process_tx_sym(void *arg);
114 int xran_process_rx_sym(void *arg,
115 struct rte_mbuf *mbuf,
131 int xran_process_prach_sym(void *arg,
132 struct rte_mbuf *mbuf,
147 void tti_ota_cb(struct rte_timer *tim, void *arg);
148 void tti_to_phy_cb(struct rte_timer *tim, void *arg);
149 void xran_timer_arm_ex(struct rte_timer *tim, void* CbFct, void *CbArg, unsigned tim_lcore);
151 struct xran_device_ctx *xran_dev_get_ctx(void)
153 return &g_xran_dev_ctx[0];
156 static inline struct xran_fh_config *xran_lib_get_ctx_fhcfg(void)
158 return (&(xran_dev_get_ctx()->fh_cfg));
/**
 * Return the beam id for the given direction/CC/antenna/slot.
 *
 * Beamforming is not supported in this build, so beam id 0 is reported
 * regardless of the arguments.
 */
uint16_t xran_get_beamid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id)
{
    uint16_t beam_id = 0; /* NO BEAMFORMING */

    return beam_id;
}
166 enum xran_if_state xran_get_if_state(void)
168 return xran_if_current_state;
/**
 * Decide whether the given subframe/slot is a PRACH occasion for the
 * configured numerology, using the table pre-computed by xran_init_prach().
 * For numerology < 2 (FR1) the PRACH table is indexed by subframe; for
 * numerology 3 (FR2) it is indexed by 60 kHz slot (hence slotidx >> 1).
 * Returns nonzero for a PRACH slot (return statements lie on lines not
 * shown in this excerpt).
 */
171 int xran_isPRACHSlot(uint32_t subframe_id, uint32_t slot_id)
173 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
174 xRANPrachCPConfigStruct *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
177 if (p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology < 2){
178 //for FR1, in 38.211 tab 6.3.3.2-2&3 it is subframe index
179 if (pPrachCPConfig->isPRACHslot[subframe_id] == 1){
180 if (pPrachCPConfig->nrofPrachInSlot != 1)
/* numerology 0 (15 kHz): one slot per subframe */
183 if (p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology == 0)
/* numerology 1 (30 kHz): PRACH in the second slot of the subframe */
185 else if (slot_id == 1)
190 else if (p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology == 3){
191 //for FR2, 38.211 tab 6.3.3.4 it is slot index of 60kHz slot
193 slotidx = subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
194 if (pPrachCPConfig->nrofPrachInSlot == 2){
/* two occasions per 60 kHz slot: either 120 kHz half qualifies */
195 if (pPrachCPConfig->isPRACHslot[slotidx>>1] == 1)
/* one occasion: only the odd 120 kHz slot of the pair qualifies */
199 if ((pPrachCPConfig->isPRACHslot[slotidx>>1] == 1) && (slotidx % 2 == 1))
204 print_err("Numerology %d not supported", p_xran_dev_ctx->fh_cfg.frame_conf.nNumerology);
/**
 * Reset C-plane section-id allocation state for every cell/antenna:
 * counter to 0 and current-slot marker to 255 (a slot id that can never
 * occur, forcing xran_alloc_sectionid() to restart on first use).
 * NOTE(review): the arrays are dimensioned [XRAN_MAX_ANTENNA_NR * 2] but
 * only the first XRAN_MAX_ANTENNA_NR entries (non-PRACH ports) are reset
 * here - presumably the PRACH half relies on static zero-initialization;
 * TODO confirm.
 */
208 int xran_init_sectionid(void *pHandle)
212 for(cell=0; cell < XRAN_MAX_CELLS_PER_PORT; cell++) {
213 for(ant=0; ant < XRAN_MAX_ANTENNA_NR; ant++) {
214 xran_section_id[cell][ant] = 0;
215 xran_section_id_curslot[cell][ant] = 255;
/**
 * Derive the PRACH C-plane configuration (PrachCPConfig) from the
 * application-supplied fronthaul config: selects the 38.211 PRACH table
 * by numerology/duplex mode, fills timing/frequency parameters, marks the
 * candidate PRACH slots, and records first/last PRACH symbol per sector.
 * @param pConf           application fronthaul configuration (read-only)
 * @param p_xran_dev_ctx  device context whose PrachCPConfig is populated
 * @return XRAN_STATUS_SUCCESS
 */
222 int xran_init_prach(struct xran_fh_config* pConf, struct xran_device_ctx * p_xran_dev_ctx)
226 struct xran_prach_config* pPRACHConfig = &(pConf->prach_conf);
227 const xRANPrachConfigTableStruct *pxRANPrachConfigTable;
228 uint8_t nNumerology = pConf->frame_conf.nNumerology;
229 uint8_t nPrachConfIdx = pPRACHConfig->nPrachConfIdx;
230 xRANPrachCPConfigStruct *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
/* table selection: mmWave vs sub-6 TDD vs sub-6 FDD */
233 pxRANPrachConfigTable = &gxranPrachDataTable_mmw[nPrachConfIdx];
234 else if (pConf->frame_conf.nFrameDuplexType == 1)
235 pxRANPrachConfigTable = &gxranPrachDataTable_sub6_tdd[nPrachConfIdx];
237 pxRANPrachConfigTable = &gxranPrachDataTable_sub6_fdd[nPrachConfIdx];
239 uint8_t preambleFmrt = pxRANPrachConfigTable->preambleFmrt[0];
240 const xRANPrachPreambleLRAStruct *pxranPreambleforLRA = &gxranPreambleforLRA[preambleFmrt];
241 memset(pPrachCPConfig, 0, sizeof(xRANPrachCPConfigStruct));
243 printf("xRAN open PRACH config: Numerology %u ConfIdx %u, preambleFmrt %u startsymb %u, numSymbol %u, occassionsInPrachSlot %u\n", nNumerology, nPrachConfIdx, preambleFmrt, pxRANPrachConfigTable->startingSym, pxRANPrachConfigTable->duration, pxRANPrachConfigTable->occassionsInPrachSlot);
245 pPrachCPConfig->filterIdx = XRAN_FILTERINDEX_PRACH_ABC; // 3, PRACH preamble format A1~3, B1~4, C0, C2
246 pPrachCPConfig->startSymId = pxRANPrachConfigTable->startingSym;
247 pPrachCPConfig->startPrbc = pPRACHConfig->nPrachFreqStart;
/* short formats (>= A1) occupy 12 PRBs, long formats 70 */
248 pPrachCPConfig->numPrbc = (preambleFmrt >= FORMAT_A1)? 12 : 70;
249 pPrachCPConfig->timeOffset = pxranPreambleforLRA->nRaCp;
250 pPrachCPConfig->freqOffset = xran_get_freqoffset(pPRACHConfig->nPrachFreqOffset, pPRACHConfig->nPrachSubcSpacing);
251 pPrachCPConfig->x = pxRANPrachConfigTable->x;
252 pPrachCPConfig->nrofPrachInSlot = pxRANPrachConfigTable->nrofPrachInSlot;
253 pPrachCPConfig->y[0] = pxRANPrachConfigTable->y[0];
254 pPrachCPConfig->y[1] = pxRANPrachConfigTable->y[1];
255 if (preambleFmrt >= FORMAT_A1)
257 pPrachCPConfig->numSymbol = pxRANPrachConfigTable->duration;
258 pPrachCPConfig->occassionsInPrachSlot = pxRANPrachConfigTable->occassionsInPrachSlot;
/* long preamble formats: single occasion spanning one symbol entry */
262 pPrachCPConfig->numSymbol = 1;
263 pPrachCPConfig->occassionsInPrachSlot = 1;
/* mark candidate PRACH slots from the config table */
267 printf("PRACH: x %u y[0] %u, y[1] %u prach slot: %u ..", pPrachCPConfig->x, pPrachCPConfig->y[0], pPrachCPConfig->y[1], pxRANPrachConfigTable->slotNr[0]);
268 pPrachCPConfig->isPRACHslot[pxRANPrachConfigTable->slotNr[0]] = 1;
269 for (i=1; i < XRAN_PRACH_CANDIDATE_SLOT; i++)
271 slotNr = pxRANPrachConfigTable->slotNr[i];
273 pPrachCPConfig->isPRACHslot[slotNr] = 1;
275 printf(" %u ..", slotNr);
/* first/last PRACH symbol per sector, used by U-plane RX classification */
279 for (i = 0; i < XRAN_MAX_SECTOR_NR; i++){
280 p_xran_dev_ctx->prach_start_symbol[i] = pPrachCPConfig->startSymId;
281 p_xran_dev_ctx->prach_last_symbol[i] = pPrachCPConfig->startSymId + pPrachCPConfig->numSymbol * pPrachCPConfig->occassionsInPrachSlot - 1;
283 if(pConf->log_level){
284 printf("PRACH start symbol %u lastsymbol %u\n", p_xran_dev_ctx->prach_start_symbol[0], p_xran_dev_ctx->prach_last_symbol[0]);
287 return (XRAN_STATUS_SUCCESS);
/**
 * Allocate the next C-plane section id for (cc_id, ant_id); the counter
 * restarts from 0 whenever a new slot begins. For PRACH, ant_id is offset
 * by the number of antennas (hence the "* 2" bound).
 * Returns the allocated id; error returns for out-of-range ids lie on
 * lines not shown here.
 */
290 inline uint16_t xran_alloc_sectionid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id)
292 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
293 print_err("Invalid CC ID - %d", cc_id);
296 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) { //for PRACH, ant_id starts from num_ant
297 print_err("Invalid antenna ID - %d", ant_id);
301 /* if new slot has been started,
302 * then initializes section id again for new start */
303 if(xran_section_id_curslot[cc_id][ant_id] != slot_id) {
304 xran_section_id[cc_id][ant_id] = 0;
305 xran_section_id_curslot[cc_id][ant_id] = slot_id;
/* post-increment: current id is returned, next call gets id+1 */
308 return(xran_section_id[cc_id][ant_id]++);
/**
 * Zero all eCPRI sequence-number counters: C-plane per cell/direction/eAxC,
 * U-plane DL per cell/antenna, and U-plane UL per cell/eAxC (the "* 2"
 * bound covers the PRACH port range).
 */
311 int xran_init_seqid(void *pHandle)
315 for(cell=0; cell < XRAN_MAX_CELLS_PER_PORT; cell++) {
316 for(dir=0; dir < XRAN_DIR_MAX; dir++) {
317 for(ant=0; ant < XRAN_MAX_ANTENNA_NR * 2; ant++)
318 xran_cp_seq_id_num[cell][dir][ant] = 0;
320 for(ant=0; ant < XRAN_MAX_ANTENNA_NR; ant++)
321 xran_updl_seq_id_num[cell][ant] = 0;
322 for(ant=0; ant < XRAN_MAX_ANTENNA_NR * 2; ant++)
323 xran_upul_seq_id_num[cell][ant] = 0;
/**
 * Return the next C-plane sequence id for (cc_id, dir, ant_id)
 * (post-increment, wraps naturally at uint8_t range). Error returns for
 * invalid arguments lie on lines not shown here.
 */
329 static inline uint8_t xran_get_cp_seqid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id)
331 if(dir >= XRAN_DIR_MAX) {
332 print_err("Invalid direction - %d", dir);
335 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
336 print_err("Invalid CC ID - %d", cc_id);
339 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) {
340 print_err("Invalid antenna ID - %d", ant_id);
344 return(xran_cp_seq_id_num[cc_id][dir][ant_id]++);
/**
 * Return the next U-plane DL sequence id for (cc_id, ant_id)
 * (post-increment). Only O-DU generates DL U-plane sequence ids.
 */
346 static inline uint8_t xran_get_updl_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id)
348 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
349 print_err("Invalid CC ID - %d", cc_id);
352 if(ant_id >= XRAN_MAX_ANTENNA_NR) {
353 print_err("Invalid antenna ID - %d", ant_id);
357 /* Only U-Plane DL needs to get sequence ID in O-DU */
358 return(xran_updl_seq_id_num[cc_id][ant_id]++);
/**
 * Return a pointer to the U-plane DL sequence-id counter for
 * (cc_id, ant_id), letting callers update it in place (e.g. for
 * fragmented packets). NULL-equivalent error paths are on lines not shown.
 */
360 static inline uint8_t *xran_get_updl_seqid_addr(void *pHandle, uint8_t cc_id, uint8_t ant_id)
362 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
363 print_err("Invalid CC ID - %d", cc_id);
366 if(ant_id >= XRAN_MAX_ANTENNA_NR) {
367 print_err("Invalid antenna ID - %d", ant_id);
371 /* Only U-Plane DL needs to get sequence ID in O-DU */
372 return(&xran_updl_seq_id_num[cc_id][ant_id]);
/**
 * Validate the sequence id of a received U-plane UL packet: the local
 * counter is pre-incremented to the expected value and compared with the
 * received seq_id. On mismatch the counter is resynchronized to the
 * received value so subsequent packets are checked against it.
 * @return XRAN_STATUS_SUCCESS when the id matches (error return on
 *         mismatch is on a line not shown here)
 */
374 static inline int8_t xran_check_upul_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id, uint8_t seq_id)
377 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
378 print_err("Invalid CC ID - %d", cc_id);
382 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) {
383 print_err("Invalid antenna ID - %d", ant_id);
387 /* O-DU needs to check the sequence ID of U-Plane UL from O-RU */
388 xran_upul_seq_id_num[cc_id][ant_id]++;
389 if(xran_upul_seq_id_num[cc_id][ant_id] == seq_id) { /* expected sequence */
390 return (XRAN_STATUS_SUCCESS);
392 print_err("expected seqid %u received %u, slot %u, ant %u cc %u", xran_upul_seq_id_num[cc_id][ant_id], seq_id, slot_id, ant_id, cc_id);
393 xran_upul_seq_id_num[cc_id][ant_id] = seq_id; // for next
398 //////////////////////////////////////////
/**
 * Return the next U-plane UL sequence id for (cc_id, ant_id)
 * (post-increment; ant_id range includes PRACH ports, hence "* 2").
 */
400 static inline uint8_t xran_get_upul_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id)
402 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
403 print_err("Invalid CC ID - %d", cc_id);
406 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) {
407 print_err("Invalid antenna ID - %d", ant_id);
411 return(xran_upul_seq_id_num[cc_id][ant_id]++);
/**
 * Return a pointer to the U-plane UL sequence-id counter for
 * (cc_id, ant_id) so callers can update it in place.
 */
413 static inline uint8_t *xran_get_upul_seqid_addr(void *pHandle, uint8_t cc_id, uint8_t ant_id)
415 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
416 print_err("Invalid CC ID - %d", cc_id);
419 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) {
420 print_err("Invalid antenna ID - %d", ant_id);
424 return(&xran_upul_seq_id_num[cc_id][ant_id]);
/**
 * Validate the sequence id of a received C-plane packet: pre-increment the
 * local counter to the expected value and compare with seq_id; on mismatch
 * resynchronize the counter to the received value.
 * Success/failure return statements are on lines not shown here.
 */
426 static inline int8_t xran_check_cp_seqid(void *pHandle, uint8_t dir, uint8_t cc_id, uint8_t ant_id, uint8_t seq_id)
428 if(dir >= XRAN_DIR_MAX) {
429 print_err("Invalid direction - %d", dir);
432 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
433 print_err("Invalid CC ID - %d", cc_id);
436 if(ant_id >= XRAN_MAX_ANTENNA_NR * 2) {
437 print_err("Invalid antenna ID - %d", ant_id);
441 xran_cp_seq_id_num[cc_id][dir][ant_id]++;
442 if(xran_cp_seq_id_num[cc_id][dir][ant_id] == seq_id) { /* expected sequence */
446 xran_cp_seq_id_num[cc_id][dir][ant_id] = seq_id;
/**
 * Validate the sequence id of a received U-plane DL packet (O-RU side):
 * pre-increment the local counter to the expected value, compare, and on
 * mismatch resynchronize to the received value.
 * Success/failure return statements are on lines not shown here.
 */
450 static inline int8_t xran_check_updl_seqid(void *pHandle, uint8_t cc_id, uint8_t ant_id, uint8_t slot_id, uint8_t seq_id)
452 if(cc_id >= XRAN_MAX_CELLS_PER_PORT) {
453 print_err("Invalid CC ID - %d", cc_id);
457 if(ant_id >= XRAN_MAX_ANTENNA_NR) {
458 print_err("Invalid antenna ID - %d", ant_id);
462 /* O-RU needs to check the sequence ID of U-Plane DL from O-DU */
463 xran_updl_seq_id_num[cc_id][ant_id]++;
464 if(xran_updl_seq_id_num[cc_id][ant_id] == seq_id) {
465 /* expected sequence */
468 xran_updl_seq_id_num[cc_id][ant_id] = seq_id;
/* Scratch storage reused by every process_cplane() call.
 * NOTE(review): shared static state makes this path non-reentrant -
 * presumably only ever called from a single core; confirm. */
474 static struct xran_section_gen_info cpSections[XRAN_MAX_NUM_SECTIONS];
475 static struct xran_cp_gen_params cpInfo;
/**
 * Parse a received C-plane packet into the shared cpInfo/cpSections
 * buffers (return statement on a line not shown here).
 */
476 int process_cplane(struct rte_mbuf *pkt)
478 struct xran_recv_packet_info recv;
480 cpInfo.sections = cpSections;
481 xran_parse_cp_pkt(pkt, &cpInfo, &recv);
485 //////////////////////////////////////////
/**
 * Per-symbol over-the-air timer callback: fires tti_ota_cb at symbol 0,
 * (re)arms the TTI-to-PHY delivery timer at symbol 3, drives TX symbol
 * processing, and dispatches any registered per-symbol callback using the
 * round-robin dpdk_timer pool.
 */
487 void sym_ota_cb(struct rte_timer *tim, void *arg)
489 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
490 struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
491 long t1 = MLogTick();
492 static int32_t ctx = 0;
/* symbol 0 of a slot: run the TTI-level processing first */
494 if(XranGetSymNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT) == 0){
495 tti_ota_cb(NULL, arg);
498 if(XranGetSymNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT) == 3){
499 if(p_xran_dev_ctx->phy_tti_cb_done == 0){
500 /* rearm timer to deliver TTI event to PHY */
/* NOTE(review): writes 0 after checking == 0 - presumably this was
 * meant to set the flag to 1 to mark delivery armed; TODO confirm. */
501 p_xran_dev_ctx->phy_tti_cb_done = 0;
502 xran_timer_arm_ex(&tti_to_phy_timer[xran_lib_ota_tti % 10], tti_to_phy_cb, (void*)pTCtx, p_xran_dev_ctx->fh_init.io_cfg.timing_core);
506 xran_process_tx_sym(timer_ctx);
507 /* check if there is call back to do something else on this symbol */
508 if(p_xran_dev_ctx->pSymCallback[0][xran_lib_ota_sym]){
509 p_xran_dev_ctx->pSymCallback[0][xran_lib_ota_sym](&dpdk_timer[ctx], p_xran_dev_ctx->pSymCallbackTag[0][xran_lib_ota_sym]);
510 ctx = DpdkTiemerIncrementCtx(ctx);
/* wrap the OTA symbol counter at slot boundary */
514 if(xran_lib_ota_sym >= N_SYM_PER_SLOT){
518 MLogTask(PID_SYM_OTA_CB, t1, MLogTick());
/**
 * Per-TTI over-the-air callback: logs the slot change, records the TTI to
 * process in the double-buffered timer_ctx (index (tti & 1) ^ 1 for the
 * current TTI, (tti & 1) for the next one), computes the next TTI (O-DU
 * works one TTI ahead of OTA time; O-RU works on the current TTI), and
 * arms the timer that delivers the TTI event to the PHY.
 */
521 void tti_ota_cb(struct rte_timer *tim, void *arg)
523 uint32_t frame_id = 0;
524 uint32_t subframe_id = 0;
525 uint32_t slot_id = 0;
526 uint32_t next_tti = 0;
528 uint32_t mlogVar[10];
529 uint32_t mlogVarCnt = 0;
530 uint64_t t1 = MLogTick();
532 uint32_t reg_tti = 0;
533 struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
534 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
536 MLogTask(PID_TTI_TIMER, t1, MLogTick());
/* register the PREVIOUS tti for MLog (wraps to the last slot at tti 0) */
539 if(xran_lib_ota_tti == 0)
540 reg_tti = xran_fs_get_max_slot() - 1;
542 reg_tti = xran_lib_ota_tti -1;
543 MLogIncrementCounter();
544 /* subframe and slot */
545 MLogRegisterFrameSubframe(((reg_tti/SLOTNUM_PER_SUBFRAME) % SUBFRAMES_PER_SYSTEMFRAME),
546 reg_tti % (SLOTNUM_PER_SUBFRAME));
549 slot_id = XranGetSlotNum(xran_lib_ota_tti, SLOTNUM_PER_SUBFRAME);
550 subframe_id = XranGetSubFrameNum(xran_lib_ota_tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
551 frame_id = XranGetFrameNum(xran_lib_ota_tti,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
/* publish the current TTI to the "current" half of the double buffer */
553 pTCtx[(xran_lib_ota_tti & 1) ^ 1].tti_to_process = xran_lib_ota_tti;
555 mlogVar[mlogVarCnt++] = 0x11111111;
556 mlogVar[mlogVarCnt++] = xran_lib_ota_tti;
557 mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx;
558 mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx / 14;
559 mlogVar[mlogVarCnt++] = frame_id;
560 mlogVar[mlogVarCnt++] = subframe_id;
561 mlogVar[mlogVarCnt++] = slot_id;
562 mlogVar[mlogVarCnt++] = 0;
563 MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
/* O-DU (lls-CU) prepares the NEXT TTI; O-RU works on the current one */
565 if(p_xran_dev_ctx->fh_init.io_cfg.id == ID_LLS_CU)
566 next_tti = xran_lib_ota_tti + 1;
568 next_tti = xran_lib_ota_tti;
570 if(next_tti>= xran_fs_get_max_slot()){
571 print_dbg("[%d]SFN %d sf %d slot %d\n",next_tti, frame_id, subframe_id, slot_id);
575 slot_id = XranGetSlotNum(next_tti, SLOTNUM_PER_SUBFRAME);
576 subframe_id = XranGetSubFrameNum(next_tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
577 frame_id = XranGetFrameNum(next_tti,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
579 print_dbg("[%d]SFN %d sf %d slot %d\n",next_tti, frame_id, subframe_id, slot_id);
581 if(p_xran_dev_ctx->fh_init.io_cfg.id == ID_LLS_CU){
582 pTCtx[(xran_lib_ota_tti & 1)].tti_to_process = next_tti;
584 pTCtx[(xran_lib_ota_tti & 1)].tti_to_process = pTCtx[(xran_lib_ota_tti & 1)^1].tti_to_process;
587 p_xran_dev_ctx->phy_tti_cb_done = 0;
588 xran_timer_arm_ex(&tti_to_phy_timer[xran_lib_ota_tti % 10], tti_to_phy_cb, (void*)pTCtx, p_xran_dev_ctx->fh_init.io_cfg.timing_core);
/* wrap the OTA TTI counter at the end of the system frame period */
591 if(xran_lib_ota_tti >= xran_fs_get_max_slot()){
592 print_dbg("[%d]SFN %d sf %d slot %d\n",xran_lib_ota_tti, frame_id, subframe_id, slot_id);
595 MLogTask(PID_TTI_CB, t1, MLogTick());
/**
 * One-shot arm of a DPDK timer on the timing core with 'arg' interpreted
 * as the callback function; the callback always receives &timer_ctx[0].
 * No-op unless the xRAN interface is in RUNNING state.
 */
598 void xran_timer_arm(struct rte_timer *tim, void* arg)
600 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
601 uint64_t t3 = MLogTick();
603 if (xran_if_current_state == XRAN_RUNNING){
604 rte_timer_cb_t fct = (rte_timer_cb_t)arg;
/* ticks=0, SINGLE: fire once, as soon as possible, on the timing core */
606 rte_timer_reset_sync(tim, 0, SINGLE, p_xran_dev_ctx->fh_init.io_cfg.timing_core, fct, &timer_ctx[0]);
608 MLogTask(PID_TIME_ARM_TIMER, t3, MLogTick());
/**
 * Extended one-shot timer arm: caller supplies callback, argument and the
 * lcore to fire on. No-op unless the interface is RUNNING.
 * NOTE(review): p_xran_dev_ctx is fetched but never used here.
 */
611 void xran_timer_arm_ex(struct rte_timer *tim, void* CbFct, void *CbArg, unsigned tim_lcore)
613 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
614 uint64_t t3 = MLogTick();
616 if (xran_if_current_state == XRAN_RUNNING){
617 rte_timer_cb_t fct = (rte_timer_cb_t)CbFct;
619 rte_timer_reset_sync(tim, 0, SINGLE, tim_lcore, fct, CbArg);
621 MLogTask(PID_TIME_ARM_TIMER, t3, MLogTick());
/**
 * Build a Section Type 1 C-plane message from a PRB map (one section per
 * PRB element), send it, and register the sections in the section DB for
 * later U-plane matching.
 * NOTE(review): the sequence id is always drawn with XRAN_DIR_DL even
 * though a 'dir' parameter exists - verify for the UL caller.
 * NOTE(review): '¶ms' and '§_geninfo' below are mojibake for
 * '&params' and '&sect_geninfo' (HTML-entity corruption) - fix encoding.
 */
624 int xran_cp_create_and_send_section(void *pHandle, uint8_t ru_port_id, int dir, int tti, int cc_id,
625 struct xran_prb_map *prbMapElem)
627 struct xran_cp_gen_params params;
628 struct xran_section_gen_info sect_geninfo[XRAN_MAX_NUM_SECTIONS];
629 struct rte_mbuf *mbuf;
632 uint32_t nsection = prbMapElem->nPrbElm;
633 struct xran_prb_elm *pPrbMapElem = &prbMapElem->prbMap[0];
634 struct xran_prb_elm *pPrbMapElemPrev;
635 uint32_t slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
636 uint32_t subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
637 uint32_t frame_id = XranGetFrameNum(tti,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
638 uint8_t seq_id = xran_get_cp_seqid(pHandle, XRAN_DIR_DL, cc_id, ru_port_id);
/* common radio application header, shared by all sections */
641 params.sectionType = XRAN_CP_SECTIONTYPE_1; // Most DL/UL Radio Channels
642 params.hdr.filterIdx = XRAN_FILTERINDEX_STANDARD;
643 params.hdr.frameId = frame_id;
644 params.hdr.subframeId = subframe_id;
645 params.hdr.slotId = slot_id;
646 params.hdr.startSymId = pPrbMapElem->nStartSymb;
647 params.hdr.iqWidth = xran_get_conf_iqwidth(pHandle);
648 params.hdr.compMeth = pPrbMapElem->compMethod;
/* one section per PRB-map element */
650 for (i=0; i<nsection; i++)
652 pPrbMapElem = &prbMapElem->prbMap[i];
653 sect_geninfo[i].info.type = params.sectionType; // for database
654 sect_geninfo[i].info.startSymId = params.hdr.startSymId; // for database
655 sect_geninfo[i].info.iqWidth = params.hdr.iqWidth; // for database
656 sect_geninfo[i].info.compMeth = params.hdr.compMeth; // for database
657 sect_geninfo[i].info.id = xran_alloc_sectionid(pHandle, dir, cc_id, ru_port_id, slot_id);
658 sect_geninfo[i].info.rb = XRAN_RBIND_EVERY;
659 sect_geninfo[i].info.startPrbc = pPrbMapElem->nRBStart;
660 sect_geninfo[i].info.numPrbc = pPrbMapElem->nRBSize;
661 sect_geninfo[i].info.numSymbol = pPrbMapElem->numSymb;
662 sect_geninfo[i].info.reMask = 0xfff;
663 sect_geninfo[i].info.beamId = pPrbMapElem->nBeamIndex;
665 sect_geninfo[i].info.symInc = XRAN_SYMBOLNUMBER_NOTINC;
/* symInc: NOTINC when this element shares the previous element's start
 * symbol, INC when it continues directly after it; anything else is a
 * malformed PRB map and is reported. */
668 pPrbMapElemPrev = &prbMapElem->prbMap[i-1];
669 if (pPrbMapElemPrev->nStartSymb == pPrbMapElem->nStartSymb)
671 sect_geninfo[i].info.symInc = XRAN_SYMBOLNUMBER_NOTINC;
672 if (pPrbMapElemPrev->numSymb != pPrbMapElem->numSymb)
673 print_err("section info error: previous numSymb %d not equal to current numSymb %d\n", pPrbMapElemPrev->numSymb, pPrbMapElem->numSymb);
677 sect_geninfo[i].info.symInc = XRAN_SYMBOLNUMBER_INC;
678 if (pPrbMapElem->nStartSymb != (pPrbMapElemPrev->nStartSymb + pPrbMapElemPrev->numSymb))
679 print_err("section info error: current startSym %d not equal to previous endSymb %d\n", pPrbMapElem->nStartSymb, pPrbMapElemPrev->nStartSymb + pPrbMapElemPrev->numSymb);
683 /* extension is not supported */
684 sect_geninfo[nsection].info.ef = 0;
685 sect_geninfo[nsection].exDataSize = 0;
686 //sect_geninfo[nsection].exData = NULL;
688 params.numSections = nsection;
689 params.sections = sect_geninfo;
691 mbuf = xran_ethdi_mbuf_alloc();
692 if(unlikely(mbuf == NULL)) {
693 print_err("Alloc fail!\n");
697 ret = xran_prepare_ctrl_pkt(mbuf, ¶ms, cc_id, ru_port_id, seq_id);
699 print_err("Fail to build control plane packet - [%d:%d:%d] dir=%d\n",
700 frame_id, subframe_id, slot_id, dir);
702 /* add in the ethernet header */
703 struct ether_hdr *const h = (void *)rte_pktmbuf_prepend(mbuf, sizeof(*h));
704 xran_ethdi_mbuf_send_cp(mbuf, ETHER_TYPE_ECPRI);
/* remember each section so U-plane packets can be matched against it */
706 for(i=0; i<nsection; i++)
707 xran_cp_add_section_info(pHandle,
708 dir, cc_id, ru_port_id,
709 (slot_id + subframe_id*SLOTNUM_PER_SUBFRAME)%XRAN_MAX_SECTIONDB_CTX,
710 §_geninfo[i].info);
/**
 * Build a 2-D (symbol x PRB) allocation list from per-symbol PRB maps,
 * merging symbol-consecutive entries with identical RB range / beam /
 * compression into a single list element (sym_num accumulates).
 * Returns the number of list entries (return on a line not shown here).
 * NOTE(review): line 725 reads 'prbMapElm[0]' while the parameter is named
 * 'prbMapElem' - verify against the original source (likely a typo).
 */
716 int xran_cp_create_rbmap(int dir, int tti, int cc_id,
717 struct xran_flat_buffer *prbMapElem,
718 struct xran_cp_rbmap_list *rbMapList)
720 struct xran_prb_map *prb_map;
/* cc/tti are taken from the first PRB map, overriding the arguments */
725 prb_map = (struct xran_prb_map *)prbMapElm[0].pData;
726 cc_id = prb_map->cc_id;
727 tti = prb_map->tti_id;
732 for(sym_id = 0; sym_id < N_SYM_PER_SLOT; sym_id++) {
733 /* skip symbol, if not matched with given direction */
734 int type_sym = xran_fs_get_symbol_type(cc_id, tti, sym_id);
735 if(type_sym != XRAN_SYMBOL_TYPE_FDD && type_sym != dir)
738 /* retrieve the information of RB allocation */
739 prb_map = (struct xran_prb_map *)prbMapElem[sym_id].pData;
740 if(unlikely(prb_map == NULL)) {
741 print_err("RB allocation table is NULL! (tti:%d, cc:%d, sym_id:%d)", tti, cc_id, sym_id);
745 /* creating 2D mapping */
746 for(i=0; i < prb_map->nPrbElm; i++) {
747 if(list_index < 0) { /* create first entry */
749 rbMapList[list_index].grp_id = 0;
750 rbMapList[list_index].sym_start = sym_id; // prb_map->sym_id
751 rbMapList[list_index].sym_num = 1;
752 rbMapList[list_index].rb_start = prb_map->prbMap[i].nRBStart;
753 rbMapList[list_index].rb_num = prb_map->prbMap[i].nRBSize;
754 rbMapList[list_index].beam_id = prb_map->prbMap[i].nBeamIndex;
755 rbMapList[list_index].comp_meth = prb_map->prbMap[i].compMethod;
758 /* find consecutive allocation from list */
759 for(j=0; j<list_index+1; j++) {
760 if(prb_map->prbMap[i].nRBStart != rbMapList[j].rb_start
761 || prb_map->prbMap[i].nRBSize != rbMapList[j].rb_num
762 || prb_map->prbMap[i].nBeamIndex != rbMapList[j].beam_id
763 || prb_map->prbMap[i].compMethod != rbMapList[j].comp_meth
764 || sym_id != (rbMapList[j].sym_start+rbMapList[j].sym_num)) {
769 /* consecutive allocation has been found */
770 rbMapList[j].sym_num++;
775 if(j == list_index+1) { /* different allocation, create new entry */
777 rbMapList[list_index].grp_id = 0;
778 rbMapList[list_index].sym_start = sym_id; // prb_map->sym_id
779 rbMapList[list_index].sym_num = 1;
780 rbMapList[list_index].rb_start = prb_map->prbMap[i].nRBStart;
781 rbMapList[list_index].rb_num = prb_map->prbMap[i].nRBSize;
782 rbMapList[list_index].beam_id = prb_map->prbMap[i].nBeamIndex;
783 rbMapList[list_index].comp_meth = prb_map->prbMap[i].compMethod;
786 } /* for(i=0; i < prb_map->nPrbElm; i++) */
787 } /* for(sym_id = 0; sym_id < N_SYM_PER_SLOT; sym_id++) */
/* debug dump of the resulting list */
792 for(i=0; i<list_index; i++) {
793 printf("[%c:%d-%d] %d - symstart=%d, symnum=%d, rbstart=%d, rbnum=%d, beamid=%d, comp=%d\n",
794 dir?'U':'D', tti, cc_id, i,
795 rbMapList[i].sym_start, rbMapList[i].sym_num,
796 rbMapList[i].rb_start, rbMapList[i].rb_num,
797 rbMapList[i].beam_id, rbMapList[i].comp_meth);
/**
 * Timer callback generating DL C-plane messages for the TTI recorded in
 * timer_ctx. For each eAxC/CC with DL in this slot: dynamic mode delegates
 * to xran_cp_create_and_send_section(); static mode sends one full-slot
 * message per rb_map_list entry via generate_cpmsg_dlul()/send_cpmsg().
 * NOTE(review): '¶ms' below is mojibake for '&params' - fix encoding.
 * NOTE(review): the static-mode send_cpmsg() draws its sequence id with
 * XRAN_DIR_UL although this is the DL path - verify.
 */
804 void tx_cp_dl_cb(struct rte_timer *tim, void *arg)
806 long t1 = MLogTick();
809 uint32_t slot_id, subframe_id, frame_id;
812 uint8_t ant_id, num_eAxc, num_CCPorts;
815 struct xran_cp_rbmap_list rb_map_list[XRAN_MAX_PRBS*N_SYM_PER_SLOT]; /* array size can be reduced */
817 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
818 struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
820 pHandle = NULL; // TODO: temp implemantation
821 num_eAxc = xran_get_num_eAxc(pHandle);
822 num_CCPorts = xran_get_num_cc(pHandle);
824 if(p_xran_dev_ctx->enableCP) {
/* TTI published by tti_ota_cb for the current slot */
826 tti = pTCtx[(xran_lib_ota_tti & 1) ^ 1].tti_to_process;
827 buf_id = tti % XRAN_N_FE_BUF_LEN;
829 slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
830 subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
831 frame_id = XranGetFrameNum(tti,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
832 ctx_id = XranGetSlotNum(tti, SLOTS_PER_SYSTEMFRAME) % XRAN_MAX_SECTIONDB_CTX;
834 print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);
836 for(ant_id = 0; ant_id < num_eAxc; ++ant_id) {
837 for(cc_id = 0; cc_id < num_CCPorts; cc_id++ ) {
839 /* start new section information list */
840 xran_cp_reset_section_info(pHandle, XRAN_DIR_DL, cc_id, ant_id, ctx_id);
842 if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_DL) == 1) { // 1 when FDD, DL slot or DL symbol is present in SP slot
843 if (p_xran_dev_ctx->DynamicSectionEna){
844 num_list = xran_cp_create_and_send_section(pHandle, ant_id, XRAN_SYMBOL_TYPE_DL, tti, cc_id,
845 (struct xran_prb_map *)p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData);
848 struct xran_cp_gen_params params;
849 struct xran_section_gen_info sect_geninfo[8];
850 struct rte_mbuf *mbuf = xran_ethdi_mbuf_alloc();
852 /* use symb 0 only with constant RBs for full slot */
853 struct xran_prb_map *prb_map = (struct xran_prb_map *)p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData;
855 rb_map_list[0].sym_start = 0;
856 rb_map_list[0].sym_num = 14;
857 rb_map_list[0].rb_start = prb_map->prbMap[0].nRBStart;
858 rb_map_list[0].rb_num = prb_map->prbMap[0].nRBSize;
859 rb_map_list[0].beam_id = prb_map->prbMap[0].nBeamIndex;
860 rb_map_list[0].comp_meth = prb_map->prbMap[0].compMethod;
862 for(i=0; i<num_list; i++) {
863 ret = generate_cpmsg_dlul(pHandle, ¶ms, sect_geninfo, mbuf, XRAN_DIR_DL,
864 frame_id, subframe_id, slot_id,
865 rb_map_list[i].sym_start, rb_map_list[i].sym_num,
866 rb_map_list[i].rb_start, rb_map_list[i].rb_num,
867 rb_map_list[i].beam_id, cc_id, ant_id, rb_map_list[i].comp_meth,
868 xran_get_cp_seqid(pHandle, XRAN_DIR_DL, cc_id, ant_id), XRAN_SYMBOLNUMBER_NOTINC);
870 if (ret == XRAN_STATUS_SUCCESS)
871 send_cpmsg(pHandle, mbuf, ¶ms, sect_geninfo,
872 cc_id, ant_id, xran_get_cp_seqid(pHandle, XRAN_DIR_UL, cc_id, ant_id));
875 } /* if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_DL) == 1) */
876 } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
877 } /* for(ant_id = 0; ant_id < num_eAxc; ++ant_id) */
878 } /* if(p_xran_dev_ctx->enableCP) */
880 MLogTask(PID_CP_DL_CB, t1, MLogTick());
/**
 * Deadline callback at the half-slot point of UL reception: for every CC
 * whose first-half packets have not yet triggered the app callback,
 * deliver a status of (tti << 16 | 0) meaning "first 7 symbols of slot".
 * The per-CC tracker is then cleared for the next slot.
 */
883 void rx_ul_deadline_half_cb(struct rte_timer *tim, void *arg)
885 long t1 = MLogTick();
886 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
887 xran_status_t status;
888 /* half of RX for current TTI as measured against current OTA time */
889 int32_t rx_tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
/* PHY buffers not ready yet: nothing to deliver */
892 if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
895 for(cc_id = 0; cc_id < xran_get_num_cc(p_xran_dev_ctx); cc_id++) {
896 if(p_xran_dev_ctx->rx_packet_callback_tracker[rx_tti % XRAN_N_FE_BUF_LEN][cc_id] == 0){
897 status = (rx_tti << 16) | 0; /* base on PHY side implementation first 7 sym of slot */
898 if(p_xran_dev_ctx->pCallback[cc_id])
899 p_xran_dev_ctx->pCallback[cc_id](p_xran_dev_ctx->pCallbackTag[cc_id], status);
901 p_xran_dev_ctx->rx_packet_callback_tracker[rx_tti % XRAN_N_FE_BUF_LEN][cc_id] = 0;
904 MLogTask(PID_UP_UL_HALF_DEAD_LINE_CB, t1, MLogTick());
/**
 * Deadline callback at the end of UL reception for the previous TTI:
 * delivers status (tti << 16 | 7) meaning "full slot received" to the
 * per-CC application callback and to the PRACH callback. rx_tti wraps to
 * the last slot of the period when the OTA counter is at slot 0.
 */
907 void rx_ul_deadline_full_cb(struct rte_timer *tim, void *arg)
909 long t1 = MLogTick();
910 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
911 xran_status_t status = 0;
912 int32_t rx_tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
/* wrap-around: OTA at slot 0 means the previous TTI was the last slot */
916 rx_tti = (xran_fs_get_max_slot()-1);
918 rx_tti -= 1; /* end of RX for prev TTI as measured against current OTA time */
920 if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
924 for(cc_id = 0; cc_id < xran_get_num_cc(p_xran_dev_ctx); cc_id++) {
925 status = (rx_tti << 16) | 7; /* last 7 sym means full slot of Symb */
926 if(p_xran_dev_ctx->pCallback[cc_id])
927 p_xran_dev_ctx->pCallback[cc_id](p_xran_dev_ctx->pCallbackTag[cc_id], status);
929 if(p_xran_dev_ctx->pPrachCallback[cc_id])
930 p_xran_dev_ctx->pPrachCallback[cc_id](p_xran_dev_ctx->pPrachCallbackTag[cc_id], status);
934 MLogTask(PID_UP_UL_FULL_DEAD_LINE_CB, t1, MLogTick());
/**
 * Timer callback generating UL C-plane messages for the TTI recorded in
 * timer_ctx: Section Type 1 messages for UL/special slots (dynamic or
 * static RB mapping, mirroring tx_cp_dl_cb) and, when PRACH is enabled
 * and this is a PRACH occasion (frame matches x/y pattern and
 * xran_isPRACHSlot()), Section Type 3 PRACH messages on the dedicated
 * PRACH eAxC ports (ant_id + num_eAxc).
 * NOTE(review): '¶ms' below is mojibake for '&params' - fix encoding.
 */
938 void tx_cp_ul_cb(struct rte_timer *tim, void *arg)
940 long t1 = MLogTick();
943 uint32_t slot_id, subframe_id, frame_id;
945 int ant_id, prach_port_id;
947 uint8_t num_eAxc, num_CCPorts;
952 struct xran_cp_rbmap_list rb_map_list[XRAN_MAX_PRBS*N_SYM_PER_SLOT]; /* array size can be reduced */
954 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
955 xRANPrachCPConfigStruct *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
956 struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
958 pHandle = NULL; // TODO: temp implemantation
959 num_eAxc = xran_get_num_eAxc(pHandle);
960 num_CCPorts = xran_get_num_cc(pHandle);
961 tti = pTCtx[(xran_lib_ota_tti & 1) ^ 1].tti_to_process;
962 buf_id = tti % XRAN_N_FE_BUF_LEN;
963 slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
964 subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
965 frame_id = XranGetFrameNum(tti,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
966 ctx_id = XranGetSlotNum(tti, SLOTS_PER_SYSTEMFRAME) % XRAN_MAX_SECTIONDB_CTX;
968 if(p_xran_dev_ctx->enableCP) {
970 print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);
972 for(ant_id = 0; ant_id < num_eAxc; ++ant_id) {
973 for(cc_id = 0; cc_id < num_CCPorts; cc_id++) {
974 if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_UL) == 1 ||
975 xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_SP) == 1 ){
976 /* start new section information list */
977 xran_cp_reset_section_info(pHandle, XRAN_DIR_UL, cc_id, ant_id, ctx_id);
978 if (p_xran_dev_ctx->DynamicSectionEna){
979 /* create a map of RB allocation to generate proper C-Plane */
980 num_list = xran_cp_create_and_send_section(pHandle, ant_id, XRAN_SYMBOL_TYPE_UL, tti, cc_id,
981 (struct xran_prb_map *)p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData);
/* static mode: single full-slot allocation taken from the RX PRB map */
984 struct xran_cp_gen_params params;
985 struct xran_section_gen_info sect_geninfo[8];
986 struct rte_mbuf *mbuf = xran_ethdi_mbuf_alloc();
987 /* use symb 0 only with constant RBs for full slot */
988 struct xran_prb_map *prb_map = (struct xran_prb_map *)p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[buf_id][cc_id][ant_id].sBufferList.pBuffers->pData;
990 rb_map_list[0].sym_start = 0;
991 rb_map_list[0].sym_num = 14;
992 rb_map_list[0].rb_start = prb_map->prbMap[0].nRBStart;
993 rb_map_list[0].rb_num = prb_map->prbMap[0].nRBSize;
994 rb_map_list[0].beam_id = prb_map->prbMap[0].nBeamIndex;
995 rb_map_list[0].comp_meth = prb_map->prbMap[0].compMethod;
997 for(i=0; i<num_list; i++) {
998 ret = generate_cpmsg_dlul(pHandle, ¶ms, sect_geninfo, mbuf, XRAN_DIR_UL,
999 frame_id, subframe_id, slot_id,
1000 rb_map_list[i].sym_start, rb_map_list[i].sym_num,
1001 rb_map_list[i].rb_start, rb_map_list[i].rb_num,
1002 rb_map_list[i].beam_id, cc_id, ant_id, rb_map_list[i].comp_meth,
1003 xran_get_cp_seqid(pHandle, XRAN_DIR_UL, cc_id, ant_id), XRAN_SYMBOLNUMBER_NOTINC);
1004 if (ret == XRAN_STATUS_SUCCESS)
1005 send_cpmsg(pHandle, mbuf, ¶ms, sect_geninfo,
1006 cc_id, ant_id, xran_get_cp_seqid(pHandle, XRAN_DIR_UL, cc_id, ant_id));
1009 } /* if(xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_UL) == 1 || */
1011 } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
1012 } /* for(ant_id = 0; ant_id < num_eAxc; ++ant_id) */
1013 } /* if(p_xran_dev_ctx->enableCP) */
/* PRACH C-plane: only on frames matching the x/y pattern and PRACH slots */
1015 if(p_xran_dev_ctx->enablePrach) {
1016 uint32_t isPRACHslot = xran_isPRACHSlot(subframe_id, slot_id);
1017 if((frame_id % pPrachCPConfig->x == pPrachCPConfig->y[0]) && (isPRACHslot==1)) { //is prach slot
1018 for(ant_id = 0; ant_id < num_eAxc; ++ant_id) {
1019 for(cc_id = 0; cc_id < num_CCPorts; cc_id++) {
1020 struct xran_cp_gen_params params;
1021 struct xran_section_gen_info sect_geninfo[8];
1022 struct rte_mbuf *mbuf = xran_ethdi_mbuf_alloc();
/* PRACH uses dedicated eAxC ports above the normal antenna range */
1023 prach_port_id = ant_id + num_eAxc;
1024 /* start new section information list */
1025 xran_cp_reset_section_info(pHandle, XRAN_DIR_UL, cc_id, prach_port_id, ctx_id);
1027 beam_id = xran_get_beamid(pHandle, XRAN_DIR_UL, cc_id, prach_port_id, slot_id); /* TODO: */
1028 ret = generate_cpmsg_prach(pHandle, ¶ms, sect_geninfo, mbuf, p_xran_dev_ctx,
1029 frame_id, subframe_id, slot_id,
1030 beam_id, cc_id, prach_port_id,
1031 xran_get_cp_seqid(pHandle, XRAN_DIR_UL, cc_id, prach_port_id));
1032 if (ret == XRAN_STATUS_SUCCESS)
1033 send_cpmsg(pHandle, mbuf, ¶ms, sect_geninfo,
1034 cc_id, prach_port_id, xran_get_cp_seqid(pHandle, XRAN_DIR_UL, cc_id, prach_port_id));
1040 MLogTask(PID_CP_UL_CB, t1, MLogTick());
/*
 * ul_up_full_slot_cb - rte_timer callback fired at the full-slot UL deadline.
 * From the visible lines it only brackets its work with MLogTick()/MLogTask()
 * instrumentation; the actual body (original lines 1044/1046/1048, incl. the
 * braces) is not present in this listing.
 * NOTE(review): logs under PID_TTI_CB_TO_PHY, same task id as tti_to_phy_cb —
 * confirm this is intentional and not a copy/paste of the wrong MLog task id.
 */
1043 void ul_up_full_slot_cb(struct rte_timer *tim, void *arg)
1045     long t1 = MLogTick();
1047     MLogTask(PID_TTI_CB_TO_PHY, t1, MLogTick());
/*
 * tti_to_phy_cb - rte_timer callback that delivers the per-TTI notification to
 * the L1/PHY layer.  Marks the callback as delivered (phy_tti_cb_done) and, if
 * a TTI callback was registered and no skip is pending, invokes it with its
 * registered parameter; a pending SkipTti count is decremented instead.
 * NOTE(review): listing is non-contiguous (original lines 1051/1054/1057,
 * 1061-1065, 1069-1073 missing), so else-branches and closing braces are not
 * visible here.
 */
1050 void tti_to_phy_cb(struct rte_timer *tim, void *arg)
1052     long t1 = MLogTick();
1053     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1055     static int first_call = 0;
1056     p_xran_dev_ctx->phy_tti_cb_done = 1; /* DPDK called CB */
     /* Invoke the registered TTI callback unless a skip count is pending. */
1058     if(p_xran_dev_ctx->ttiCb[XRAN_CB_TTI]){
1059         if(p_xran_dev_ctx->SkipTti[XRAN_CB_TTI] <= 0){
1060             p_xran_dev_ctx->ttiCb[XRAN_CB_TTI](p_xran_dev_ctx->TtiCbParam[XRAN_CB_TTI]);
1062             p_xran_dev_ctx->SkipTti[XRAN_CB_TTI]--;
     /* Second branch (before first_call is set, per elided lines): derive the
        current TTI from the OTA symbol index; the action taken at the last
        slot of the frame (line 1068's body) is elided from this listing. */
1066     if(p_xran_dev_ctx->ttiCb[XRAN_CB_TTI]){
1067         int32_t tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
1068         if(tti == xran_fs_get_max_slot()-1)
1074     MLogTask(PID_TTI_CB_TO_PHY, t1, MLogTick());
/*
 * xran_timing_source_thread - dedicated real-time timing thread.
 *
 * Responsibilities visible in this listing:
 *  - pins itself to the configured timing core and raises itself to a
 *    high real-time priority (sched_priority = 98, policy 1 = SCHED_FIFO);
 *  - waits for Top-of-Second (ToS) alignment by spinning on timespec_get()
 *    until tv_nsec is within ~1.5us of the second boundary;
 *  - for O-DU: converts the fronthaul timing windows (T1a_max_cp_dl/ul,
 *    T1a_max_up, Ta4_max) into symbol-granular trigger offsets and installs
 *    the per-symbol callbacks (C-plane DL/UL transmit, UL receive deadlines);
 *  - for O-RU: computes when to send UL U-plane from Ta3_min;
 *  - then runs the main tick loop, driving sym_ota_cb() every symbol period
 *    until the library is stopped.
 *
 * NOTE(review): listing is non-contiguous; loop headers (do{...}), some
 * declarations (ts, buff, cpuset, result1, interval_us, sym_* variables) and
 * closing braces fall on elided lines.
 */
1077 int xran_timing_source_thread(void *args)
1080 int32_t do_reset = 0;
     /* Per-plane timing advances/delays, all in microseconds. */
1084 uint32_t delay_cp_dl;
1085 uint32_t delay_cp_ul;
1087 uint32_t delay_up_ul;
1088 uint32_t delay_cp2up;
1093 struct sched_param sched_param;
1094 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1096 /* ToS = Top of Second start +- 1.5us */
1101 printf("%s [CPU %2d] [PID: %6d]\n", __FUNCTION__, rte_lcore_id(), getpid());
1103 /* set main thread affinity mask to CPU2 */
1104 sched_param.sched_priority = 98;
1107 CPU_SET(p_xran_dev_ctx->fh_init.io_cfg.timing_core, &cpuset);
     /* NOTE(review): assignment-in-condition below has no extra parens —
        works, but inconsistent with the parenthesized form two lines down. */
1108 if (result1 = pthread_setaffinity_np(pthread_self(), sizeof(cpu_set_t), &cpuset))
1110 printf("pthread_setaffinity_np failed: coreId = 2, result1 = %d\n",result1);
1112 if ((result1 = pthread_setschedparam(pthread_self(), 1, &sched_param)))
1114 printf("priority is not changed: coreId = 2, result1 = %d\n",result1);
     /* ===== O-DU (lls-CU) timing setup ===== */
1117 if (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
     /* Busy-wait until just after the top of a second (<=1.5us in). */
1119 timespec_get(&ts, TIME_UTC);
1120 }while (ts.tv_nsec >1500);
1121 struct tm * ptm = gmtime(&ts.tv_sec);
1123 strftime(buff, sizeof buff, "%D %T", ptm);
1124 printf("lls-CU: thread_run start time: %s.%09ld UTC [%ld]\n", buff, ts.tv_nsec, interval_us);
     /* Derive per-plane delays from the configured O-RAN timing windows. */
1127 delay_cp_dl = interval_us - p_xran_dev_ctx->fh_init.T1a_max_cp_dl;
1128 delay_cp_ul = interval_us - p_xran_dev_ctx->fh_init.T1a_max_cp_ul;
1129 delay_up = p_xran_dev_ctx->fh_init.T1a_max_up;
1130 delay_up_ul = p_xran_dev_ctx->fh_init.Ta4_max;
1132 delay_cp2up = delay_up-delay_cp_dl;
     /* Convert microsecond delays to symbol-index offsets
        (interval_us*1000/N_SYM_PER_SLOT = one symbol period in ns). */
1134 sym_cp_dl = delay_cp_dl*1000/(interval_us*1000/N_SYM_PER_SLOT)+1;
1135 sym_cp_ul = delay_cp_ul*1000/(interval_us*1000/N_SYM_PER_SLOT)+1;
1136 sym_up_ul = delay_up_ul*1000/(interval_us*1000/N_SYM_PER_SLOT);
     /* sym_up is negative for O-DU: U-plane DL is sent in advance of OTA. */
1137 p_xran_dev_ctx->sym_up = sym_up = -(delay_up*1000/(interval_us*1000/N_SYM_PER_SLOT)+1);
1138 p_xran_dev_ctx->sym_up_ul = sym_up_ul = (delay_up_ul*1000/(interval_us*1000/N_SYM_PER_SLOT)+1);
1140 printf("Start C-plane DL %d us after TTI [trigger on sym %d]\n", delay_cp_dl, sym_cp_dl);
1141 printf("Start C-plane UL %d us after TTI [trigger on sym %d]\n", delay_cp_ul, sym_cp_ul);
1142 printf("Start U-plane DL %d us before OTA [offset in sym %d]\n", delay_up, sym_up);
1143 printf("Start U-plane UL %d us OTA [offset in sym %d]\n", delay_up_ul, sym_up_ul);
1145 printf("C-plane to U-plane delay %d us after TTI\n", delay_cp2up);
1146 printf("Start Sym timer %ld ns\n", TX_TIMER_INTERVAL/N_SYM_PER_SLOT);
     /* Install per-symbol callbacks: C-plane DL/UL transmit triggers... */
1148 p_xran_dev_ctx->pSymCallback[0][sym_cp_dl] = xran_timer_arm;
1149 p_xran_dev_ctx->pSymCallbackTag[0][sym_cp_dl] = tx_cp_dl_cb;
1151 p_xran_dev_ctx->pSymCallback[0][sym_cp_ul] = xran_timer_arm;
1152 p_xran_dev_ctx->pSymCallbackTag[0][sym_cp_ul] = tx_cp_ul_cb;
1154 /* Full slot UL OTA + delay_up_ul */
1155 p_xran_dev_ctx->pSymCallback[0][sym_up_ul] = xran_timer_arm;
1156 p_xran_dev_ctx->pSymCallbackTag[0][sym_up_ul] = rx_ul_deadline_full_cb;
1158 /* Half slot UL OTA + delay_up_ul*/
1159 p_xran_dev_ctx->pSymCallback[0][sym_up_ul + N_SYM_PER_SLOT/2] = xran_timer_arm;
1160 p_xran_dev_ctx->pSymCallbackTag[0][sym_up_ul + N_SYM_PER_SLOT/2] = rx_ul_deadline_half_cb;
     /* ===== O-RU timing setup ===== */
1162 } else { // APP_O_RU
1163 /* calculate when to send UL U-plane */
1164 delay_up = p_xran_dev_ctx->fh_init.Ta3_min;
     /* sym_up is positive for O-RU: UL U-plane is sent after OTA. */
1165 p_xran_dev_ctx->sym_up = sym_up = delay_up*1000/(interval_us*1000/N_SYM_PER_SLOT)+1;
1166 printf("Start UL U-plane %d us after OTA [offset in sym %d]\n", delay_up, sym_up);
     /* Same ToS alignment spin as the O-DU branch. */
1168 timespec_get(&ts, TIME_UTC);
1169 }while (ts.tv_nsec >1500);
1170 struct tm * ptm = gmtime(&ts.tv_sec);
1172 strftime(buff, sizeof buff, "%D %T", ptm);
1173 printf("RU: thread_run start time: %s.%09ld UTC [%ld]\n", buff, ts.tv_nsec, interval_us);
1177 printf("interval_us %ld\n", interval_us);
     /* Align to a second boundary before entering the tick loop. */
1179 timespec_get(&ts, TIME_UTC);
1180 }while (ts.tv_nsec == 0);
     /* Main symbol-tick loop (loop header on an elided line): poll one
        symbol period, then drive the OTA symbol callback while running. */
1183 delta = poll_next_tick(interval_us*1000L/N_SYM_PER_SLOT);
1184 if (XRAN_STOPPED == xran_if_current_state)
1187 if (likely(XRAN_RUNNING == xran_if_current_state))
1188 sym_ota_cb(&sym_timer, timer_ctx);
1190 printf("Closing timing source thread...tx counter %lu, rx counter %lu\n", tx_counter, rx_counter);
1195 /* Handle ecpri format. */
/*
 * handle_ecpri_ethertype - entry point for received ECPRI ethertype frames.
 * Validates minimum length and the eCPRI common header, then dispatches by
 * message type: IQ/U-plane data to process_mbuf(), real-time control (C-plane)
 * to process_cplane() — but only on an O-RU; an O-DU logs an error instead.
 * Returns MBUF_FREE/MBUF_KEEP semantics via 'ret' (default MBUF_FREE) so the
 * caller knows whether it may release the mbuf.
 * NOTE(review): the early-return statements for the error paths and the
 * U-plane case label are on elided lines of this listing.
 */
1196 int handle_ecpri_ethertype(struct rte_mbuf *pkt, uint64_t rx_time)
1198     const struct xran_ecpri_hdr *ecpri_hdr;
1200     int32_t ret = MBUF_FREE;
     /* Reject runt frames that cannot even hold the eCPRI header. */
1202     if (rte_pktmbuf_data_len(pkt) < sizeof(struct xran_ecpri_hdr)) {
1203         print_err("Packet too short - %d bytes", rte_pktmbuf_data_len(pkt));
1207     /* check eCPRI header. */
1208     ecpri_hdr = rte_pktmbuf_mtod(pkt, struct xran_ecpri_hdr *);
1209     if(ecpri_hdr == NULL){
1210         print_err("ecpri_hdr error\n");
     /* Dispatch on eCPRI message type. */
1214     switch(ecpri_hdr->cmnhdr.ecpri_mesg_type) {
1217             ret = process_mbuf(pkt);
1218             // MLogTask(PID_PROCESS_UP_PKT, t1, MLogTick());
1221         case ECPRI_RT_CONTROL_DATA:
     /* C-plane messages are only valid in the O-RU role. */
1223             if(xran_dev_get_ctx()->fh_init.io_cfg.id == O_RU) {
1224                 ret = process_cplane(pkt);
1226                 print_err("O-DU recevied C-Plane message!");
1228             MLogTask(PID_PROCESS_CP_PKT, t1, MLogTick());
1231             print_err("Invalid eCPRI message type - %d", ecpri_hdr->cmnhdr.ecpri_mesg_type);
/*
 * xran_process_prach_sym - store one received PRACH symbol into the PHY-facing
 * PRACH buffer for its (tti, CC, antenna, symbol) slot.
 *
 * Visible behavior:
 *  - computes the absolute tti from frame/subframe/slot and bounds-checks
 *    tti / CC_ID / Ant_ID / symb_id;
 *  - symbol storage is rebased so prach_start_symbol maps to buffer index 0;
 *  - XRAN_CPU_LE_BYTE_ORDER path byte-swaps IQ samples in place into the
 *    existing buffer; the other path (on elided lines) is a zero-copy swap:
 *    free the old pCtrl mbuf and hand the received mbuf/payload to the buffer;
 *  - 'status' packs (tti << 16 | symb_id) — presumably the tag passed to the
 *    PRACH callback (callback invocation is commented out at lines 1303+).
 *
 * NOTE(review): several parameters (size, symb_id, CC_ID, Ant_ID, slot_id,
 * mb_free) are declared on elided lines; returns are also elided.
 */
1237 int xran_process_prach_sym(void *arg,
1238                         struct rte_mbuf *mbuf,
1239                         void *iq_data_start,
1244                         uint8_t subframe_id,
1248                         uint16_t start_prbu,
1254     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1255     uint8_t symb_id_offset;
1257     xran_status_t status;
1258     void *pHandle = NULL;
1259     struct rte_mbuf *mb;
1261     uint16_t iq_sample_size_bits = 16;
     /* Drop silently until the PHY shared memory is ready. */
1263     if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
1266     tti = frame_id * SLOTS_PER_SYSTEMFRAME + subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
1268     status = tti << 16 | symb_id;
1270     if(tti < xran_fs_get_max_slot() && CC_ID < XRAN_MAX_SECTOR_NR && Ant_ID < XRAN_MAX_ANTENNA_NR && symb_id < XRAN_NUM_OF_SYMBOL_PER_SLOT){
1271         symb_id_offset = symb_id - p_xran_dev_ctx->prach_start_symbol[CC_ID]; //make the storing of prach packets to start from 0 for easy of processing within PHY
1272         pos = (char*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pData;
1273         if(pos && iq_data_start && size){
1274             if (p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_CPU_LE_BYTE_ORDER) {
1276                 uint16_t *psrc = (uint16_t *)iq_data_start;
1277                 uint16_t *pdst = (uint16_t *)pos;
1278                 /* network byte (be) order of IQ to CPU byte order (le) */
1279                 for (idx = 0; idx < size/sizeof(int16_t); idx++){
1280                     pdst[idx] = (psrc[idx]>>8) | (psrc[idx]<<8); //rte_be_to_cpu_16(psrc[idx]);
     /* Zero-copy path: release the previous buffer's mbuf and adopt the
        received packet as the new backing store for this symbol. */
1283                 mb = p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pCtrl;
1284                 rte_pktmbuf_free(mb);
1285                 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pData = iq_data_start;
1286                 p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id_offset].pCtrl = mbuf;
1288 #ifdef DEBUG_XRAN_BUFFERS
1289         if (pos[0] != tti % XRAN_N_FE_BUF_LEN ||
1293             printf("%d %d %d %d\n", pos[0], pos[1], pos[2], pos[3]);
1297             print_err("pos %p iq_data_start %p size %d\n",pos, iq_data_start, size);
1300         print_err("TTI %d(f_%d sf_%d slot_%d) CC %d Ant_ID %d symb_id %d\n",tti, frame_id, subframe_id, slot_id, CC_ID, Ant_ID, symb_id);
     /* Disabled PRACH-complete notification (kept for reference). */
1303 /*    if (symb_id == p_xran_dev_ctx->prach_last_symbol[CC_ID] ){
1304         p_xran_dev_ctx->rx_packet_prach_tracker[tti % XRAN_N_FE_BUF_LEN][CC_ID][symb_id]++;
1305         if(p_xran_dev_ctx->rx_packet_prach_tracker[tti % XRAN_N_FE_BUF_LEN][CC_ID][symb_id] >= xran_get_num_eAxc(pHandle)){
1306             if(p_xran_dev_ctx->pPrachCallback[0])
1307                p_xran_dev_ctx->pPrachCallback[0](p_xran_dev_ctx->pPrachCallbackTag[0], status);
1308             p_xran_dev_ctx->rx_packet_prach_tracker[tti % XRAN_N_FE_BUF_LEN][CC_ID][symb_id] = 0;
/*
 * xran_pkt_validate - validate a received U-plane packet before processing.
 * Checks the eCPRI sequence id against the expected one for (CC, antenna,
 * slot): an O-DU validates UL sequence ids, an O-RU validates DL sequence
 * ids.  A mismatch bumps the duplicate-packet counter and fails the packet;
 * on success the total received-message counter is incremented.
 * Returns XRAN_STATUS_SUCCESS or XRAN_STATUS_FAIL.
 * NOTE(review): several parameters and some statements (e.g. the return for
 * the "incorrect dev type" branch) are on elided lines of this listing.
 */
1315 int32_t xran_pkt_validate(void *arg,
1316                         struct rte_mbuf *mbuf,
1317                         void *iq_data_start,
1322                         uint8_t subframe_id,
1325                         struct ecpri_seq_id *seq_id,
1327                         uint16_t start_prbu,
1332     struct xran_device_ctx * pctx = xran_dev_get_ctx();
1333     struct xran_common_counters *pCnt = &pctx->fh_counters;
     /* Direction of the sequence-id check depends on the device role. */
1335     if(pctx->fh_init.io_cfg.id == O_DU) {
1336         if(xran_check_upul_seqid(NULL, CC_ID, Ant_ID, slot_id, seq_id->seq_id) != XRAN_STATUS_SUCCESS) {
1337             pCnt->Rx_pkt_dupl++;
1338             return (XRAN_STATUS_FAIL);
1340     }else if(pctx->fh_init.io_cfg.id == O_RU) {
1341         if(xran_check_updl_seqid(NULL, CC_ID, Ant_ID, slot_id, seq_id->seq_id) != XRAN_STATUS_SUCCESS) {
1342             pCnt->Rx_pkt_dupl++;
1343             return (XRAN_STATUS_FAIL);
1346         print_err("incorrect dev type %d\n", pctx->fh_init.io_cfg.id);
1352     pCnt->Total_msgs_rcvd++;
1354     return XRAN_STATUS_SUCCESS;
/*
 * xran_process_rx_sym - deliver one received U-plane symbol's IQ data into
 * the fronthaul RX buffer for (tti, CC, antenna, symbol).
 *
 * Visible behavior:
 *  - derives the absolute tti and bounds-checks tti/CC_ID/Ant_ID/symb_id;
 *  - destination offset is start_prbu * 12 subcarriers * 4 bytes/sample
 *    (16-bit I + 16-bit Q);
 *  - XRAN_CPU_LE_BYTE_ORDER: branch exists but immediately rte_panic()s —
 *    explicitly unsupported;
 *  - XRAN_NE_BE_BYTE_ORDER: if the whole UL allocation fits in one MTU
 *    (unfragmented), adopt the received mbuf zero-copy (swap pData/pCtrl,
 *    *mb_free = MBUF_KEEP); otherwise copy the fragment into place and let
 *    the caller free the mbuf (*mb_free = MBUF_FREE).
 *
 * NOTE(review): parameters size/symb_id/CC_ID/Ant_ID/mb_free and the returns
 * are on elided lines of this listing.
 */
1357 int xran_process_rx_sym(void *arg,
1358                         struct rte_mbuf *mbuf,
1359                         void *iq_data_start,
1364                         uint8_t subframe_id,
1368                         uint16_t start_prbu,
1375     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1377     xran_status_t status;
1378     void *pHandle = NULL;
1379     struct rte_mbuf *mb = NULL;
1381     uint16_t iq_sample_size_bits = 16;
1383     tti = frame_id * SLOTS_PER_SYSTEMFRAME + subframe_id * SLOTNUM_PER_SUBFRAME + slot_id;
1385     status = tti << 16 | symb_id;
1387     if(tti < xran_fs_get_max_slot() && CC_ID < XRAN_MAX_SECTOR_NR && Ant_ID < XRAN_MAX_ANTENNA_NR && symb_id < XRAN_NUM_OF_SYMBOL_PER_SLOT){
1388         pos = (char*) p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData;
1389         pos += start_prbu * N_SC_PER_PRB*(iq_sample_size_bits/8)*2;
1390         if(pos && iq_data_start && size){
1391             if (p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_CPU_LE_BYTE_ORDER) {
1393                 uint16_t *psrc = (uint16_t *)iq_data_start;
1394                 uint16_t *pdst = (uint16_t *)pos;
     /* LE byte order is declared unsupported: hard stop on this path. */
1395                 rte_panic("XRAN_CPU_LE_BYTE_ORDER is not supported 0x16%lx\n", (long)mb);
1396                 /* network byte (be) order of IQ to CPU byte order (le) */
1397                 for (idx = 0; idx < size/sizeof(int16_t); idx++){
1398                     pdst[idx] = (psrc[idx]>>8) | (psrc[idx]<<8); //rte_be_to_cpu_16(psrc[idx]);
1400             } else if (likely(p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder == XRAN_NE_BE_BYTE_ORDER)){
     /* Zero-copy only when the full UL PRB allocation fits in one MTU. */
1401                 if (likely (p_xran_dev_ctx->fh_init.mtu >=
1402                     p_xran_dev_ctx->fh_cfg.nULRBs * N_SC_PER_PRB*(iq_sample_size_bits/8)*2)) {
1403                     /* no fragmentation */
1404                     mb = p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl;
1406                         rte_pktmbuf_free(mb);
1408                         print_err("mb==NULL\n");
1410                     p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pData = iq_data_start;
1411                     p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][CC_ID][Ant_ID].sBufferList.pBuffers[symb_id].pCtrl = mbuf;
1412                     *mb_free = MBUF_KEEP;
1414                     /* packet can be fragmented copy RBs */
1415                     rte_memcpy(pos, iq_data_start, size);
1416                     *mb_free = MBUF_FREE;
1419 #ifdef DEBUG_XRAN_BUFFERS
1420         if (pos[0] != tti % XRAN_N_FE_BUF_LEN ||
1424             printf("%d %d %d %d\n", pos[0], pos[1], pos[2], pos[3]);
1428             print_err("pos %p iq_data_start %p size %d\n",pos, iq_data_start, size);
1431         print_err("TTI %d(f_%d sf_%d slot_%d) CC %d Ant_ID %d symb_id %d\n",tti, frame_id, subframe_id, slot_id, CC_ID, Ant_ID, symb_id);
1437 /* Send burst of packets on an output interface */
/*
 * xran_send_burst - flush up to n queued mbufs from dev->tx_mbufs[port]
 * through the ethdi layer as ECPRI frames.  Each mbuf gets a sanity check
 * first; 'ret' accumulates the per-packet send results and a shortfall
 * (ret < n) is logged as an error.
 * NOTE(review): the return statement and any cleanup on shortfall are on
 * elided lines of this listing.
 */
1439 xran_send_burst(struct xran_device_ctx *dev, uint16_t n, uint16_t port)
1441     struct rte_mbuf **m_table;
1447     m_table = (struct rte_mbuf **)dev->tx_mbufs[port].m_table;
1449     for(i = 0; i < n; i++){
1450         rte_mbuf_sanity_check(m_table[i], 0);
1451         /*rte_pktmbuf_dump(stdout, m_table[i], 256);*/
1453         ret += xran_ethdi_mbuf_send(m_table[i], ETHER_TYPE_ECPRI);
1457     if (unlikely(ret < n)) {
1458         print_err("ret < n\n");
/*
 * xran_process_tx_sym - per-symbol U-plane transmit worker (timer driven).
 *
 * Determines the symbol to transmit by offsetting the OTA symbol index with
 * the configured sym_up advance/delay (DL is sent ahead of OTA on the O-DU,
 * UL after OTA on the O-RU), derives frame/subframe/slot/symbol ids, then for
 * every (eAxC antenna, CC) pair either:
 *  - O-DU with C-plane enabled: walks the section DB built by the C-plane
 *    path and generates matching U-plane packets per section, fragmenting
 *    against the MTU where needed, then bursts them out; or
 *  - O-RU, or C-plane disabled: sends the whole symbol buffer directly via
 *    send_symbol_ex() (split at 136 PRBs when the allocation is large), and
 *    on the O-RU additionally sends PRACH IQ for PRACH occasions.
 *
 * NOTE(review): this listing is non-contiguous — many declarations (tti,
 * ctx_id, prb_num, pos, mb, len2, next, i), else-keywords, returns and
 * closing braces fall on elided original lines.
 */
1465 int xran_process_tx_sym(void *arg)
1469     uint32_t mlogVar[10];
1470     uint32_t mlogVarCnt = 0;
1472     unsigned long t1 = MLogTick();
1474     void *pHandle = NULL;
1477     uint8_t num_eAxc = 0;
1478     uint8_t num_CCPorts = 0;
1480     uint32_t frame_id = 0;
1481     uint32_t subframe_id = 0;
1482     uint32_t slot_id = 0;
1483     uint32_t sym_id = 0;
1484     uint32_t sym_idx = 0;
1489     uint16_t iq_sample_size_bits = 16; // TODO: make dynamic per
1491     struct xran_section_info *sectinfo;
1493     uint32_t num_sections;
1496     enum xran_pkt_dir direction;
1498     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1499     struct xran_timer_ctx *pTCtx = (struct xran_timer_ctx *)arg;
     /* Nothing to do until the PHY shared memory is set up. */
1501     if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
     /* Role selects direction and PRB count: O-DU transmits DL, O-RU UL. */
1504     if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
1505         direction = XRAN_DIR_DL; /* lls-CU */
1506         prb_num = p_xran_dev_ctx->fh_cfg.nDLRBs;
1508         direction = XRAN_DIR_UL; /* RU */
1509         prb_num = p_xran_dev_ctx->fh_cfg.nULRBs;
1512     /* RU: send symb after OTA time with delay (UL) */
1513     /* lls-CU:send symb in advance of OTA time (DL) */
1514     sym_idx = XranOffsetSym(p_xran_dev_ctx->sym_up, xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT*SLOTNUM_PER_SUBFRAME*1000);
1516     tti = XranGetTtiNum(sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
1517     slot_id = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
1518     subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
1519     frame_id = XranGetFrameNum(tti,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
1520     sym_id = XranGetSymNum(sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
1521     ctx_id = XranGetSlotNum(tti, SLOTS_PER_SYSTEMFRAME) % XRAN_MAX_SECTIONDB_CTX;
1523     print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);
     /* MLog trace of the timing decision for this symbol. */
1526     mlogVar[mlogVarCnt++] = 0xAAAAAAAA;
1527     mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx;
1528     mlogVar[mlogVarCnt++] = sym_idx;
1529     mlogVar[mlogVarCnt++] = abs(p_xran_dev_ctx->sym_up);
1530     mlogVar[mlogVarCnt++] = tti;
1531     mlogVar[mlogVarCnt++] = frame_id;
1532     mlogVar[mlogVarCnt++] = subframe_id;
1533     mlogVar[mlogVarCnt++] = slot_id;
1534     mlogVar[mlogVarCnt++] = sym_id;
1535     MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
     /* Debug-stop path (condition on an elided line): halt the library. */
1539         print_err("OTA %d: TX:[sym_idx %d: TTI %d] fr %d sf %d slot %d sym %d\n",xran_lib_ota_sym_idx, sym_idx, tti, frame_id, subframe_id, slot_id, sym_id);
1540         xran_if_current_state = XRAN_STOPPED;
1543     num_eAxc = xran_get_num_eAxc(pHandle);
1544     num_CCPorts = xran_get_num_cc(pHandle);
1547     for(ant_id = 0; ant_id < num_eAxc; ant_id++) {
1548         for(cc_id = 0; cc_id < num_CCPorts; cc_id++) {
1549             if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU
1550                     && p_xran_dev_ctx->enableCP) {
1551                 /*==== lls-CU and C-Plane has been enabled ===*/
1553                 num_sections = xran_cp_getsize_section_info(pHandle, direction, cc_id, ant_id, ctx_id);
1554                 /* iterate C-Plane configuration to generate corresponding U-Plane */
1555                 while(next < num_sections) {
1556                     sectinfo = xran_cp_iterate_section_info(pHandle, direction, cc_id, ant_id, ctx_id, &next);
1558                     if(sectinfo == NULL)
1561                     if(sectinfo->type != XRAN_CP_SECTIONTYPE_1) {   /* only supports type 1 */
1562                         print_err("Invalid section type in section DB - %d", sectinfo->type);
1566                     /* skip, if not scheduled */
1567                     if(sym_id < sectinfo->startSymId || sym_id >= sectinfo->startSymId + sectinfo->numSymbol)
1570 /*                  if(sectinfo->compMeth)
1571                         iq_sample_size_bits = sectinfo->iqWidth;*/
1573                     if(iq_sample_size_bits != 16) {/* TODO: support for compression */
1574                         print_err("Incorrect iqWidth %d", iq_sample_size_bits);
1575                         iq_sample_size_bits = 16;
1578                     print_dbg(">>> sym %2d [%d] type%d, id %d, startPrbc=%d, numPrbc=%d, numSymbol=%d\n", sym_id, next,
1579                             sectinfo->type, sectinfo->id, sectinfo->startPrbc,
1580                             sectinfo->numPrbc, sectinfo->numSymbol);
1582                     p_xran_dev_ctx->tx_mbufs[0].len = 0;
1583                     uint16_t len  = p_xran_dev_ctx->tx_mbufs[0].len;
1587                     //Added for Klocworks
1588                     if (len >= MBUF_TABLE_SIZE)
1589                         len = MBUF_TABLE_SIZE - 1;
1591                     pos = (char*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1592                     mb  = p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;
1594                     /* first all PRBs */
1595                     prepare_symbol_ex(direction, sectinfo->id,
1597                                     (struct rb_map *)pos,
1598                                     p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1599                                     frame_id, subframe_id, slot_id, sym_id,
1600                                     sectinfo->startPrbc, sectinfo->numPrbc,
1602                                     xran_get_updl_seqid(pHandle, cc_id, ant_id),
1605                     /* if we don't need to do any fragmentation */
1606                     if (likely (p_xran_dev_ctx->fh_init.mtu >=
1607                             sectinfo->numPrbc * N_SC_PER_PRB*(iq_sample_size_bits/8)*2)) {
1608                         /* no fragmentation */
1609                         p_xran_dev_ctx->tx_mbufs[0].m_table[len] = mb;
     /* Fragmentation path: split the section across multiple mbufs. */
1613                         len2 = xran_app_fragment_packet(mb,
1614                                                     &p_xran_dev_ctx->tx_mbufs[0].m_table[len],
1615                                                     (uint16_t)(MBUF_TABLE_SIZE - len),
1616                                                     p_xran_dev_ctx->fh_init.mtu,
1617                                                     p_xran_dev_ctx->direct_pool,
1618                                                     p_xran_dev_ctx->indirect_pool,
1620                                                     xran_get_updl_seqid_addr(pHandle, cc_id, ant_id));
1622                         /* Free input packet */
1623                         rte_pktmbuf_free(mb);
1625                         /* If we fail to fragment the packet */
1626                         if (unlikely (len2 < 0)){
1627                             print_err("len2= %d\n", len2);
     /* Each fragment needs an Ethernet header prepended. */
1633                             for (i = len; i < len + len2; i ++) {
1635                                 m = p_xran_dev_ctx->tx_mbufs[0].m_table[i];
1636                                 struct ether_hdr *eth_hdr = (struct ether_hdr *)
1637                                     rte_pktmbuf_prepend(m, (uint16_t)sizeof(struct ether_hdr));
1638                                 if (eth_hdr == NULL) {
1639                                     rte_panic("No headroom in mbuf.\n");
1646                     if (unlikely(len > XRAN_MAX_PKT_BURST_PER_SYM)) {
1647                         rte_panic("XRAN_MAX_PKT_BURST_PER_SYM\n");
1650                     /* Transmit packets */
1651                     xran_send_burst(p_xran_dev_ctx, (uint16_t)len, 0);
1652                     p_xran_dev_ctx->tx_mbufs[0].len = 0;
1653                 } /* while(section) */
1657                 /*==== RU or C-Plane is disabled ===*/
1658                 xRANPrachCPConfigStruct *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
     /* Only act on slots/symbols scheduled for this role's direction. */
1660                 if(xran_fs_get_slot_type(cc_id, tti, ((p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)? XRAN_SLOT_TYPE_DL : XRAN_SLOT_TYPE_UL)) ==  1
1661                         || xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_SP) ==  1
1662                         || xran_fs_get_slot_type(cc_id, tti, XRAN_SLOT_TYPE_FDD) ==  1){
1664                     if(xran_fs_get_symbol_type(cc_id, tti, sym_id) == ((p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)? XRAN_SYMBOL_TYPE_DL : XRAN_SYMBOL_TYPE_UL)
1665                        || xran_fs_get_symbol_type(cc_id, tti, sym_id) == XRAN_SYMBOL_TYPE_FDD){
1667                         if(iq_sample_size_bits != 16)
1668                             print_err("Incorrect iqWidth %d\n", iq_sample_size_bits );
1670                         pos = (char*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1671                         mb  = (void*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;
     /* Large (or zero-reported) allocations are sent in two chunks of
        up to 136 PRBs each; smaller ones in a single send (line 1708). */
1673                         if( prb_num > 136 || prb_num == 0) {
1674                             uint16_t sec_id  = xran_alloc_sectionid(pHandle, direction, cc_id, ant_id, slot_id);
1675                             /* first 136 PRBs */
1676                             send_symbol_ex(direction,
1679                                             (struct rb_map *)pos,
1680                                             p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1681                                             frame_id, subframe_id, slot_id, sym_id,
1684                                             (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1685                                                 xran_get_updl_seqid(pHandle, cc_id, ant_id) :
1686                                                 xran_get_upul_seqid(pHandle, cc_id, ant_id));
     /* Advance past the first 136 PRBs (12 SC * 4 bytes per PRB). */
1688                             pos += 136 * N_SC_PER_PRB * (iq_sample_size_bits/8)*2;
1690                             send_symbol_ex(direction, sec_id,
1692                                             (struct rb_map *)pos,
1693                                             p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1694                                             frame_id, subframe_id, slot_id, sym_id,
1697                                             (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1698                                                 xran_get_updl_seqid(pHandle, cc_id, ant_id) :
1699                                                 xran_get_upul_seqid(pHandle,  cc_id, ant_id));
1701 #ifdef DEBUG_XRAN_BUFFERS
1702                     if (pos[0] != tti % XRAN_N_FE_BUF_LEN ||
1706                         printf("%d %d %d %d\n", pos[0], pos[1], pos[2], pos[3]);
     /* Single-send path for allocations that fit in 136 PRBs. */
1708                             send_symbol_ex(direction,
1709                                         xran_alloc_sectionid(pHandle, direction, cc_id, ant_id, slot_id),
1710                                         (struct rte_mbuf *)mb,
1711                                         (struct rb_map *)pos,
1712                                         p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1713                                         frame_id, subframe_id, slot_id, sym_id,
1716                                         (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
1717                                             xran_get_updl_seqid(pHandle, cc_id, ant_id) :
1718                                             xran_get_upul_seqid(pHandle, cc_id, ant_id));
     /* O-RU only: transmit PRACH IQ during configured PRACH occasions. */
1721                     if(p_xran_dev_ctx->enablePrach
1722                           && (p_xran_dev_ctx->fh_init.io_cfg.id == O_RU)) {   /* Only RU needs to send PRACH I/Q */
1723                         uint32_t isPRACHslot = xran_isPRACHSlot(subframe_id, slot_id);
1724                         if((frame_id % pPrachCPConfig->x == pPrachCPConfig->y[0])
1725                                 && (isPRACHslot == 1)
1726                                 && (sym_id >= p_xran_dev_ctx->prach_start_symbol[cc_id])
1727                                 && (sym_id <= p_xran_dev_ctx->prach_last_symbol[cc_id])) {  //is prach slot
1728                             for(ant_id = 0; ant_id < num_eAxc; ant_id++) {
1729                                 int prach_port_id = ant_id + num_eAxc;
1730                                 pos = (char*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[0].pData;
1731                                 pos += (sym_id - p_xran_dev_ctx->prach_start_symbol[cc_id]) * pPrachCPConfig->numPrbc * N_SC_PER_PRB * 4;
1732                                 mb = NULL;//(void*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[0].pCtrl;
1733                                 send_symbol_ex(direction,
1734                                         xran_alloc_sectionid(pHandle, direction, cc_id, prach_port_id, slot_id),
1735                                         (struct rte_mbuf *)mb,
1736                                         (struct rb_map *)pos,
1737                                         p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
1738                                         frame_id, subframe_id, slot_id, sym_id,
1739                                         pPrachCPConfig->startPrbc, pPrachCPConfig->numPrbc,
1740                                         cc_id, prach_port_id,
1741                                         xran_get_upul_seqid(pHandle, cc_id, prach_port_id));
1743                         } /* if((frame_id % pPrachCPConfig->x == pPrachCPConfig->y[0]) .... */
1744                     } /* if(p_xran_dev_ctx->enablePrach ..... */
1746             } /* RU mode or C-Plane is not used */
1749         } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
1750     } /* for(ant_id = 0; ant_id < num_eAxc; ant_id++) */
1752     MLogTask(PID_PROCESS_TX_SYM, t1, MLogTick());
/*
 * xran_packet_and_dpdk_timer_thread - helper thread that services DPDK
 * timers.  Sets itself to the library's default RT priority, then loops
 * measuring TSC deltas and (on an elided line) calls the timer manager when
 * TIMER_RESOLUTION_CYCLES have elapsed, exiting when the library stops.
 * NOTE(review): loop header, 'res' declaration, timer-manage call and return
 * are on elided lines of this listing.
 */
1756 int xran_packet_and_dpdk_timer_thread(void *args)
1758     struct xran_ethdi_ctx *const ctx = xran_ethdi_get_ctx();
1760     uint64_t prev_tsc = 0;
1761     uint64_t cur_tsc = rte_rdtsc();
1762     uint64_t diff_tsc = cur_tsc - prev_tsc;
1764     struct sched_param sched_param;
1766     printf("%s [CPU %2d] [PID: %6d]\n", __FUNCTION__, rte_lcore_id(), getpid());
1768     sched_param.sched_priority = XRAN_THREAD_DEFAULT_PRIO;
1770     if ((res = pthread_setschedparam(pthread_self(), 1, &sched_param)))
1772         printf("priority is not changed: coreId = %d, result1 = %d\n",rte_lcore_id(), res);
     /* Main loop (header elided): rate-limit timer servicing by TSC delta. */
1777         cur_tsc  = rte_rdtsc();
1778         diff_tsc = cur_tsc - prev_tsc;
1779         if (diff_tsc > TIMER_RESOLUTION_CYCLES) {
1784         if (XRAN_STOPPED == xran_if_current_state)
1788     printf("Closing pkts timer thread...\n");
/*
 * xran_init - one-time library initialization.
 * Zeroes and populates the device context from the caller's xran_fh_init
 * (role, MTU, eAxC id mapping, C-plane/PRACH/dynamic-section enables),
 * registers the ECPRI ethertype handler, initializes DPDK ethdev I/O with
 * O-DU/O-RU MAC addresses ordered by role, initializes the rte timers and
 * mbuf pools, applies the debug-stop configuration, clears per-cell slot
 * types, and hands the device context back via *pXranLayerHandle.
 * NOTE(review): the O-RU branch's io_cfg arguments and the return statement
 * are on elided lines of this listing.
 */
1794 xran_init(int argc, char *argv[],
1795            struct xran_fh_init *p_xran_fh_init, char *appName, void ** pXranLayerHandle)
1800     struct xran_io_loop_cfg *p_io_cfg = (struct xran_io_loop_cfg *)&p_xran_fh_init->io_cfg;
1801     struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
1803     int32_t lcore_id = 0;
1806     memset(p_xran_dev_ctx, 0, sizeof(struct xran_device_ctx));
1809     p_xran_dev_ctx->fh_init = *p_xran_fh_init;
1811     printf(" %s: MTU %d\n", __FUNCTION__, p_xran_dev_ctx->fh_init.mtu);
1813     xran_if_current_state = XRAN_INIT;
1815     memcpy(&(p_xran_dev_ctx->eAxc_id_cfg), &(p_xran_fh_init->eAxCId_conf), sizeof(struct xran_eaxcid_config));
1817     p_xran_dev_ctx->enableCP    = p_xran_fh_init->enableCP;
1818     p_xran_dev_ctx->enablePrach = p_xran_fh_init->prachEnable;
1819     p_xran_dev_ctx->DynamicSectionEna = p_xran_fh_init->DynamicSectionEna;
1821     xran_register_ethertype_handler(ETHER_TYPE_ECPRI, handle_ecpri_ethertype);
     /* id == 0 (O-DU): local addr is o_du, remote is o_ru; reversed for O-RU. */
1822     if (p_io_cfg->id == 0)
1823         xran_ethdi_init_dpdk_io(p_xran_fh_init->filePrefix,
1826                            (struct ether_addr *)p_xran_fh_init->p_o_du_addr,
1827                            (struct ether_addr *)p_xran_fh_init->p_o_ru_addr,
1828                            p_xran_fh_init->cp_vlan_tag,
1829                            p_xran_fh_init->up_vlan_tag);
1831         xran_ethdi_init_dpdk_io(p_xran_fh_init->filePrefix,
1834                            (struct ether_addr *)p_xran_fh_init->p_o_ru_addr,
1835                            (struct ether_addr *)p_xran_fh_init->p_o_du_addr,
1836                            p_xran_fh_init->cp_vlan_tag,
1837                            p_xran_fh_init->up_vlan_tag);
     /* Initialize all rte timers before first use. */
1839     for(i = 0; i < 10; i++ )
1840         rte_timer_init(&tti_to_phy_timer[i]);
1842     rte_timer_init(&sym_timer);
1843     for (i = 0; i< MAX_NUM_OF_DPDK_TIMERS; i++)
1844         rte_timer_init(&dpdk_timer[i]);
1846     p_xran_dev_ctx->direct_pool   = socket_direct_pool;
1847     p_xran_dev_ctx->indirect_pool = socket_indirect_pool;
1849     printf("Set debug stop %d, debug stop count %d\n", p_xran_fh_init->debugStop, p_xran_fh_init->debugStopCount);
1850     timing_set_debug_stop(p_xran_fh_init->debugStop, p_xran_fh_init->debugStopCount);
1852     for (uint32_t nCellIdx = 0; nCellIdx < XRAN_MAX_SECTOR_NR; nCellIdx++){
1853         xran_fs_clear_slot_type(nCellIdx);
1856     *pXranLayerHandle = p_xran_dev_ctx;
/*
 * xran_sector_get_instances - allocate per-CC (sector) handle objects.
 * For each requested instance, allocates a 64-byte-aligned, zeroed
 * XranSectorHandleInfo, records its index and owning xran port, and stores
 * the handle both in the library-global table and the caller's output array.
 * Returns XRAN_STATUS_SUCCESS, XRAN_STATUS_INVALID_PARAM on bad arguments,
 * or XRAN_STATUS_RESOURCE if allocation fails (earlier allocations are not
 * rolled back on that path — caller beware).
 */
1861 int32_t xran_sector_get_instances (void * pDevHandle, uint16_t nNumInstances,
1862                xran_cc_handle_t * pSectorInstanceHandles)
1864     xran_status_t nStatus = XRAN_STATUS_FAIL;
1865     struct xran_device_ctx *pDev = (struct xran_device_ctx *)pDevHandle;
1866     XranSectorHandleInfo *pCcHandle = NULL;
1869     /* Check for the Valid Parameters */
1870     CHECK_NOT_NULL (pSectorInstanceHandles, XRAN_STATUS_INVALID_PARAM);
1872     if (!nNumInstances) {
1873         print_dbg("Instance is not assigned for this function !!! \n");
1874         return XRAN_STATUS_INVALID_PARAM;
1877     for (i = 0; i < nNumInstances; i++) {
1879         /* Allocate Memory for CC handles */
1880         pCcHandle = (XranSectorHandleInfo *) _mm_malloc( /*"xran_cc_handles",*/ sizeof (XranSectorHandleInfo), 64);
1882         if(pCcHandle == NULL)
1883             return XRAN_STATUS_RESOURCE;
1885         memset (pCcHandle, 0, (sizeof (XranSectorHandleInfo)));
1887         pCcHandle->nIndex    = i;
1888         pCcHandle->nXranPort = pDev->xran_port_id;
1890         printf("%s [%d]: CC %d handle %p\n", __FUNCTION__, pDev->xran_port_id, i, pCcHandle);
1891         pLibInstanceHandles[pDev->xran_port_id][i] = pSectorInstanceHandles[i] = pCcHandle;
1893         printf("Handle: %p Instance: %p\n",
1894             &pSectorInstanceHandles[i], pSectorInstanceHandles[i]);
1897     return XRAN_STATUS_SUCCESS;
/*
 * xran_mm_init - memory-manager init hook.  Intentionally a no-op: buffer
 * memory comes from DPDK mbuf pools (see xran_bm_init), so the nMemorySize/
 * nMemorySegmentSize parameters are accepted for API compatibility only.
 */
1900 int32_t xran_mm_init (void * pHandle, uint64_t nMemorySize,
1901             uint32_t nMemorySegmentSize)
1903     /* we use mbuf from dpdk memory */
/*
 * xran_bm_init - create one rte_pktmbuf pool for this CC handle.
 * Pool name encodes port/CC/pool-index; element size is the caller's buffer
 * size plus headroom for Ethernet + eCPRI + radio-app + data-section headers
 * (+256 slack).  On success stores the element size/count, returns the new
 * pool index via *pPoolIndex and post-increments nBufferPoolIndex; panics if
 * the pool cannot be created.
 * NOTE(review): the return statement is on an elided line of this listing.
 */
1907 int32_t xran_bm_init (void * pHandle, uint32_t * pPoolIndex, uint32_t nNumberOfBuffers, uint32_t nBufferSize)
1909     XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
1910     uint32_t nAllocBufferSize;
1912     char pool_name[RTE_MEMPOOL_NAMESIZE];
1914     snprintf(pool_name, RTE_MEMPOOL_NAMESIZE, "ru_%d_cc_%d_idx_%d",
1915         pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex);
     /* Reserve space for all fronthaul headers in front of the IQ payload. */
1917     nAllocBufferSize = nBufferSize + sizeof(struct ether_hdr) +
1918         sizeof (struct xran_ecpri_hdr) +
1919         sizeof (struct radio_app_common_hdr) +
1920         sizeof(struct data_section_hdr) + 256;
1923     printf("%s: [ handle %p %d %d ] [nPoolIndex %d] nNumberOfBuffers %d nBufferSize %d\n", pool_name,
1924                         pXranCc, pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex, nNumberOfBuffers, nBufferSize);
1926     pXranCc->p_bufferPool[pXranCc->nBufferPoolIndex] = rte_pktmbuf_pool_create(pool_name, nNumberOfBuffers,
1927                                                                                MBUF_CACHE, 0, nAllocBufferSize, rte_socket_id());
1929     if(pXranCc->p_bufferPool[pXranCc->nBufferPoolIndex] == NULL){
1930         rte_panic("rte_pktmbuf_pool_create failed [ handle %p %d %d ] [nPoolIndex %d] nNumberOfBuffers %d nBufferSize %d errno %s\n",
1931                     pXranCc, pXranCc->nXranPort, pXranCc->nIndex, pXranCc->nBufferPoolIndex, nNumberOfBuffers, nBufferSize, rte_strerror(rte_errno));
1935     pXranCc->bufferPoolElmSz[pXranCc->nBufferPoolIndex]  = nBufferSize;
1936     pXranCc->bufferPoolNumElm[pXranCc->nBufferPoolIndex] = nNumberOfBuffers;
1938     printf("CC:[ handle %p ru %d cc_idx %d ] [nPoolIndex %d] mb pool %p \n",
1939                 pXranCc, pXranCc->nXranPort, pXranCc->nIndex,
1940                     pXranCc->nBufferPoolIndex,  pXranCc->p_bufferPool[pXranCc->nBufferPoolIndex]);
1942     *pPoolIndex = pXranCc->nBufferPoolIndex++;
/*
 * xran_bm_allocate_buffer - allocate one buffer from pool nPoolIndex.
 * Allocates an mbuf, appends the pool's element size and prepends room for
 * the Ethernet header, then computes the IQ payload start by skipping the
 * Ethernet + eCPRI + radio-app + data-section headers.  On success returns
 * the IQ pointer via *ppData and the owning mbuf via *ppCtrl (caller later
 * releases it through xran_bm_free_buffer).
 * NOTE(review): the returns and the error paths' control flow are on elided
 * lines; the trailing *ppData == NULL check at line 1982 appears to belong
 * to an elided outer branch.
 */
1947 int32_t xran_bm_allocate_buffer(void * pHandle, uint32_t nPoolIndex, void **ppData,  void **ppCtrl)
1949     XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
1953     struct rte_mbuf * mb =  rte_pktmbuf_alloc(pXranCc->p_bufferPool[nPoolIndex]);
1956         char * start     = rte_pktmbuf_append(mb, pXranCc->bufferPoolElmSz[nPoolIndex]);
1957         char * ethhdr    = rte_pktmbuf_prepend(mb, sizeof(struct ether_hdr));
1959         if(start && ethhdr){
1960             char * iq_offset = rte_pktmbuf_mtod(mb, char * );
     /* IQ data begins after all fronthaul protocol headers. */
1962             iq_offset = iq_offset + sizeof(struct ether_hdr) +
1963                                     sizeof (struct xran_ecpri_hdr) +
1964                                     sizeof (struct radio_app_common_hdr) +
1965                                     sizeof(struct data_section_hdr);
1967             if (0) /* if compression */
1968                 iq_offset += sizeof (struct data_section_compression_hdr);
1970             *ppData = (void *)iq_offset;
1971             *ppCtrl  = (void *)mb;
1974             print_err("[nPoolIndex %d] start ethhdr failed \n", nPoolIndex );
1978         print_err("[nPoolIndex %d] mb alloc failed \n", nPoolIndex );
1982     if (*ppData ==  NULL){
1983         print_err("[nPoolIndex %d] rte_pktmbuf_append for %d failed \n", nPoolIndex, pXranCc->bufferPoolElmSz[nPoolIndex]);
/*
 * xran_bm_free_buffer - return a buffer obtained from xran_bm_allocate_buffer.
 * pCtrl is the owning rte_mbuf; pData (the IQ pointer inside it) needs no
 * separate free.  rte_pktmbuf_free(NULL) is a safe no-op.
 * NOTE(review): the return statement is on an elided line of this listing.
 */
1990 int32_t xran_bm_free_buffer(void * pHandle, void *pData, void *pCtrl)
1992     XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
1995         rte_pktmbuf_free(pCtrl);
/**
 * @brief Register the U-plane buffer lists exchanged between the PHY and the
 *        xRAN library for one sector: DL (Tx) IQ and PRB-map buffers, UL (Rx)
 *        IQ and PRB-map buffers, plus the UL-data-ready callback.
 *
 * @param pHandle      sector handle (XranSectorHandleInfo*)
 * @param pSrcBuffer   DL IQ buffer lists, indexed [antenna][TTI]
 * @param pSrcCpBuffer DL PRB-map (section) buffer lists, indexed [antenna][TTI]
 * @param pDstBuffer   UL IQ buffer lists, indexed [antenna][TTI]
 * @param pDstCpBuffer UL PRB-map (section) buffer lists, indexed [antenna][TTI]
 * @param pCallback    callback fired toward the PHY when UL data is received
 * @return XRAN_STATUS_FAIL on bad arguments, otherwise nStatus
 *
 * NOTE(review): structural lines (braces, if-guards, trailing parameter) are
 * elided in this view; review notes below mark the spots that depend on them.
 */
int32_t xran_5g_fronthault_config (void * pHandle,
                struct xran_buffer_list *pSrcBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                struct xran_buffer_list *pSrcCpBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                struct xran_buffer_list *pDstBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                struct xran_buffer_list *pDstCpBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                xran_transport_callback_fn pCallback,
    XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
    xran_status_t nStatus = XRAN_STATUS_SUCCESS;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();

    print_dbg("%s\n", __FUNCTION__);

    /* reject calls without a sector handle (guarding `if` is elided here) */
        printf("Handle is NULL!\n");
        return XRAN_STATUS_FAIL;

    if (pCallback == NULL)
        printf ("no callback\n");
        return XRAN_STATUS_FAIL;

    /* sector (component carrier) index this configuration applies to */
    i = pXranCc->nIndex;

    /* initialize bookkeeping for every TTI buffer slot and antenna */
    for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
        for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
            /* DL (Tx) IQ: reset segment state, point at the internal
               per-symbol buffer array for this TTI/sector/antenna */
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulTxBuffers[j][i][z][0];
            /* NOTE(review): this wholesale copy replaces the list just set up
               above; presumably an elided `if(pSrcBuffer[z][j])` guards it --
               verify against the full file (also dereferences without a
               visible NULL check) */
            p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList = *pSrcBuffer[z][j];

            /* DL (Tx) PRB-map/section buffers: same pattern */
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulTxPrbMapBuffers[j][i][z][0];
            /* NOTE(review): same presumed-guarded overwrite as above */
            p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList = *pSrcCpBuffer[z][j];

            /* UL (Rx) IQ buffers: same pattern */
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulRxBuffers[j][i][z][0];
            p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList = *pDstBuffer[z][j];

            /* UL (Rx) PRB-map/section buffers: same pattern */
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFrontHaulRxPrbMapBuffers[j][i][z][0];
            p_xran_dev_ctx->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList = *pDstCpBuffer[z][j];

    /* debug dump of the first bytes of each Tx symbol buffer.
       NOTE(review): ptr is dereferenced without a NULL check -- an elided
       guard (e.g. #if 0 around this dump) may protect it; confirm before
       assuming it runs in production */
    for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
        for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
            printf("TTI:TX 0x%02x Sec %d Ant%d\n",j,i,z);
            for(k = 0; k <XRAN_NUM_OF_SYMBOL_PER_SLOT; k++){
                uint8_t *ptr = p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData;
                printf("    sym: %2d %p 0x%02x 0x%02x 0x%02x 0x%02x 0x%02x\n", k, ptr, ptr[0],ptr[1], ptr[2], ptr[3], ptr[4]);

    /* same dump for the Rx side */
    for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
        for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
            printf("TTI:RX 0x%02x Sec %d Ant%d\n",j,i,z);
            for(k = 0; k <XRAN_NUM_OF_SYMBOL_PER_SLOT; k++){
                uint8_t *ptr = p_xran_dev_ctx->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData;
                printf("    sym: %2d %p 0x%02x 0x%02x 0x%02x 0x%02x 0x%02x\n", k, ptr, ptr[0],ptr[1], ptr[2], ptr[3], ptr[4]);

    /* record the per-sector UL notification callback and its tag */
    p_xran_dev_ctx->pCallback[i]    = pCallback;
    p_xran_dev_ctx->pCallbackTag[i] = pCallbackTag;

    /* publish: buffers are now safe for the data path to use */
    p_xran_dev_ctx->xran2phy_mem_ready = 1;
/**
 * @brief Register PRACH receive buffer lists and the PRACH notification
 *        callback for one sector.
 *
 * @param pHandle    sector handle (XranSectorHandleInfo*)
 * @param pDstBuffer PRACH IQ buffer lists, indexed [antenna][TTI]
 * @param pCallback  callback fired toward the PHY when PRACH data is received
 * @return XRAN_STATUS_FAIL on bad arguments, otherwise nStatus
 *
 * NOTE(review): structural lines (braces, guards, trailing parameter) are
 * elided in this view.
 */
int32_t xran_5g_prach_req (void *  pHandle,
                struct xran_buffer_list *pDstBuffer[XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN],
                xran_transport_callback_fn pCallback,
    XranSectorHandleInfo* pXranCc = (XranSectorHandleInfo*) pHandle;
    xran_status_t nStatus = XRAN_STATUS_SUCCESS;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();

    /* reject calls without a sector handle (guarding `if` is elided here) */
        printf("Handle is NULL!\n");
        return XRAN_STATUS_FAIL;

    if (pCallback == NULL)
        printf ("no callback\n");
        return XRAN_STATUS_FAIL;

    /* sector (component carrier) index this configuration applies to */
    i = pXranCc->nIndex;

    for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
        for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
            /* reset per-TTI/antenna PRACH bookkeeping and attach buffers */
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].bValid = 0;
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
            /* NOTE(review): unlike the fronthaul buffers (which use
               XRAN_NUM_OF_SYMBOL_PER_SLOT), PRACH sizes the list by antenna
               count -- intentional per the original comment, but worth
               double-checking */
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_MAX_ANTENNA_NR; // ant number.
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &p_xran_dev_ctx->sFHPrachRxBuffers[j][i][z][0];
            /* NOTE(review): wholesale copy replaces the list just set up;
               presumably guarded by an elided NULL check -- verify */
            p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList = *pDstBuffer[z][j];

    /* record the PRACH notification callback and its tag */
    p_xran_dev_ctx->pPrachCallback[i]    = pCallback;
    p_xran_dev_ctx->pPrachCallbackTag[i] = pCallbackTag;
/**
 * @brief Open the xRAN fronthaul interface: snapshot the configuration into
 *        the device context, validate it, initialize PRACH/C-Plane section
 *        databases and frame-structure tables, then launch the timing thread
 *        on the configured core.
 *
 * @param pHandle device handle
 * @param pConf   fronthaul configuration; copied, so the caller may reuse it
 * @return XRAN_STATUS_FAIL on unsupported IQ/byte order; otherwise success
 *
 * NOTE(review): braces, some local declarations and the final return are
 * elided in this view; `interval_us` appears to be file-scope state.
 */
int32_t xran_open(void *pHandle, struct xran_fh_config* pConf)
    uint8_t nNumerology = 0;
    int32_t  lcore_id = 0;
    struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
    struct xran_fh_config *pFhCfg;
    pFhCfg = &(p_xran_dev_ctx->fh_cfg);

    /* keep a private copy of the configuration in the device context */
    memcpy(pFhCfg, pConf, sizeof(struct xran_fh_config));

    /* numerology read back via the freshly stored config */
    nNumerology = xran_get_conf_numerology(pHandle);

    /* clamp the cell count to what the library supports (warns, not fails) */
    if (pConf->nCC > XRAN_MAX_SECTOR_NR)
        if(pConf->log_level)
            printf("Number of cells %d exceeds max number supported %d!\n", pConf->nCC, XRAN_MAX_SECTOR_NR);
        pConf->nCC = XRAN_MAX_SECTOR_NR;

    /* only the default IQ sample order and network byte order are handled */
    if(pConf->ru_conf.iqOrder != XRAN_I_Q_ORDER
        || pConf->ru_conf.byteOrder != XRAN_NE_BE_BYTE_ORDER ){
        /* NOTE(review): "suppirted" is a typo in the log text ("supported") */
        print_err("Byte order and/or IQ order is not suppirted [IQ %d byte %d]\n", pConf->ru_conf.iqOrder, pConf->ru_conf.byteOrder);
        return XRAN_STATUS_FAIL;

    //setup PRACH configuration for C-Plane
    xran_init_prach(pConf, p_xran_dev_ctx);

    /* C-Plane section database and sequence/section id generators */
    xran_cp_init_sectiondb(pHandle);
    xran_init_sectionid(pHandle);
    xran_init_seqid(pHandle);

    /* TTI duration follows the numerology */
    interval_us = xran_fs_get_tti_interval(nNumerology);

    if(pConf->log_level){
        printf("%s: interval_us=%ld\n", __FUNCTION__, interval_us);
    timing_set_numerology(nNumerology);

    /* per-cell slot-type tables (TDD pattern / duplex type) */
    for(i = 0 ; i <pConf->nCC; i++){
        xran_fs_set_slot_type(i, pConf->frame_conf.nFrameDuplexType, pConf->frame_conf.nTddPeriod,
            pConf->frame_conf.sSlotConfig);

    xran_fs_slot_limit_init(xran_fs_get_tti_interval(nNumerology));

    /* optional FEC offload via bbdev */
    if(xran_ethdi_get_ctx()->io_cfg.bbdev_mode != XRAN_BBDEV_NOT_USED){
        p_xran_dev_ctx->bbdev_dec = pConf->bbdev_dec;
        p_xran_dev_ctx->bbdev_enc = pConf->bbdev_enc;

    /* Start packet processing thread */
    /* only when both U-plane and C-plane VF ports were actually opened */
    if((uint16_t)xran_ethdi_get_ctx()->io_cfg.port[XRAN_UP_VF] != 0xFFFF &&
        (uint16_t)xran_ethdi_get_ctx()->io_cfg.port[XRAN_CP_VF] != 0xFFFF ){
        if(pConf->log_level){
            print_dbg("XRAN_UP_VF: 0x%04x\n", xran_ethdi_get_ctx()->io_cfg.port[XRAN_UP_VF]);
            print_dbg("XRAN_CP_VF: 0x%04x\n", xran_ethdi_get_ctx()->io_cfg.port[XRAN_CP_VF]);
        /* timing thread is pinned to the dedicated timing core */
        if (rte_eal_remote_launch(xran_timing_source_thread, xran_dev_get_ctx(), xran_ethdi_get_ctx()->io_cfg.timing_core))
            rte_panic("thread_run() failed to start\n");
    if(pConf->log_level){
        printf("Eth port was not open. Processing thread was not started\n");
2218 int32_t xran_start(void *pHandle)
2220 xran_if_current_state = XRAN_RUNNING;
2224 int32_t xran_stop(void *pHandle)
2226 xran_if_current_state = XRAN_STOPPED;
2230 int32_t xran_close(void *pHandle)
2232 xran_if_current_state = XRAN_STOPPED;
2233 //TODO: fix memory leak xran_cp_free_sectiondb(pHandle);
2234 //rte_eal_mp_wait_lcore();
2235 //xran_ethdi_ports_stats();
/**
 * @brief Destroy memory-management resources for the given handle.
 *
 * @param pHandle device handle (currently ignored)
 * @return status code
 *
 * NOTE(review): stub -- the body (and its return value) is not implemented
 * in this revision.
 */
int32_t xran_mm_destroy (void * pHandle)
    /* functionality is not yet implemented */
/**
 * @brief Register a per-symbol callback for a given symbol/antenna.
 *
 * @param pHandle    device handle (currently ignored)
 * @param symCb      callback to invoke at the given symbol
 * @param symCbParam opaque parameter passed back to the callback
 * @param symb       symbol index the callback is attached to
 * @param ant        antenna index the callback is attached to
 * @return status code
 *
 * NOTE(review): stub -- the body (and its return value) is not implemented
 * in this revision.
 */
int32_t xran_reg_sym_cb(void *pHandle, xran_callback_sym_fn symCb, void * symCbParam, uint8_t symb, uint8_t ant)
    /* functionality is not yet implemented */
2252 int32_t xran_reg_physide_cb(void *pHandle, xran_fh_tti_callback_fn Cb, void *cbParam, int skipTtiNum, enum callback_to_phy_id id)
2254 struct xran_device_ctx * p_xran_dev_ctx = xran_dev_get_ctx();
2256 p_xran_dev_ctx->ttiCb[id] = Cb;
2257 p_xran_dev_ctx->TtiCbParam[id] = cbParam;
2258 p_xran_dev_ctx->SkipTti[id] = skipTtiNum;
2263 int32_t xran_get_slot_idx (uint32_t *nFrameIdx, uint32_t *nSubframeIdx, uint32_t *nSlotIdx, uint64_t *nSecond)
2267 tti = (int32_t)XranGetTtiNum(xran_lib_ota_sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
2268 *nSlotIdx = (uint32_t)XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME);
2269 *nSubframeIdx = (uint32_t)XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME, SUBFRAMES_PER_SYSTEMFRAME);
2270 *nFrameIdx = (uint32_t)XranGetFrameNum(tti,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME);
2271 *nSecond = timing_get_current_second();
2278 * @brief Get the configuration of eAxC ID
2280 * @return the pointer of configuration
2282 inline struct xran_eaxcid_config *xran_get_conf_eAxC(void *pHandle)
2284 return (&(xran_dev_get_ctx()->eAxc_id_cfg));
2288 * @brief Get the configuration of subcarrier spacing for PRACH
2290 * @return subcarrier spacing value for PRACH
2292 inline uint8_t xran_get_conf_prach_scs(void *pHandle)
2294 return (xran_lib_get_ctx_fhcfg()->prach_conf.nPrachSubcSpacing);
2298 * @brief Get the configuration of FFT size for RU
2300 * @return FFT size value for RU
2302 inline uint8_t xran_get_conf_fftsize(void *pHandle)
2304 return (xran_lib_get_ctx_fhcfg()->ru_conf.fftSize);
2308 * @brief Get the configuration of nummerology
2310 * @return Configured numerology
2312 inline uint8_t xran_get_conf_numerology(void *pHandle)
2314 return (xran_lib_get_ctx_fhcfg()->frame_conf.nNumerology);
2318 * @brief Get the configuration of IQ bit width for RU
2320 * @return IQ bit width for RU
2322 inline uint8_t xran_get_conf_iqwidth(void *pHandle)
2324 struct xran_fh_config *pFhCfg;
2326 pFhCfg = xran_lib_get_ctx_fhcfg();
2327 return ((pFhCfg->ru_conf.iqWidth==16)?0:pFhCfg->ru_conf.iqWidth);
2331 * @brief Get the configuration of compression method for RU
2333 * @return Compression method for RU
2335 inline uint8_t xran_get_conf_compmethod(void *pHandle)
2337 return (xran_lib_get_ctx_fhcfg()->ru_conf.compMeth);
2342 * @brief Get the configuration of the number of component carriers
2344 * @return Configured the number of component carriers
2346 inline uint8_t xran_get_num_cc(void *pHandle)
2348 return (xran_lib_get_ctx_fhcfg()->nCC);
2352 * @brief Get the configuration of the number of antenna
2354 * @return Configured the number of antenna
2356 inline uint8_t xran_get_num_eAxc(void *pHandle)
2358 return (xran_lib_get_ctx_fhcfg()->neAxc);
2361 int32_t xran_get_common_counters(void *pXranLayerHandle, struct xran_common_counters *pStats)
2363 struct xran_device_ctx* pDev = (struct xran_device_ctx*)pXranLayerHandle;
2365 if(pStats && pDev) {
2366 *pStats = pDev->fh_counters;
2367 return XRAN_STATUS_SUCCESS;
2369 return XRAN_STATUS_INVALID_PARAM;