1 /******************************************************************************
3 * Copyright (c) 2020 Intel.
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
9 * http://www.apache.org/licenses/LICENSE-2.0
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
17 *******************************************************************************/
20 * @brief This module provides interface implementation to ORAN FH from Application side
21 * @file app_iof_fh_xran.c
23 * @author Intel Corporation
28 #include <sys/syscall.h>
29 #include <sys/sysinfo.h>
30 #include <immintrin.h>
33 #include "xran_mlog_lnx.h"
35 #include "xran_fh_o_du.h"
36 #include "xran_fh_o_ru.h"
37 #include "xran_compression.h"
38 #include "xran_cp_api.h"
39 #include "xran_sync_api.h"
40 #include "xran_mlog_task_id.h"
41 #include "app_io_fh_xran.h"
43 #include "app_bbu_pool.h"
/* Buffer length (bytes) for front-haul Rx (RU -> DU) IQ data per symbol. */
uint32_t nFpgaToSW_FTH_RxBufferLen;
/* Buffer length (bytes) for PRACH Rx buffers. */
uint32_t nFpgaToSW_PRACH_RxBufferLen;
/* Buffer length (bytes) for front-haul Tx (DU -> RU) IQ data per symbol. */
uint32_t nSW_ToFpga_FTH_TxBufferLen;
/* Process-wide BBU<->xRAN I/O context; allocated by app_io_xran_if_alloc(). */
static struct bbu_xran_io_if *p_app_io_xran_if;
/* Opaque handle returned by the xRAN library (used for xran_mm_* calls). */
void * app_io_xran_handle = NULL;
/* Global front-haul init parameters shared across all O-xU ports. */
struct xran_fh_init app_io_xran_fh_init;
/* Per-port front-haul configuration (one entry per O-xU port). */
struct xran_fh_config app_io_xran_fh_config[XRAN_PORTS_NUM];
/* Rx callbacks registered with the xRAN library; invoked from its timing
 * and worker threads.
 * NOTE(review): the first two prototypes declare `status` as int32_t while
 * the definitions use xran_status_t — presumably the same underlying type;
 * confirm against xran_fh_o_du.h. */
void app_io_xran_fh_rx_callback(void *pCallbackTag, int32_t status);
void app_io_xran_fh_rx_prach_callback(void *pCallbackTag, int32_t status);
void app_io_xran_fh_rx_srs_callback(void *pCallbackTag, xran_status_t status);
/* BBU-offload variants of the Rx callbacks (PUSCH / BFW / PRACH / SRS). */
void app_io_xran_fh_bbu_rx_callback(void *pCallbackTag, xran_status_t status);
void app_io_xran_fh_bbu_rx_bfw_callback(void *pCallbackTag, xran_status_t status);
void app_io_xran_fh_bbu_rx_prach_callback(void *pCallbackTag, xran_status_t status);
void app_io_xran_fh_bbu_rx_srs_callback(void *pCallbackTag, xran_status_t status);
/* Per-port runtime configuration parsed by the app (defined elsewhere). */
extern RuntimeConfig* p_startupConfiguration[XRAN_PORTS_NUM];
69 struct bbu_xran_io_if *
70 app_io_xran_if_alloc(void)
74 ptr = _mm_malloc(sizeof(struct bbu_xran_io_if), 256);
76 rte_panic("_mm_malloc: Can't allocate %lu bytes\n", sizeof(struct bbu_xran_io_if));
78 p_app_io_xran_if = (struct bbu_xran_io_if *)ptr;
79 return p_app_io_xran_if;
82 struct bbu_xran_io_if *
83 app_io_xran_if_get(void)
85 return p_app_io_xran_if;
89 app_io_xran_if_free(void)
91 if (p_app_io_xran_if == NULL) {
92 rte_panic("_mm_free: Can't free p_app_io_xran_if\n");
94 _mm_free(p_app_io_xran_if);
98 struct xran_io_shared_ctrl *
99 app_io_xran_if_ctrl_get(uint32_t o_xu_id)
101 if(o_xu_id >= 0 && o_xu_id < XRAN_PORTS_NUM) {
102 return &p_app_io_xran_if->ioCtrl[o_xu_id];
/* Compute the absolute slot index from the current xRAN timing.
 * (Fragmented in this view: return type, several local declarations and the
 * return statement are not visible — do not modify without the full source.) */
app_io_xran_sfidx_get(uint8_t nNrOfSlotInSf)
    uint32_t nSubframeIdx;
    /* Query current frame/subframe/slot/second from the xRAN timing source. */
    /*uint32_t nXranTime = */xran_get_slot_idx(0, &nFrameIdx, &nSubframeIdx, &nSlotIdx, &nSecond);
    /* Flatten (frame, subframe, slot) into a single linear slot index. */
    nSfIdx = nFrameIdx*NUM_OF_SUBFRAME_PER_FRAME*nNrOfSlotInSf
        + nSubframeIdx*nNrOfSlotInSf
    printf("\nxranTime is %d, return is %d, radio frame is %d, subframe is %d slot is %d tsc is %llu us",
/* U-plane Rx callback for PUSCH data, registered with the xRAN library.
 * Invoked at the configured symbol checkpoints (fractions of a slot and the
 * full slot); on the full-slot callback it clears the per-symbol section
 * descriptors in the Rx PRB maps so the slot's buffers can be reused.
 * (Fragmented in this view: braces and some declarations are not visible.) */
app_io_xran_fh_rx_callback(void *pCallbackTag, xran_status_t status)
    uint64_t t1 = MLogTick();
    uint32_t mlogVar[10];
    uint32_t mlogVarCnt = 0;
    //uint8_t Numerlogy = app_io_xran_fh_config[0].frame_conf.nNumerology;
    //uint8_t nNrOfSlotInSf = 1<<Numerlogy;
    //int32_t sfIdx = app_io_xran_sfidx_get(nNrOfSlotInSf);
    int32_t sym, nSlotIdx, ntti;
    /* The callback tag carries port/cell/slot/symbol identity. */
    struct xran_cb_tag *pTag = (struct xran_cb_tag *) pCallbackTag;
    int32_t o_xu_id = pTag->oXuId;
    struct xran_io_shared_ctrl *psIoCtrl = app_io_xran_if_ctrl_get(o_xu_id);
    struct xran_fh_config *pXranConf = &app_io_xran_fh_config[o_xu_id];
    uint32_t xran_max_antenna_nr = RTE_MAX(pXranConf->neAxc, pXranConf->neAxcUl);
    //int32_t nSectorNum = pXranConf->nCC;
    uint32_t ant_id, sym_id, idxElm;
    struct xran_prb_map *pRbMap = NULL;
    struct xran_prb_elm *pRbElm = NULL;
    mlog_start = MLogTick();
    nCellIdx = pTag->cellId;
    nSlotIdx = pTag->slotiId; ///((status >> 16) & 0xFFFF); /** TTI aka slotIdx */
    sym = pTag->symbol & 0xFF; /* sym */
    /* Map the slot index into the circular front-haul buffer window. */
    ntti = (nSlotIdx + XRAN_N_FE_BUF_LEN -1) % XRAN_N_FE_BUF_LEN;
    mlogVar[mlogVarCnt++] = 0xbcbcbcbc;  /* PUSCH Rx MLog marker */
    mlogVar[mlogVarCnt++] = o_xu_id;
    mlogVar[mlogVarCnt++] = nCellIdx;
    mlogVar[mlogVarCnt++] = sym;
    mlogVar[mlogVarCnt++] = nSlotIdx;
    mlogVar[mlogVarCnt++] = ntti;
    //mlogVar[mlogVarCnt++] = nSlotIdx % gNumSlotPerSfn[nCellIdx];
    //mlogVar[mlogVarCnt++] = get_slot_type(nCellIdx, nSlotIdx, SLOT_TYPE_UL);
    MLogAddVariables(mlogVarCnt, mlogVar, mlog_start);
    printf("psIoCtrl NULL! o_xu_id= %d\n", o_xu_id);
    /* NOTE(review): the first two branches below test the same constant
     * (XRAN_HALF_CB_SYM), making the second branch unreachable — the first
     * was presumably meant to be the one-fourth-slot checkpoint constant;
     * confirm against the xran callback-symbol definitions. */
    if (sym == XRAN_HALF_CB_SYM) {
    } else if (sym == XRAN_HALF_CB_SYM) {
    } else if (sym == XRAN_THREE_FOURTHS_CB_SYM) {
    } else if (sym == XRAN_FULL_CB_SYM) {
        MLogTask(PID_GNB_SYM_CB, t1, MLogTick());
    if(sym == XRAN_FULL_CB_SYM) //full slot callback only
        for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++) {
            pRbMap = (struct xran_prb_map *) psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[ntti][nCellIdx][ant_id].sBufferList.pBuffers->pData;
            printf("(%d:%d:%d)pRbMap == NULL\n", nCellIdx, ntti, ant_id);
            /* Reset all section descriptors so the PRB map can be rebuilt
             * for the next use of this slot's buffers. */
            for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
                for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++ ) {
                    pRbElm = &pRbMap->prbMap[idxElm];
                    pRbElm->nSecDesc[sym_id] = 0;
    MLogTask(PID_GNB_SYM_CB, t1, MLogTick());
/* PRACH Rx callback: decodes tti/symbol from `status` and records an MLog
 * span for the PRACH checkpoint. (Fragmented in this view: braces and any
 * lines between MLogAddVariables and MLogTask are not visible.) */
app_io_xran_fh_rx_prach_callback(void *pCallbackTag, xran_status_t status)
    uint64_t t1 = MLogTick();
    uint32_t mlogVar[10];
    uint32_t mlogVarCnt = 0;
    mlogVar[mlogVarCnt++] = 0xDDDDDDDD;    /* PRACH MLog marker */
    mlogVar[mlogVarCnt++] = status >> 16; /* tti */
    mlogVar[mlogVarCnt++] = status & 0xFF; /* sym */
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
    MLogTask(PID_GNB_PRACH_CB, t1, MLogTick());
/* SRS Rx callback: logs the callback identity to MLog and, on the full-slot
 * checkpoint, clears the per-symbol section descriptors in the SRS Rx PRB
 * maps across all antenna array elements.
 * (Fragmented in this view: braces and some declarations are not visible.) */
app_io_xran_fh_rx_srs_callback(void *pCallbackTag, xran_status_t status)
    uint64_t t1 = MLogTick();
    uint32_t mlogVar[10];
    uint32_t mlogVarCnt = 0;
    //uint8_t Numerlogy = app_io_xran_fh_config[0].frame_conf.nNumerology;
    //uint8_t nNrOfSlotInSf = 1<<Numerlogy;
    //int32_t sfIdx = app_io_xran_sfidx_get(nNrOfSlotInSf);
    int32_t sym, nSlotIdx, ntti;
    struct xran_cb_tag *pTag = (struct xran_cb_tag *) pCallbackTag;
    int32_t o_xu_id = pTag->oXuId;
    struct xran_io_shared_ctrl *psIoCtrl = app_io_xran_if_ctrl_get(o_xu_id);
    struct xran_fh_config *pXranConf = &app_io_xran_fh_config[o_xu_id];
    uint32_t xran_max_antenna_nr = RTE_MAX(pXranConf->neAxc, pXranConf->neAxcUl);
    //int32_t nSectorNum = pXranConf->nCC;
    uint32_t ant_id, sym_id, idxElm;
    struct xran_prb_map *pRbMap = NULL;
    struct xran_prb_elm *pRbElm = NULL;
    /* SRS spans antenna array elements, not just eAxC streams. */
    uint32_t xran_max_ant_array_elm_nr = RTE_MAX(pXranConf->nAntElmTRx, xran_max_antenna_nr);
    nCellIdx = pTag->cellId;
    nSlotIdx = pTag->slotiId; ///((status >> 16) & 0xFFFF); /** TTI aka slotIdx */
    sym = pTag->symbol & 0xFF; /* sym */
    /* Map the slot index into the circular front-haul buffer window. */
    ntti = (nSlotIdx + XRAN_N_FE_BUF_LEN-1) % XRAN_N_FE_BUF_LEN;
    mlogVar[mlogVarCnt++] = 0xCCCCCCCC;   /* SRS MLog marker */
    mlogVar[mlogVarCnt++] = o_xu_id;
    mlogVar[mlogVarCnt++] = nCellIdx;
    mlogVar[mlogVarCnt++] = sym;
    mlogVar[mlogVarCnt++] = nSlotIdx;
    mlogVar[mlogVarCnt++] = ntti;
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
    printf("psIoCtrl NULL! o_xu_id= %d\n", o_xu_id);
    if(sym == XRAN_FULL_CB_SYM) { //full slot callback only
        for(ant_id = 0; ant_id < xran_max_ant_array_elm_nr; ant_id++) {
            pRbMap = (struct xran_prb_map *) psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[ntti][nCellIdx][ant_id].sBufferList.pBuffers->pData;
            printf("(%d:%d:%d)pRbMap == NULL\n", nCellIdx, ntti, ant_id);
            /* Reset all section descriptors so the PRB map can be rebuilt. */
            for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
                for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++ ) {
                    pRbElm = &pRbMap->prbMap[idxElm];
                    pRbElm->nSecDesc[sym_id] = 0;
    MLogTask(PID_GNB_SRS_CB, t1, MLogTick());
/* Beamforming-weights (BFW) Rx callback: decodes tti/symbol from `status`
 * and records an MLog span.
 * NOTE(review): uses marker 0xCCCCCCCC, the same value the SRS callback
 * logs — confirm whether a distinct marker was intended here. */
app_io_xran_fh_rx_bfw_callback(void *pCallbackTag, xran_status_t status)
    uint64_t t1 = MLogTick();
    uint32_t mlogVar[10];
    uint32_t mlogVarCnt = 0;
    mlogVar[mlogVarCnt++] = 0xCCCCCCCC;
    mlogVar[mlogVarCnt++] = status >> 16; /* tti */
    mlogVar[mlogVarCnt++] = status & 0xFF; /* sym */
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
    MLogTask(PID_GNB_BFW_CB, t1, MLogTick());
/* DL TTI timing callback: records a PROC_TIMING MLog span.
 * (Fragmented in this view: any body lines between the two timestamps are
 * not visible — do not modify without the full source.) */
app_io_xran_dl_tti_call_back(void * param)
    uint64_t t1 = MLogTick();
    MLogTask(PID_GNB_PROC_TIMING, t1, MLogTick());
/* UL half-slot timing callback: records a PROC_TIMING MLog span.
 * (Fragmented in this view: any body lines between the two timestamps are
 * not visible — do not modify without the full source.) */
app_io_xran_ul_half_slot_call_back(void * param)
    uint64_t t1 = MLogTick();
    MLogTask(PID_GNB_PROC_TIMING, t1, MLogTick());
/* UL full-slot timing callback: records a PROC_TIMING MLog span.
 * (Fragmented in this view: any body lines between the two timestamps are
 * not visible — do not modify without the full source.) */
app_io_xran_ul_full_slot_call_back(void * param)
    uint64_t t1 = MLogTick();
    MLogTask(PID_GNB_PROC_TIMING, t1, MLogTick());
/* Custom per-symbol UL callback: logs the full sense-of-time snapshot
 * (event type, symbol, tti, frame/subframe/slot, seconds) to MLog and tags
 * the MLog task id with the symbol index.
 * (Fragmented in this view: braces are not visible.) */
app_io_xran_ul_custom_sym_call_back(void * param, struct xran_sense_of_time* time)
    uint64_t t1 = MLogTick();
    uint32_t mlogVar[15];
    uint32_t mlogVarCnt = 0;
    uint32_t sym_idx = 0;
    mlogVar[mlogVarCnt++] = 0xDEADDEAD;   /* custom-symbol MLog marker */
    mlogVar[mlogVarCnt++] = time->type_of_event;
    mlogVar[mlogVarCnt++] = time->nSymIdx;
    mlogVar[mlogVarCnt++] = time->tti_counter;
    mlogVar[mlogVarCnt++] = time->nFrameIdx;
    mlogVar[mlogVarCnt++] = time->nSubframeIdx;
    mlogVar[mlogVarCnt++] = time->nSlotIdx;
    /* 64-bit seconds value is logged as two 32-bit halves (low, then high). */
    mlogVar[mlogVarCnt++] = (uint32_t)(time->nSecond);
    mlogVar[mlogVarCnt++] = (uint32_t)(time->nSecond >> 32);
    sym_idx = time->nSymIdx;
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
    /* Offset the task id by symbol so per-symbol spans are distinguishable. */
    MLogTask(PID_GNB_SYM_CB + sym_idx, t1, MLogTick());
/* Presumably rounds x up to the next power of two (used to size buffer
 * pools below) — body is not visible in this view; TODO confirm. */
NEXT_POW2 ( uint32_t x )
377 app_io_xran_interface(uint32_t o_xu_id, RuntimeConfig *p_o_xu_cfg, UsecaseConfig* p_use_cfg, struct xran_fh_init* p_xran_fh_init)
379 xran_status_t status;
380 struct bbu_xran_io_if *psBbuIo = app_io_xran_if_get();
381 struct xran_io_shared_ctrl *psIoCtrl = app_io_xran_if_ctrl_get(o_xu_id);
382 int32_t nSectorIndex[XRAN_MAX_SECTOR_NR];
384 int32_t i, j, k = 0, z;
390 uint32_t xran_max_antenna_nr = RTE_MAX(p_o_xu_cfg->numAxc, p_o_xu_cfg->numUlAxc);
391 uint32_t xran_max_ant_array_elm_nr = RTE_MAX(p_o_xu_cfg->antElmTRx, xran_max_antenna_nr);
392 uint32_t xran_max_sections_per_slot = RTE_MAX(p_o_xu_cfg->max_sections_per_slot, XRAN_MIN_SECTIONS_PER_SLOT);
393 uint32_t xran_max_prb = app_xran_get_num_rbs(p_o_xu_cfg->xranTech, p_o_xu_cfg->mu_number,p_o_xu_cfg->nDLBandwidth, p_o_xu_cfg->nDLAbsFrePointA);
394 uint32_t numPrbElm = xran_get_num_prb_elm(p_o_xu_cfg->p_PrbMapDl, p_o_xu_cfg->mtu);
395 uint32_t size_of_prb_map = sizeof(struct xran_prb_map) + sizeof(struct xran_prb_elm)*(numPrbElm);
396 uint32_t xran_max_antenna_nr_prach = RTE_MIN(xran_max_antenna_nr, XRAN_MAX_PRACH_ANT_NUM);
398 SWXRANInterfaceTypeEnum eInterfaceType;
400 struct xran_buffer_list *pFthTxBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
401 struct xran_buffer_list *pFthTxPrbMapBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
402 struct xran_buffer_list *pFthRxBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
403 struct xran_buffer_list *pFthRxPrbMapBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
404 struct xran_buffer_list *pFthRxRachBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
405 struct xran_buffer_list *pFthRxRachBufferDecomp[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
406 struct xran_buffer_list *pFthRxSrsBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANT_ARRAY_ELM_NR][XRAN_N_FE_BUF_LEN];
407 struct xran_buffer_list *pFthRxSrsPrbMapBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANT_ARRAY_ELM_NR][XRAN_N_FE_BUF_LEN];
409 struct xran_buffer_list *pFthRxCpPrbMapBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANT_ARRAY_ELM_NR][XRAN_N_FE_BUF_LEN];
410 struct xran_buffer_list *pFthTxCpPrbMapBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANT_ARRAY_ELM_NR][XRAN_N_FE_BUF_LEN];
413 rte_panic("psBbuIo == NULL\n");
416 rte_panic("psIoCtrl == NULL\n");
418 for (nSectorNum = 0; nSectorNum < XRAN_MAX_SECTOR_NR; nSectorNum++)
420 nSectorIndex[nSectorNum] = nSectorNum;
423 nSectorNum = p_o_xu_cfg->numCC;
426 psBbuIo->num_o_ru = p_use_cfg->oXuNum;
427 psBbuIo->bbu_offload = p_xran_fh_init->io_cfg.bbu_offload;
430 psIoCtrl->byteOrder = XRAN_NE_BE_BYTE_ORDER;
431 psIoCtrl->iqOrder = XRAN_I_Q_ORDER;
433 for (nSectorNum = 0; nSectorNum < XRAN_MAX_SECTOR_NR; nSectorNum++)
435 nSectorIndex[nSectorNum] = nSectorNum;
438 if(p_use_cfg->oXuNum > 1 && p_use_cfg->oXuNum <= XRAN_PORTS_NUM) {
439 nSectorNum = p_o_xu_cfg->numCC;
440 psBbuIo->num_cc_per_port[o_xu_id] = p_o_xu_cfg->numCC;
441 printf("port %d has %d CCs\n",o_xu_id, psBbuIo->num_cc_per_port[o_xu_id]);
442 for(i = 0; i < XRAN_MAX_SECTOR_NR && i < nSectorNum; i++) {
443 psBbuIo->map_cell_id2port[o_xu_id][i] = (o_xu_id*nSectorNum)+i;
444 printf("port %d cc_id %d is phy id %d\n", o_xu_id, i, psBbuIo->map_cell_id2port[o_xu_id][i]);
448 nSectorNum = p_o_xu_cfg->numCC;;
449 psBbuIo->num_cc_per_port[o_xu_id] = nSectorNum;
450 printf("port %d has %d CCs\n",o_xu_id, psBbuIo->num_cc_per_port[o_xu_id]);
451 for(i = 0; i < XRAN_MAX_SECTOR_NR && i < nSectorNum; i++) {
452 psBbuIo->map_cell_id2port[o_xu_id][i] = i;
453 printf("port %d cc_id %d is phy id %d\n", o_xu_id, i, psBbuIo->map_cell_id2port[o_xu_id][i]);
457 nSectorNum = p_o_xu_cfg->numCC;
458 printf ("XRAN front haul xran_mm_init \n");
459 status = xran_mm_init (app_io_xran_handle, (uint64_t) SW_FPGA_FH_TOTAL_BUFFER_LEN, SW_FPGA_SEGMENT_BUFFER_LEN);
460 if (status != XRAN_STATUS_SUCCESS)
462 printf ("Failed at XRAN front haul xran_mm_init \n");
466 psBbuIo->nInstanceNum[o_xu_id] = p_o_xu_cfg->numCC;
467 if (o_xu_id < XRAN_PORTS_NUM) {
468 status = xran_sector_get_instances (o_xu_id, app_io_xran_handle,
469 psBbuIo->nInstanceNum[o_xu_id],
470 &psBbuIo->nInstanceHandle[o_xu_id][0]);
471 if (status != XRAN_STATUS_SUCCESS) {
472 printf ("get sector instance failed for XRAN nInstanceNum[%d] %d\n",psBbuIo->nInstanceNum[o_xu_id], o_xu_id);
475 for (i = 0; i < psBbuIo->nInstanceNum[o_xu_id]; i++) {
476 printf("%s: CC %d handle %p\n", __FUNCTION__, i, psBbuIo->nInstanceHandle[o_xu_id][i]);
479 printf ("Failed at XRAN front haul xran_mm_init \n");
483 printf("Sucess xran_mm_init \n");
484 printf("nSectorNum %d\n", nSectorNum);
485 printf("xran_max_sections_per_slot %d\n", xran_max_sections_per_slot);
488 for(i = 0; i < nSectorNum; i++)
490 eInterfaceType = XRANFTHTX_OUT;
491 printf("nSectorIndex[%d] = %d\n",i, nSectorIndex[i]);
492 status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
493 NEXT_POW2(XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT)-1, nSW_ToFpga_FTH_TxBufferLen);
494 if(XRAN_STATUS_SUCCESS != status) {
495 rte_panic("Failed at xran_bm_init , status %d\n", status);
497 for(j = 0; j < XRAN_N_FE_BUF_LEN; j++)
499 for(z = 0; z < xran_max_antenna_nr; z++){
500 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].bValid = 0;
501 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
502 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
503 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
504 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
505 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulTxBuffers[j][i][z][0];
507 for(k = 0; k < XRAN_NUM_OF_SYMBOL_PER_SLOT; k++)
509 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nElementLenInBytes = nSW_ToFpga_FTH_TxBufferLen; // 14 symbols 3200bytes/symbol
510 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nNumberOfElements = 1;
511 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nOffsetInBytes = 0;
512 status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i], psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
513 if(XRAN_STATUS_SUCCESS != status){
514 rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
516 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData = (uint8_t *)ptr;
517 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pCtrl = (void *)mb;
520 u32dptr = (uint32_t*)(ptr);
521 memset(u32dptr, 0x0, nSW_ToFpga_FTH_TxBufferLen);
522 // ptr_temp[0] = j; // TTI
523 // ptr_temp[1] = i; // Sec
524 // ptr_temp[2] = z; // Ant
525 // ptr_temp[3] = k; // sym
527 if(psBbuIo->bbu_offload){
528 status = xran_bm_allocate_ring(psBbuIo->nInstanceHandle[o_xu_id][i], "TXO", i, j, z, k, &ring);
529 if(XRAN_STATUS_SUCCESS != status){
530 rte_panic("Failed at xran_bm_allocate_ring , status %d\n",status);
532 psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pRing = (void *)ring;
539 printf("size_of_prb_map %d\n", size_of_prb_map);
541 eInterfaceType = XRANFTHTX_PRB_MAP_OUT;
542 status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
543 NEXT_POW2(XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT)-1, size_of_prb_map);
544 if(XRAN_STATUS_SUCCESS != status) {
545 rte_panic("Failed at xran_bm_init , status %d\n", status);
548 for(j = 0; j < XRAN_N_FE_BUF_LEN; j++)
550 for(z = 0; z < xran_max_antenna_nr; z++){
551 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
552 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
553 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
554 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
555 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
556 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulTxPrbMapBuffers[j][i][z];
558 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nElementLenInBytes = size_of_prb_map;
559 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nNumberOfElements = 1;
560 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nOffsetInBytes = 0;
561 status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i], psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
562 if(XRAN_STATUS_SUCCESS != status) {
563 rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
565 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pData = (uint8_t *)ptr;
566 psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pCtrl = (void *)mb;
569 struct xran_prb_map * p_rb_map = (struct xran_prb_map *)ptr;
570 memset(p_rb_map, 0, size_of_prb_map);
571 if (p_o_xu_cfg->appMode == APP_O_DU) {
572 if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
573 if(p_o_xu_cfg->RunSlotPrbMapBySymbolEnable){
574 xran_init_PrbMap_by_symbol_from_cfg(p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_DL][j][i][z], ptr, p_o_xu_cfg->mtu, xran_max_prb);
577 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_DL][j][i][z], ptr, p_o_xu_cfg->mtu);
580 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_PrbMapDl, ptr, p_o_xu_cfg->mtu);
583 if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
584 if(p_o_xu_cfg->RunSlotPrbMapBySymbolEnable){
585 xran_init_PrbMap_by_symbol_from_cfg(p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_UL][j][i][z], ptr, p_o_xu_cfg->mtu, xran_max_prb);
588 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_UL][j][i][z], ptr, p_o_xu_cfg->mtu);
591 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_PrbMapUl, ptr, p_o_xu_cfg->mtu);
599 for(i = 0; i<nSectorNum; i++)
601 eInterfaceType = XRANFTHRX_IN;
602 status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
603 NEXT_POW2(XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT)-1, nSW_ToFpga_FTH_TxBufferLen);
604 if(XRAN_STATUS_SUCCESS != status)
606 printf("Failed at xran_bm_init, status %d\n", status);
607 iAssert(status == XRAN_STATUS_SUCCESS);
610 for(j = 0;j < XRAN_N_FE_BUF_LEN; j++)
612 for(z = 0; z < xran_max_antenna_nr; z++){
613 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].bValid = 0;
614 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
615 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
616 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
617 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
618 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulRxBuffers[j][i][z][0];
619 for(k = 0; k< XRAN_NUM_OF_SYMBOL_PER_SLOT; k++)
621 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nElementLenInBytes = nFpgaToSW_FTH_RxBufferLen; // 1 symbols 3200bytes
622 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nNumberOfElements = 1;
623 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nOffsetInBytes = 0;
624 status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
625 if(XRAN_STATUS_SUCCESS != status) {
626 rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
628 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData = (uint8_t *)ptr;
629 psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pCtrl = (void *) mb;
631 u32dptr = (uint32_t*)(ptr);
632 //uint8_t *ptr_temp = (uint8_t *)ptr;
633 memset(u32dptr, 0x0, nFpgaToSW_FTH_RxBufferLen);
634 // ptr_temp[0] = j; // TTI
635 // ptr_temp[1] = i; // Sec
636 // ptr_temp[2] = z; // Ant
637 // ptr_temp[3] = k; // sym
644 eInterfaceType = XRANFTHRX_PRB_MAP_IN;
645 status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
646 NEXT_POW2(XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT)-1, size_of_prb_map);
647 if(XRAN_STATUS_SUCCESS != status) {
648 rte_panic("Failed at xran_bm_init, status %d\n", status);
651 for(j = 0;j < XRAN_N_FE_BUF_LEN; j++) {
652 for(z = 0; z < xran_max_antenna_nr; z++){
653 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
654 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
655 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
656 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
657 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
658 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulRxPrbMapBuffers[j][i][z];
660 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nElementLenInBytes = size_of_prb_map;
661 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nNumberOfElements = 1;
662 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nOffsetInBytes = 0;
663 status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
664 if(XRAN_STATUS_SUCCESS != status) {
665 rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
667 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pData = (uint8_t *)ptr;
668 psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pCtrl = (void *)mb;
670 struct xran_prb_map * p_rb_map = (struct xran_prb_map *)ptr;
671 memset(p_rb_map, 0, size_of_prb_map);
673 if (p_o_xu_cfg->appMode == APP_O_DU) {
674 if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
675 if(p_o_xu_cfg->RunSlotPrbMapBySymbolEnable){
676 xran_init_PrbMap_by_symbol_from_cfg(p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_UL][j][i][z], ptr, p_o_xu_cfg->mtu, xran_max_prb);
679 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_UL][j][i][z], ptr, p_o_xu_cfg->mtu);
682 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_PrbMapUl, ptr, p_o_xu_cfg->mtu);
685 if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
686 if(p_o_xu_cfg->RunSlotPrbMapBySymbolEnable){
687 xran_init_PrbMap_by_symbol_from_cfg(p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_DL][j][i][z], ptr, p_o_xu_cfg->mtu, xran_max_prb);
690 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_DL][j][i][z], ptr, p_o_xu_cfg->mtu);
693 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_PrbMapDl, ptr, p_o_xu_cfg->mtu);
700 if(p_o_xu_cfg->appMode == APP_O_RU){
702 eInterfaceType = XRANCP_PRB_MAP_IN_RX;
703 status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
704 XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT, size_of_prb_map);
705 if(XRAN_STATUS_SUCCESS != status) {
706 rte_panic("Failed at xran_bm_init, status %d\n", status);
709 for(j = 0;j < XRAN_N_FE_BUF_LEN; j++) {
710 for(z = 0; z < xran_max_antenna_nr; z++){
711 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
712 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
713 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
714 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
715 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
716 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulCpRxPrbMapBbuIoBufCtrl[j][i][z];
718 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nElementLenInBytes = size_of_prb_map;
719 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nNumberOfElements = 1;
720 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nOffsetInBytes = 0;
721 status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
722 if(XRAN_STATUS_SUCCESS != status) {
723 rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
725 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pData = (uint8_t *)ptr;
726 psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pCtrl = (void *)mb;
729 struct xran_prb_map * p_rb_map = (struct xran_prb_map *)ptr;
730 memset(p_rb_map, 0, size_of_prb_map);
732 if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
733 memcpy(ptr, p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_DL][j][i][z], size_of_prb_map);
735 memcpy(ptr, p_o_xu_cfg->p_PrbMapDl, size_of_prb_map);
743 eInterfaceType = XRANCP_PRB_MAP_IN_TX;
744 status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
745 NEXT_POW2(XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT)-1, size_of_prb_map);
746 if(XRAN_STATUS_SUCCESS != status){
747 rte_panic("Failed at xran_bm_init, status %d\n", status);
750 for(j = 0;j < XRAN_N_FE_BUF_LEN; j++) {
751 for(z = 0; z < xran_max_antenna_nr; z++){
752 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
753 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
754 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
755 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
756 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
757 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulCpTxPrbMapBbuIoBufCtrl[j][i][z];
759 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nElementLenInBytes = size_of_prb_map;
760 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nNumberOfElements = 1;
761 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nOffsetInBytes = 0;
762 status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
763 if(XRAN_STATUS_SUCCESS != status) {
764 rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
766 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pData = (uint8_t *)ptr;
767 psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pCtrl = (void *)mb;
769 struct xran_prb_map * p_rb_map = (struct xran_prb_map *)ptr;
770 memset(p_rb_map, 0, size_of_prb_map);
772 if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
773 memcpy(ptr, p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_DL][j][i][z], size_of_prb_map);
775 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_PrbMapDl, ptr, p_o_xu_cfg->mtu);
783 // add prach rx buffer
784 for(i = 0; i<nSectorNum; i++)
786 eInterfaceType = XRANFTHRACH_IN;
787 status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i],&psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
788 NEXT_POW2(XRAN_N_FE_BUF_LEN*xran_max_antenna_nr_prach*XRAN_NUM_OF_SYMBOL_PER_SLOT)-1, PRACH_PLAYBACK_BUFFER_BYTES);
789 if(XRAN_STATUS_SUCCESS != status) {
790 rte_panic("Failed at xran_bm_init, status %d\n", status);
792 for(j = 0;j < XRAN_N_FE_BUF_LEN; j++)
794 for(z = 0; z < xran_max_antenna_nr_prach; z++){
795 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].bValid = 0;
796 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
797 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
798 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
799 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = xran_max_antenna_nr_prach; // ant number.
800 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFHPrachRxBuffers[j][i][z][0];
801 psIoCtrl->sFHPrachRxBbuIoBufCtrlDecomp[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFHPrachRxBuffersDecomp[j][i][z][0];
802 for(k = 0; k< XRAN_NUM_OF_SYMBOL_PER_SLOT; k++)
804 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nElementLenInBytes = PRACH_PLAYBACK_BUFFER_BYTES;
805 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nNumberOfElements = 1;
806 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nOffsetInBytes = 0;
808 if (p_o_xu_cfg->appMode == APP_O_RU) {
809 status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
810 if(XRAN_STATUS_SUCCESS != status) {
811 rte_panic("Failed at xran_bm_allocate_buffer, status %d\n",status);
813 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData = (uint8_t *)ptr;
814 psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pCtrl = (void *)mb;
816 u32dptr = (uint32_t*)(ptr);
817 memset(u32dptr, 0x0, PRACH_PLAYBACK_BUFFER_BYTES);
819 psIoCtrl->sFHPrachRxBbuIoBufCtrlDecomp[j][i][z].sBufferList.pBuffers[k].pData= (uint8_t *)ptr;
826 /* add SRS rx buffer */
827 printf("%s:%d: xran_max_ant_array_elm_nr %d\n", __FUNCTION__, __LINE__, xran_max_ant_array_elm_nr);
828 for(i = 0; i<nSectorNum && xran_max_ant_array_elm_nr; i++) {
829 eInterfaceType = XRANSRS_IN;
830 status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i],&psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
831 NEXT_POW2(XRAN_N_FE_BUF_LEN*xran_max_ant_array_elm_nr*XRAN_MAX_NUM_OF_SRS_SYMBOL_PER_SLOT)-1, nSW_ToFpga_FTH_TxBufferLen);
833 if(XRAN_STATUS_SUCCESS != status) {
834 rte_panic("Failed at xran_bm_init, status %d\n", status);
836 for(j = 0; j < XRAN_N_FE_BUF_LEN; j++) {
837 for(z = 0; z < xran_max_ant_array_elm_nr; z++){
838 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].bValid = 0;
839 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
840 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
841 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
842 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = xran_max_ant_array_elm_nr; /* ant number */
843 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFHSrsRxBuffers[j][i][z][0];
844 for(k = 0; k < XRAN_MAX_NUM_OF_SRS_SYMBOL_PER_SLOT; k++)
846 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nElementLenInBytes = nSW_ToFpga_FTH_TxBufferLen;
847 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nNumberOfElements = 1;
848 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nOffsetInBytes = 0;
849 status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
850 if(XRAN_STATUS_SUCCESS != status) {
851 rte_panic("Failed at xran_bm_allocate_buffer, status %d\n",status);
853 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData = (uint8_t *)ptr;
854 psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pCtrl = (void *)mb;
856 u32dptr = (uint32_t*)(ptr);
857 memset(u32dptr, 0x0, nSW_ToFpga_FTH_TxBufferLen);
864 eInterfaceType = XRANSRS_PRB_MAP_IN;
865 status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
866 NEXT_POW2(XRAN_N_FE_BUF_LEN*xran_max_ant_array_elm_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT)-1, size_of_prb_map);
867 if(XRAN_STATUS_SUCCESS != status) {
868 rte_panic("Failed at xran_bm_init, status %d\n", status);
871 for(j = 0;j < XRAN_N_FE_BUF_LEN; j++) {
872 for(z = 0; z < xran_max_ant_array_elm_nr; z++) {
873 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
874 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
875 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
876 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
877 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
878 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFHSrsRxPrbMapBuffers[j][i][z];
880 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nElementLenInBytes = size_of_prb_map;
881 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nNumberOfElements = 1;
882 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nOffsetInBytes = 0;
883 status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
884 if(XRAN_STATUS_SUCCESS != status) {
885 rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
887 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pData = (uint8_t *)ptr;
888 psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pCtrl = (void *)mb;
891 struct xran_prb_map * p_rb_map = (struct xran_prb_map *)ptr;
892 memset(p_rb_map, 0, size_of_prb_map);
894 if (p_o_xu_cfg->appMode == APP_O_DU) {
895 if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
896 memcpy(ptr, p_o_xu_cfg->p_RunSrsSlotPrbMap[XRAN_DIR_UL][j][i][z], size_of_prb_map);
898 memcpy(ptr, p_o_xu_cfg->p_PrbMapSrs, size_of_prb_map);
901 if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
902 memcpy(ptr, p_o_xu_cfg->p_RunSrsSlotPrbMap[XRAN_DIR_DL][j][i][z], size_of_prb_map);
904 //memcpy(ptr, p_o_xu_cfg->p_PrbMapSrs, size_of_prb_map);
905 xran_init_PrbMap_from_cfg(p_o_xu_cfg->p_PrbMapSrs, ptr, p_o_xu_cfg->mtu);
913 for(i=0; i<nSectorNum; i++)
915 for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
917 for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
918 pFthTxBuffer[i][z][j] = NULL;
919 pFthTxPrbMapBuffer[i][z][j] = NULL;
920 pFthRxBuffer[i][z][j] = NULL;
921 pFthRxPrbMapBuffer[i][z][j] = NULL;
922 pFthRxRachBuffer[i][z][j] = NULL;
923 pFthRxRachBufferDecomp[i][z][j] = NULL;
924 pFthRxCpPrbMapBuffer[i][z][j] = NULL;
925 pFthTxCpPrbMapBuffer[i][z][j] = NULL;
927 for(z = 0; z < XRAN_MAX_ANT_ARRAY_ELM_NR; z++){
928 pFthRxSrsBuffer[i][z][j] = NULL;
929 pFthRxSrsPrbMapBuffer[i][z][j] = NULL;
934 for(i=0; i<nSectorNum; i++)
936 for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
938 for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
939 pFthTxBuffer[i][z][j] = &(psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList);
940 pFthTxPrbMapBuffer[i][z][j] = &(psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList);
941 pFthRxBuffer[i][z][j] = &(psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList);
942 pFthRxPrbMapBuffer[i][z][j] = &(psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList);
943 pFthRxRachBuffer[i][z][j] = &(psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList);
944 pFthRxRachBufferDecomp[i][z][j] = &(psIoCtrl->sFHPrachRxBbuIoBufCtrlDecomp[j][i][z].sBufferList);
945 pFthRxCpPrbMapBuffer[i][z][j] = &(psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList);
946 pFthTxCpPrbMapBuffer[i][z][j] = &(psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList);
949 for(z = 0; z < XRAN_MAX_ANT_ARRAY_ELM_NR && xran_max_ant_array_elm_nr; z++){
950 pFthRxSrsBuffer[i][z][j] = &(psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList);
951 pFthRxSrsPrbMapBuffer[i][z][j] = &(psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList);
956 if(NULL != psBbuIo->nInstanceHandle[o_xu_id])
958 /* add pusch callback */
959 for (i = 0; i<nSectorNum; i++)
961 psBbuIo->RxCbTag[o_xu_id][i].oXuId = o_xu_id;
962 psBbuIo->RxCbTag[o_xu_id][i].cellId = i;
963 psBbuIo->RxCbTag[o_xu_id][i].symbol = 0;
964 psBbuIo->RxCbTag[o_xu_id][i].slotiId = 0;
965 if(psBbuIo->bbu_offload)
966 xran_5g_fronthault_config (psBbuIo->nInstanceHandle[o_xu_id][i],
968 pFthTxPrbMapBuffer[i],
970 pFthRxPrbMapBuffer[i],
971 app_io_xran_fh_bbu_rx_callback, &psBbuIo->RxCbTag[o_xu_id][i]);
973 xran_5g_fronthault_config (psBbuIo->nInstanceHandle[o_xu_id][i],
975 pFthTxPrbMapBuffer[i],
977 pFthRxPrbMapBuffer[i],
978 app_io_xran_fh_rx_callback, &psBbuIo->RxCbTag[o_xu_id][i]);
980 /* add BFWs callback here */
981 for (i = 0; i<nSectorNum; i++) {
982 psBbuIo->BfwCbTag[o_xu_id][i].cellId = o_xu_id;
983 psBbuIo->BfwCbTag[o_xu_id][i].cellId = i;
984 psBbuIo->BfwCbTag[o_xu_id][i].symbol = 0;
985 psBbuIo->BfwCbTag[o_xu_id][i].slotiId = 0;
987 if(psBbuIo->bbu_offload)
988 xran_5g_bfw_config(psBbuIo->nInstanceHandle[o_xu_id][i],
989 pFthRxCpPrbMapBuffer[i],
990 pFthTxCpPrbMapBuffer[i],
991 app_io_xran_fh_bbu_rx_bfw_callback,&psBbuIo->BfwCbTag[o_xu_id][i]);
994 xran_5g_bfw_config(psBbuIo->nInstanceHandle[o_xu_id][i],
995 pFthRxCpPrbMapBuffer[i],
996 pFthTxCpPrbMapBuffer[i],
997 app_io_xran_fh_rx_bfw_callback,&psBbuIo->BfwCbTag[o_xu_id][i]);
999 /* add prach callback here */
1000 for (i = 0; i<nSectorNum; i++)
1002 psBbuIo->PrachCbTag[o_xu_id][i].oXuId = o_xu_id;
1003 psBbuIo->PrachCbTag[o_xu_id][i].cellId = i;
1004 psBbuIo->PrachCbTag[o_xu_id][i].symbol = 0;
1005 psBbuIo->PrachCbTag[o_xu_id][i].slotiId = 0;
1006 if(psBbuIo->bbu_offload)
1007 xran_5g_prach_req(psBbuIo->nInstanceHandle[o_xu_id][i], pFthRxRachBuffer[i],pFthRxRachBufferDecomp[i],
1008 app_io_xran_fh_bbu_rx_prach_callback,&psBbuIo->PrachCbTag[o_xu_id][i]);
1010 xran_5g_prach_req(psBbuIo->nInstanceHandle[o_xu_id][i], pFthRxRachBuffer[i],pFthRxRachBufferDecomp[i],
1011 app_io_xran_fh_rx_prach_callback,&psBbuIo->PrachCbTag[o_xu_id][i]);
1014 /* add SRS callback here */
1015 for (i = 0; i<nSectorNum && xran_max_ant_array_elm_nr; i++) {
1016 psBbuIo->SrsCbTag[o_xu_id][i].oXuId = o_xu_id;
1017 psBbuIo->SrsCbTag[o_xu_id][i].cellId = i;
1018 psBbuIo->SrsCbTag[o_xu_id][i].symbol = 0;
1019 psBbuIo->SrsCbTag[o_xu_id][i].slotiId = 0;
1020 if(psBbuIo->bbu_offload)
1021 xran_5g_srs_req(psBbuIo->nInstanceHandle[o_xu_id][i], pFthRxSrsBuffer[i], pFthRxSrsPrbMapBuffer[i],
1022 app_io_xran_fh_bbu_rx_srs_callback,&psBbuIo->SrsCbTag[o_xu_id][i]);
1024 xran_5g_srs_req(psBbuIo->nInstanceHandle[o_xu_id][i], pFthRxSrsBuffer[i], pFthRxSrsPrbMapBuffer[i],
1025 app_io_xran_fh_rx_srs_callback,&psBbuIo->SrsCbTag[o_xu_id][i]);
/**
 * Populate a C-Plane section extension type-1 (beamforming weights) buffer
 * for one PRB map element.
 *
 * @param p_pRbMapElm     PRB map element whose bf_weight fields are filled in
 * @param p_bfw_buffer    source buffer of packed BFW IQ pairs
 * @param mtu             packet MTU, used as the upper bound for the ext buffer
 * @param numSetBFW_total running count of BFW sets already consumed from
 *                        p_bfw_buffer; used to compute the read offset
 *
 * NOTE(review): several structural lines (braces / else keywords) are missing
 * from this capture of the file; control flow is annotated best-effort and
 * must be verified against the full source.
 */
app_io_xran_ext_type1_populate(struct xran_prb_elm* p_pRbMapElm, char *p_bfw_buffer, uint32_t mtu, uint16_t* numSetBFW_total)
    xran_status_t status = XRAN_STATUS_SUCCESS;
    int16_t ext_sec_total = 0;
    int8_t * ext_buf = NULL;        /* write cursor into the extension buffer */
    int8_t * ext_buf_start = NULL;  /* start of the (possibly reused) buffer */

    /* The whole MTU bounds the external section buffer size. */
    ext_len = p_pRbMapElm->bf_weight.maxExtBufSize = mtu; /* MAX_RX_LEN; */ /* Maximum space of external buffer */

    /* Reuse a previously attached extension buffer when present ... */
    if (p_pRbMapElm->bf_weight.p_ext_start)
        ext_buf = (int8_t *)p_pRbMapElm->bf_weight.p_ext_start;
    /* ... otherwise allocate a fresh one (NOTE(review): 'else' dropped in capture). */
    ext_buf = (int8_t *)xran_malloc(p_pRbMapElm->bf_weight.maxExtBufSize);

    /* Allocation failure is fatal (NOTE(review): guarding 'if' dropped in capture). */
    rte_panic("xran_malloc return NULL [sz %d]\n", p_pRbMapElm->bf_weight.maxExtBufSize);

    ext_buf_start = ext_buf;
    /* Reserve headroom for Ethernet/eCPRI/radio-app section headers so the
       extension payload lands after them in the final packet. */
    ext_buf += (RTE_PKTMBUF_HEADROOM +
        sizeof(struct xran_ecpri_hdr) +
        sizeof(struct xran_cp_radioapp_section1_header));

    ext_len -= (RTE_PKTMBUF_HEADROOM +
        sizeof(struct xran_ecpri_hdr) +
        sizeof(struct xran_cp_radioapp_section1_header));

    /* Source offset: each already-consumed BFW set is nAntElmTRx IQ pairs
       of 4 bytes. (NOTE(review): call arguments truncated in this capture.) */
    ext_sec_total = xran_cp_populate_section_ext_1((int8_t *)ext_buf,
        (int16_t *) (p_bfw_buffer + (*numSetBFW_total*p_pRbMapElm->bf_weight.nAntElmTRx)*4),

    if(ext_sec_total > 0) {
        /* Publish buffer pointers and generated section size on the element. */
        p_pRbMapElm->bf_weight.p_ext_start = ext_buf_start;
        p_pRbMapElm->bf_weight.p_ext_section = ext_buf;
        p_pRbMapElm->bf_weight.ext_section_sz = ext_sec_total;

    /* Error paths: section population failed / allocation returned NULL. */
    rte_panic("xran_cp_populate_section_ext_1 return error [%d]\n", ext_sec_total);

    rte_panic("xran_malloc return NULL\n");
/**
 * Populate a C-Plane section extension type-11 (bundled beamforming weights)
 * buffer for one PRB map element.
 *
 * @param p_pRbMapElm        PRB map element whose bf_weight fields are filled
 * @param p_tx_dl_bfw_buffer source buffer of packed DL BFW IQ pairs
 * @param mtu                packet MTU, used as the max ext buffer size and to
 *                           validate that the configured BFW sets fit
 *
 * NOTE(review): braces/else lines are missing from this capture; verify
 * control flow against the full source.
 */
app_io_xran_ext_type11_populate(struct xran_prb_elm* p_pRbMapElm, char *p_tx_dl_bfw_buffer, uint32_t mtu)
    xran_status_t status = XRAN_STATUS_SUCCESS;
    int32_t n_max_set_bfw;

    p_pRbMapElm->bf_weight.maxExtBufSize = mtu; /* MAX_RX_LEN; */ /* Maximum space of external buffer */

    /* Reuse an already-attached buffer, otherwise allocate one
       (NOTE(review): 'else' and NULL-check lines dropped in capture). */
    if (p_pRbMapElm->bf_weight.p_ext_start)
        extbuf = (uint8_t *)p_pRbMapElm->bf_weight.p_ext_start;
    extbuf = (uint8_t*)xran_malloc(p_pRbMapElm->bf_weight.maxExtBufSize);
    rte_panic("xran_malloc return NULL [sz %d]\n", p_pRbMapElm->bf_weight.maxExtBufSize);

    /* Check BFWs can be fit with MTU size */
    n_max_set_bfw = xran_cp_estimate_max_set_bfws(p_pRbMapElm->bf_weight.nAntElmTRx,
        p_pRbMapElm->bf_weight.bfwIqWidth,
        p_pRbMapElm->bf_weight.bfwCompMeth,

    if(p_pRbMapElm->bf_weight.numSetBFWs > n_max_set_bfw) {
        /* PRB elm doesn't fit into packet MTU size */
        rte_panic("BFWs are too large with MTU %d! (cfg:%d / max:%d)\n",
            mtu, p_pRbMapElm->bf_weight.numSetBFWs, n_max_set_bfw);

    /* Configure source buffer and beam ID of BFWs */
    for(i = 0; i < p_pRbMapElm->bf_weight.numSetBFWs; i++) {
        /* Each set reads nAntElmTRx 2-byte elements; beam IDs start at 0x7000. */
        p_pRbMapElm->bf_weight.bfw[i].pBFWs = (uint8_t *)(p_tx_dl_bfw_buffer + p_pRbMapElm->bf_weight.nAntElmTRx*2*i);
        p_pRbMapElm->bf_weight.bfw[i].beamId = 0x7000+i;

    /* Pack all configured BFW sets into the extension buffer. */
    n_max_set_bfw = xran_cp_prepare_ext11_bfws(p_pRbMapElm->bf_weight.numSetBFWs,
        p_pRbMapElm->bf_weight.nAntElmTRx,
        p_pRbMapElm->bf_weight.bfwIqWidth,
        p_pRbMapElm->bf_weight.bfwCompMeth,
        p_pRbMapElm->bf_weight.maxExtBufSize,
        p_pRbMapElm->bf_weight.bfw);
    if(n_max_set_bfw > 0) {
        /* Record the prepared size and attach the buffer to the element. */
        p_pRbMapElm->bf_weight.ext_section_sz = n_max_set_bfw;
        p_pRbMapElm->bf_weight.p_ext_start = (int8_t *)extbuf;

    rte_panic("Fail to prepare BFWs for extension 11!\n");
/**
 * Reset a PRB map to a single full-band element covering all symbols.
 *
 * Used when DynamicSectionEna == 0 to force a one-section static layout.
 *
 * @param pRbMap  PRB map to (re)initialize
 * @param dir     packet direction (NOTE(review): not used by the visible
 *                lines — an assignment such as pRbMap->dir may be among the
 *                lines dropped from this capture; confirm in full source)
 * @param cc_id   component carrier index
 * @param ant_id  antenna / RU port index
 * @param sym_id  symbol whose section descriptor is initialized
 * @param tti     TTI index stored in the map
 * @param nRBs    number of PRBs covered by the single element
 */
app_io_xran_iq_content_init_cp_rb_map(struct xran_prb_map* pRbMap,
    enum xran_pkt_dir dir, int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t tti, uint16_t nRBs)
    pRbMap->xran_port = 0;
    pRbMap->band_id = 0;
    pRbMap->cc_id = cc_id;
    pRbMap->ru_port_id = ant_id;
    pRbMap->tti_id = tti;
    pRbMap->start_sym_id = 0;
    /* Collapse the map to one element spanning PRBs [0, nRBs) and all 14 symbols. */
    pRbMap->nPrbElm = 1;
    pRbMap->prbMap[0].nRBStart = 0;
    pRbMap->prbMap[0].nRBSize = nRBs;
    pRbMap->prbMap[0].nStartSymb = 0;
    pRbMap->prbMap[0].numSymb = 14;
    pRbMap->prbMap[0].sec_desc[sym_id][0].iq_buffer_offset = 0;
    /* 4 bytes per RE pair; uncompressed 16-bit I/Q assumed here. */
    pRbMap->prbMap[0].sec_desc[sym_id][0].iq_buffer_len = nRBs *4L;
    pRbMap->prbMap[0].nBeamIndex = 0;
    pRbMap->prbMap[0].compMethod = XRAN_COMPMETHOD_NONE;
/**
 * Initialize C-Plane TX (DL) content for one (cc, ant, sym, tti) tuple:
 * either reset the PRB map to a static single section, or (O-DU, Category B,
 * first symbol) populate per-element beamforming-weight extensions.
 *
 * @param appMode    APP_O_DU or APP_O_RU
 * @param pXranConf  per-port FH configuration (nDLRBs, nAntElmTRx, ...)
 * @param p_iq       playback / BFW source buffers
 * @param flowId     index into p_iq buffers
 *
 * NOTE(review): brace/else lines are missing from this capture; the
 * printf("DL pRbMap ==NULL...") presumably sits on a dropped else branch of
 * the pRbMap NULL check — confirm in full source.
 */
app_io_xran_iq_content_init_cp_tx(uint8_t appMode, struct xran_fh_config *pXranConf,
    struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
    int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t target_tti, int32_t flowId)
    struct xran_prb_map* pRbMap = NULL;
    char* dl_bfw_pos = NULL;

    int32_t tti_dst = target_tti % XRAN_N_FE_BUF_LEN;   /* destination ring slot */
    int32_t tti_src = target_tti % p_iq->numSlots;      /* source playback slot */
    /* Byte offset of this slot's BFW block: nDLRBs * nAntElmTRx IQ pairs * 4B. */
    int32_t tx_dl_bfw_buffer_position = tti_src * (pXranConf->nDLRBs*pXranConf->nAntElmTRx)*4;
    uint16_t numSetBFW_total = 0;

    if(p_iq->p_tx_play_buffer[flowId]) {
        /* Clamp indices to the static array bounds. */
        cc_id = cc_id % XRAN_MAX_SECTOR_NR;
        ant_id = ant_id % XRAN_MAX_ANTENNA_NR;
        pRbMap = (struct xran_prb_map *) psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[tti_dst][cc_id][ant_id].sBufferList.pBuffers->pData;
        dl_bfw_pos = ((char*)p_iq->p_tx_dl_bfw_buffer[flowId]) + tx_dl_bfw_buffer_position;

        if (pXranConf->DynamicSectionEna == 0) {
            /* Static sections: collapse to one full-band element once. */
            if(pRbMap->nPrbElm != 1 )
                app_io_xran_iq_content_init_cp_rb_map(pRbMap, XRAN_DIR_DL, cc_id, ant_id, sym_id, tti_dst, pXranConf->nDLRBs);
        } else if(pXranConf->ru_conf.xranCat == XRAN_CATEGORY_B
            && appMode == APP_O_DU
            && sym_id == 0) { /* BFWs are per slot */

            struct xran_prb_elm* p_pRbMapElm = NULL;

            /* Attach BFW extensions (ext-1 or ext-11) to every element. */
            for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) {
                p_pRbMapElm = &pRbMap->prbMap[idxElm];
                p_pRbMapElm->bf_weight.nAntElmTRx = pXranConf->nAntElmTRx;

                if(p_pRbMapElm->BeamFormingType == XRAN_BEAM_WEIGHT && p_pRbMapElm->bf_weight_update) {
                    if(p_pRbMapElm->bf_weight.extType == 1) {
                        app_io_xran_ext_type1_populate(p_pRbMapElm, dl_bfw_pos, app_io_xran_fh_init.mtu, &numSetBFW_total);
                    /* (NOTE(review): 'else' dropped in capture) */
                    app_io_xran_ext_type11_populate(p_pRbMapElm, dl_bfw_pos, app_io_xran_fh_init.mtu);

                /* Advance the running BFW-set count for the ext-1 offset. */
                numSetBFW_total += p_pRbMapElm->bf_weight.numSetBFWs;

    printf("DL pRbMap ==NULL [%d][%d][%d]\n", tti_dst, cc_id, ant_id);

    //printf("flowId %d\n", flowId);
/**
 * Initialize C-Plane RX (UL) content for one (cc, ant, sym, tti) tuple.
 * Mirrors app_io_xran_iq_content_init_cp_tx but operates on the UL PRB map
 * and UL beamforming-weight playback buffer.
 *
 * @param appMode    APP_O_DU or APP_O_RU
 * @param pXranConf  per-port FH configuration (nULRBs, nAntElmTRx, ...)
 * @param p_iq       playback / BFW source buffers
 * @param flowId     index into p_iq buffers
 *
 * NOTE(review): brace/else lines are missing from this capture; the final
 * rte_panic presumably sits on a dropped else branch of a pRbMap NULL check.
 */
app_io_xran_iq_content_init_cp_rx(uint8_t appMode, struct xran_fh_config *pXranConf,
    struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
    int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t target_tti, int32_t flowId)
    struct xran_prb_map* pRbMap = NULL;
    char* ul_bfw_pos = NULL;

    int32_t tti_dst = target_tti % XRAN_N_FE_BUF_LEN;   /* destination ring slot */
    int32_t tti_src = target_tti % p_iq->numSlots;      /* source playback slot */
    /* Byte offset of this slot's UL BFW block: nULRBs * nAntElmTRx pairs * 4B. */
    int32_t tx_ul_bfw_buffer_position = tti_src * (pXranConf->nULRBs*pXranConf->nAntElmTRx)*4;

    uint16_t numSetBFW_total = 0;

    /* Clamp indices to the static array bounds. */
    cc_id = cc_id % XRAN_MAX_SECTOR_NR;
    ant_id = ant_id % XRAN_MAX_ANTENNA_NR;

    pRbMap = (struct xran_prb_map *) psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[tti_dst][cc_id][ant_id].sBufferList.pBuffers->pData;
    ul_bfw_pos = ((char*)p_iq->p_tx_ul_bfw_buffer[flowId]) + tx_ul_bfw_buffer_position;

    if (pXranConf->DynamicSectionEna == 0) {
        /* Static sections: collapse to one full-band element once. */
        if(pRbMap->nPrbElm != 1 )
            app_io_xran_iq_content_init_cp_rb_map(pRbMap, XRAN_DIR_UL, cc_id, ant_id, sym_id, tti_dst, pXranConf->nULRBs);
    } else if(pXranConf->ru_conf.xranCat == XRAN_CATEGORY_B
        && appMode == APP_O_DU

        struct xran_prb_elm* p_pRbMapElm = NULL;

        /* Attach BFW extensions (ext-1 or ext-11) to every element. */
        for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) {
            p_pRbMapElm = &pRbMap->prbMap[idxElm];
            p_pRbMapElm->bf_weight.nAntElmTRx = pXranConf->nAntElmTRx;

            if(p_pRbMapElm->BeamFormingType == XRAN_BEAM_WEIGHT && p_pRbMapElm->bf_weight_update) {
                if(p_pRbMapElm->bf_weight.extType == 1) {
                    app_io_xran_ext_type1_populate(p_pRbMapElm, ul_bfw_pos, app_io_xran_fh_init.mtu, &numSetBFW_total);
                /* (NOTE(review): 'else' dropped in capture) */
                app_io_xran_ext_type11_populate(p_pRbMapElm, ul_bfw_pos, app_io_xran_fh_init.mtu);
            } /* if(p_pRbMapElm->BeamFormingType == XRAN_BEAM_WEIGHT && p_pRbMapElm->bf_weight_update) */
            numSetBFW_total += p_pRbMapElm->bf_weight.numSetBFWs;
        } /* for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) */

    rte_panic("DL pRbMap ==NULL\n");
/**
 * Fill the U-Plane TX (DL) symbol buffer from the IQ playback file for one
 * (cc, ant, sym, tti) tuple: per PRB-map element, copy (or BFP/modulation-
 * compress) the IQ samples section by section, updating each element's
 * section descriptor (offset/length) and leaving headroom for the ORAN
 * headers that will be prepended per section at transmit time.
 *
 * @param pXranConf  per-port FH configuration (nDLRBs, compression settings)
 * @param p_iq       playback source buffers
 * @param flowId     index into p_iq buffers
 *
 * NOTE(review): brace lines and some statements are missing from this
 * capture (e.g. the num_sections assignment for the non-ext-1 path and the
 * 'continue' after a skipped symbol); annotations are best-effort.
 */
app_io_xran_iq_content_init_up_tx(uint8_t appMode, struct xran_fh_config *pXranConf,
    struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
    int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t target_tti, int32_t flowId)
    uint8_t* u8dptr = NULL;
    struct xran_prb_map* pRbMap = NULL;
    enum xran_comp_hdr_type staticEn = XRAN_COMP_HDR_TYPE_DYNAMIC;

    int32_t tti_dst = target_tti % XRAN_N_FE_BUF_LEN;   /* destination ring slot */
    int32_t tti_src = target_tti % p_iq->numSlots;      /* source playback slot */
    /* Offset of this symbol's IQ in the playback file: slot stride is
       14 symbols * nDLRBs * 12 subcarriers * 4 bytes. */
    int32_t tx_play_buffer_position = tti_src * (XRAN_NUM_OF_SYMBOL_PER_SLOT*pXranConf->nDLRBs*N_SC_PER_PRB*4) + (sym_id * pXranConf->nDLRBs*N_SC_PER_PRB*4);

    if (pXranConf != NULL)
        staticEn = pXranConf->ru_conf.xranCompHdrType;

    pRbMap = (struct xran_prb_map *) psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[tti_dst][cc_id][ant_id].sBufferList.pBuffers->pData;
    pos = ((char*)p_iq->p_tx_play_buffer[flowId]) + tx_play_buffer_position;
    ptr = psIoCtrl->sFrontHaulTxBbuIoBufCtrl[tti_dst][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;

    u8dptr = (uint8_t*)ptr;
    int16_t payload_len = 0;

    uint8_t *dst = (uint8_t *)u8dptr;
    uint8_t *src = (uint8_t *)pos;
    uint16_t num_sections, idx, comp_method;
    uint16_t prb_per_section;
    struct xran_prb_elm* p_prbMapElm = &pRbMap->prbMap[idxElm];
    /* Leave room for the first section's headers before any IQ bytes. */
    dst = xran_add_hdr_offset(dst, ((staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC) ? p_prbMapElm->compMethod : XRAN_COMPMETHOD_NONE));

    for (idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) {
        struct xran_section_desc *p_sec_desc = NULL;
        p_prbMapElm = &pRbMap->prbMap[idxElm];
        p_sec_desc = &p_prbMapElm->sec_desc[sym_id][0];

        /* ext-1 elements are split per BFW bundle; otherwise one section
           covers the whole element (NOTE(review): num_sections assignment for
           the non-ext-1 path dropped in capture). */
        if(p_prbMapElm->bf_weight.extType == 1)

            num_sections = p_prbMapElm->bf_weight.numSetBFWs;
            prb_per_section = p_prbMapElm->bf_weight.numBundPrb;

            prb_per_section = p_prbMapElm->UP_nRBSize;

        if(p_sec_desc == NULL) {
            rte_panic ("p_sec_desc == NULL\n");

        /* skip, if not scheduled */
        if(sym_id < p_prbMapElm->nStartSymb || sym_id >= p_prbMapElm->nStartSymb + p_prbMapElm->numSymb){
            p_sec_desc->iq_buffer_offset = 0;
            p_sec_desc->iq_buffer_len = 0;

        /* Element's IQ starts UP_nRBStart PRBs into the symbol's playback data. */
        src = (uint8_t *)(pos + p_prbMapElm->UP_nRBStart*N_SC_PER_PRB*4L);
        p_sec_desc->iq_buffer_offset = RTE_PTR_DIFF(dst, u8dptr);
        p_sec_desc->iq_buffer_len = 0;

        for(idx=0; idx < num_sections ; idx++)

            //printf("\nidx %hu u8dptr %p dst %p",idx,u8dptr,dst);
            /* Last section may be shorter than the bundle size. */
            if((idx+1)*prb_per_section > p_prbMapElm->UP_nRBSize){
                prb_per_section = (p_prbMapElm->UP_nRBSize - idx*prb_per_section);

            if(p_prbMapElm->compMethod == XRAN_COMPMETHOD_NONE) {
                /* Uncompressed: straight copy, 4 bytes per RE. */
                payload_len = prb_per_section*N_SC_PER_PRB*4L;
                memcpy(dst, src, payload_len);

            } else if ((p_prbMapElm->compMethod == XRAN_COMPMETHOD_BLKFLOAT) || (p_prbMapElm->compMethod == XRAN_COMPMETHOD_MODULATION)) {
                /* Compress this section's PRBs in place into dst. */
                struct xranlib_compress_request bfp_com_req;
                struct xranlib_compress_response bfp_com_rsp;

                memset(&bfp_com_req, 0, sizeof(struct xranlib_compress_request));
                memset(&bfp_com_rsp, 0, sizeof(struct xranlib_compress_response));

                bfp_com_req.data_in = (int16_t*)src;
                bfp_com_req.numRBs = prb_per_section;
                bfp_com_req.len = prb_per_section*N_SC_PER_PRB*4L;
                bfp_com_req.compMethod = p_prbMapElm->compMethod;
                bfp_com_req.iqWidth = p_prbMapElm->iqWidth;
                bfp_com_req.ScaleFactor= p_prbMapElm->ScaleFactor;
                bfp_com_req.reMask = p_prbMapElm->reMask;

                bfp_com_rsp.data_out = (int8_t*)dst;
                bfp_com_rsp.len = 0;

                xranlib_compress(&bfp_com_req, &bfp_com_rsp);
                payload_len = bfp_com_rsp.len;

                printf ("p_prbMapElm->compMethod == %d is not supported\n",
                    p_prbMapElm->compMethod);

            /* Multi-section elements advance the source cursor per section. */
            if(num_sections != 1)
                src += prb_per_section*N_SC_PER_PRB*4L;

            /* update RB map for given element */
            //p_sec_desc->iq_buffer_offset = RTE_PTR_DIFF(dst, u8dptr);
            p_sec_desc->iq_buffer_len += payload_len;

            /* add headroom for ORAN headers between IQs for chunk of RBs*/

            if(idx+1 == num_sections) /* Create space for (eth + eCPRI + radio app + section + comp) headers required by next prbElement */

                dst = xran_add_hdr_offset(dst, ((staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC) ? p_prbMapElm->compMethod : XRAN_COMPMETHOD_NONE));

                /* Create space for section/compression header in current prbElement */
                //TODO: Check if alignment required for this case
                dst += sizeof(struct data_section_hdr);
                p_sec_desc->iq_buffer_len += sizeof(struct data_section_hdr);

                comp_method = ((staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC) ? p_prbMapElm->compMethod : XRAN_COMPMETHOD_NONE);

                if( comp_method != XRAN_COMPMETHOD_NONE)

                    dst += sizeof (struct data_section_compression_hdr);
                    p_sec_desc->iq_buffer_len += sizeof(struct data_section_compression_hdr);

        } /*for num_section */
    } /* for (idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) */
    } /* if(ptr && pos) */

    rte_panic("ptr ==NULL\n");

    } /* if (pXranConf != NULL) */
/**
 * Fill the PRACH U-Plane symbol buffer from the PRACH playback file
 * (O-RU side), either copying the samples verbatim or compressing them with
 * the configured PRACH compression method.
 *
 * @param pXranConf  per-port FH config (compMeth_PRACH, iqWidth_PRACH)
 * @param p_iq       PRACH playback source buffers
 * @param flowId     index into p_iq buffers
 *
 * NOTE(review): some brace lines are missing from this capture; the final
 * printf sits on what appears to be the (ptr && pos) else branch.
 */
app_io_xran_iq_content_init_up_prach(uint8_t appMode, struct xran_fh_config *pXranConf,
    struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
    int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t tti, int32_t flowId)
    uint32_t* u32dptr = NULL;

    if(p_iq->p_tx_prach_play_buffer[flowId]) {
        /* PRACH playback always starts at the beginning of the file buffer. */
        pos = ((char*)p_iq->p_tx_prach_play_buffer[flowId]);
        ptr = psIoCtrl->sFHPrachRxBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;

        int32_t compMethod = pXranConf->ru_conf.compMeth_PRACH;

        if(compMethod == XRAN_COMPMETHOD_NONE) {
            /* Plain copy, bounded by both destination and playback size. */
            u32dptr = (uint32_t*)(ptr);
            memcpy(u32dptr, pos, RTE_MIN(PRACH_PLAYBACK_BUFFER_BYTES, p_iq->tx_prach_play_buffer_size[flowId]));
        } else if((compMethod == XRAN_COMPMETHOD_BLKFLOAT)
            || (compMethod == XRAN_COMPMETHOD_MODULATION)) {
            struct xranlib_compress_request comp_req;
            struct xranlib_compress_response comp_rsp;

            memset(&comp_req, 0, sizeof(struct xranlib_compress_request));
            memset(&comp_rsp, 0, sizeof(struct xranlib_compress_response));

            /* compress whole playback data */
            comp_req.data_in = (int16_t *)pos;
            comp_req.len = RTE_MIN(PRACH_PLAYBACK_BUFFER_BYTES, p_iq->tx_prach_play_buffer_size[flowId]);
            comp_req.numRBs = comp_req.len / 12 / 4; /* 12RE, 4bytes */
            comp_req.compMethod = compMethod;
            comp_req.iqWidth = pXranConf->ru_conf.iqWidth_PRACH;
            comp_req.ScaleFactor = 0; /* TODO */
            comp_req.reMask = 0xfff; /* TODO */

            comp_rsp.data_out = (int8_t *)ptr;

            xranlib_compress(&comp_req, &comp_rsp);

            printf ("p_prbMapElm->compMethod == %d is not supported\n", compMethod);

    } else { /* if(ptr && pos) */
        printf("prach ptr ==NULL\n");

    } /* if(p_iq->p_tx_prach_play_buffer[flowId]) */
/**
 * Fill one SRS U-Plane symbol buffer from the SRS playback file (O-RU side),
 * copying or compressing each scheduled PRB-map element and updating its
 * section descriptor; the playback read position advances per call and wraps
 * at the end of the playback buffer.
 *
 * @param pXranConf  per-port FH config (nULRBs, compression header type)
 * @param p_iq       SRS playback buffers + per-flow read positions
 * @param flowId     index into p_iq buffers
 *
 * NOTE(review): brace lines are missing from this capture; annotations are
 * best-effort.
 */
app_io_xran_iq_content_init_up_srs(uint8_t appMode, struct xran_fh_config *pXranConf,
    struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
    int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t tti, int32_t flowId)
    struct xran_prb_map * pRbMap = NULL;
    uint8_t* u8dptr = NULL;
    enum xran_comp_hdr_type staticEn = XRAN_COMP_HDR_TYPE_DYNAMIC;

    if (pXranConf != NULL)

        staticEn = pXranConf->ru_conf.xranCompHdrType;

    if(p_iq->p_tx_srs_play_buffer[flowId]) {
        /* Current read cursor into the playback file for this flow. */
        pos = ((char*)p_iq->p_tx_srs_play_buffer[flowId]) + p_iq->tx_srs_play_buffer_position[flowId];;
        ptr = psIoCtrl->sFHSrsRxBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
        pRbMap = (struct xran_prb_map *) psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers->pData;

        if(ptr && pos && pRbMap) {

            u8dptr = (uint8_t*)ptr;
            int16_t payload_len = 0;

            uint8_t *dst = (uint8_t *)u8dptr;
            uint8_t *src = (uint8_t *)pos;
            struct xran_prb_elm* p_prbMapElm = &pRbMap->prbMap[idxElm];
            /* Leave room for the first section's ORAN headers. */
            dst = xran_add_hdr_offset(dst, (staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC) ? p_prbMapElm->compMethod : XRAN_COMPMETHOD_NONE);
            for (idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) {
                struct xran_section_desc *p_sec_desc = NULL;
                p_prbMapElm = &pRbMap->prbMap[idxElm];
                p_sec_desc = &p_prbMapElm->sec_desc[sym_id][0];

                if(p_sec_desc == NULL){
                    rte_panic ("p_sec_desc == NULL\n");

                /* skip, if not scheduled */
                if(sym_id < p_prbMapElm->nStartSymb || sym_id >= p_prbMapElm->nStartSymb + p_prbMapElm->numSymb) {
                    p_sec_desc->iq_buffer_offset = 0;
                    p_sec_desc->iq_buffer_len = 0;

                /* Element's IQ starts UP_nRBStart PRBs into the symbol data. */
                src = (uint8_t *)(pos + p_prbMapElm->UP_nRBStart*N_SC_PER_PRB*4L);

                if(p_prbMapElm->compMethod == XRAN_COMPMETHOD_NONE) {
                    /* Uncompressed: straight copy, 4 bytes per RE. */
                    payload_len = p_prbMapElm->UP_nRBSize*N_SC_PER_PRB*4L;
                    memcpy(dst, src, payload_len);

                } else if (p_prbMapElm->compMethod == XRAN_COMPMETHOD_BLKFLOAT
                    || (p_prbMapElm->compMethod == XRAN_COMPMETHOD_MODULATION)) {
                    struct xranlib_compress_request bfp_com_req;
                    struct xranlib_compress_response bfp_com_rsp;

                    memset(&bfp_com_req, 0, sizeof(struct xranlib_compress_request));
                    memset(&bfp_com_rsp, 0, sizeof(struct xranlib_compress_response));
                    bfp_com_req.data_in = (int16_t*)src;
                    bfp_com_req.numRBs = p_prbMapElm->UP_nRBSize;
                    bfp_com_req.len = p_prbMapElm->UP_nRBSize*N_SC_PER_PRB*4L;
                    bfp_com_req.compMethod = p_prbMapElm->compMethod;
                    bfp_com_req.iqWidth = p_prbMapElm->iqWidth;
                    bfp_com_req.ScaleFactor= p_prbMapElm->ScaleFactor;
                    bfp_com_req.reMask = p_prbMapElm->reMask;

                    bfp_com_rsp.data_out = (int8_t*)dst;
                    bfp_com_rsp.len = 0;

                    xranlib_compress(&bfp_com_req, &bfp_com_rsp);
                    payload_len = bfp_com_rsp.len;

                    rte_panic ("p_prbMapElm->compMethod == %d is not supported\n", p_prbMapElm->compMethod);

                /* update RB map for given element */
                p_sec_desc->iq_buffer_offset = RTE_PTR_DIFF(dst, u8dptr);
                p_sec_desc->iq_buffer_len = payload_len;

                /* add headroom for ORAN headers between IQs for chunk of RBs*/

                dst = xran_add_hdr_offset(dst, (staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC) ? p_prbMapElm->compMethod : XRAN_COMPMETHOD_NONE);

            rte_panic("[%d %d %d] %p %p %p ==NULL\n",tti, ant_id, sym_id, ptr, pos, pRbMap);

        /* Advance and wrap the per-flow playback read position. */
        p_iq->tx_srs_play_buffer_position[flowId] += pXranConf->nULRBs*N_SC_PER_PRB*4;
        if(p_iq->tx_srs_play_buffer_position[flowId] >= p_iq->tx_srs_play_buffer_size[flowId])
            p_iq->tx_srs_play_buffer_position[flowId] = 0;
/**
 * Top-level IQ content initialization for one O-xU port: for every
 * (cc, tti, ant, sym) tuple, seed the C-Plane TX/RX PRB maps and the U-Plane
 * TX buffers from the configured playback files; additionally seed PRACH and
 * SRS playback buffers when running as O-RU.
 *
 * @param o_xu_id    O-xU port index (selects FH config and IO control block)
 * @param p_o_xu_cfg runtime configuration for this port
 *
 * NOTE(review): brace/else lines and an enclosing-loop boundary are missing
 * from this capture (the PRACH/SRS sections appear to still be inside the
 * cc_id loop); verify against the full source.
 */
app_io_xran_iq_content_init(uint32_t o_xu_id, RuntimeConfig *p_o_xu_cfg)
    xran_status_t status;

    struct bbu_xran_io_if *psBbuIo = app_io_xran_if_get();
    struct xran_io_shared_ctrl *psIoCtrl = app_io_xran_if_ctrl_get(o_xu_id);
    int32_t nSectorIndex[XRAN_MAX_SECTOR_NR];

    int32_t cc_id, ant_id, sym_id, tti;

    //uint8_t frame_id = 0;
    //uint8_t subframe_id = 0;
    //uint8_t slot_id = 0;

    struct xran_fh_config *pXranConf = &app_io_xran_fh_config[o_xu_id];
    //struct xran_fh_init *pXranInit = &app_io_xran_fh_init;
    struct o_xu_buffers * p_iq = NULL;

    /* Antenna counts: U-plane uses max(DL,UL) AxCs; SRS uses antenna-array
       elements; PRACH is capped at XRAN_MAX_PRACH_ANT_NUM. */
    uint32_t xran_max_antenna_nr = RTE_MAX(p_o_xu_cfg->numAxc, p_o_xu_cfg->numUlAxc);
    uint32_t xran_max_ant_array_elm_nr = RTE_MAX(p_o_xu_cfg->antElmTRx, xran_max_antenna_nr);
    uint32_t xran_max_antenna_nr_prach = RTE_MIN(xran_max_antenna_nr, XRAN_MAX_PRACH_ANT_NUM);

    if(psBbuIo == NULL){
        rte_panic("psBbuIo == NULL\n");

    if(psIoCtrl == NULL){
        rte_panic("psIoCtrl == NULL\n");

    /* Identity map of sector indices, then limit to the configured CC count. */
    for (nSectorNum = 0; nSectorNum < XRAN_MAX_SECTOR_NR; nSectorNum++) {
        nSectorIndex[nSectorNum] = nSectorNum;

    nSectorNum = p_o_xu_cfg->numCC;
    printf ("app_io_xran_iq_content_init\n");

    if(p_o_xu_cfg->p_buff) {
        p_iq = p_o_xu_cfg->p_buff;

        rte_panic("Error p_o_xu_cfg->p_buff\n");

    for(cc_id = 0; cc_id < nSectorNum; cc_id++) {
        for(tti = 0; tti < XRAN_N_FE_BUF_LEN; tti ++) {
            for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++){
                for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
                    /* Flow indexing differs by role: DL AxCs for O-DU, UL AxCs otherwise. */
                    if(p_o_xu_cfg->appMode == APP_O_DU) {
                        flowId = p_o_xu_cfg->numAxc * cc_id + ant_id;

                    flowId = p_o_xu_cfg->numUlAxc * cc_id + ant_id;

                    if ((status = app_io_xran_iq_content_init_cp_tx(p_o_xu_cfg->appMode, pXranConf,
                            psBbuIo, psIoCtrl, p_iq,
                            cc_id, ant_id, sym_id, tti, flowId)) != 0) {
                        rte_panic("app_io_xran_iq_content_init_cp_tx");

                    if ((status = app_io_xran_iq_content_init_up_tx(p_o_xu_cfg->appMode, pXranConf,
                            psBbuIo, psIoCtrl, p_iq,
                            cc_id, ant_id, sym_id, tti, flowId)) != 0) {
                        rte_panic("app_io_xran_iq_content_init_up_tx");

                    if ((status = app_io_xran_iq_content_init_cp_rx(p_o_xu_cfg->appMode, pXranConf,
                            psBbuIo, psIoCtrl, p_iq,
                            cc_id, ant_id, sym_id, tti, flowId)) != 0) {
                        rte_panic("app_io_xran_iq_content_init_cp_rx");

            /* prach TX for RU only */
            if(p_o_xu_cfg->appMode == APP_O_RU && p_o_xu_cfg->enablePrach) {
                for(ant_id = 0; ant_id < xran_max_antenna_nr_prach; ant_id++) {
                    for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
                        flowId = xran_max_antenna_nr_prach * cc_id + ant_id;
                        if ((status = app_io_xran_iq_content_init_up_prach(p_o_xu_cfg->appMode, pXranConf,
                                psBbuIo, psIoCtrl, p_iq,
                                cc_id, ant_id, sym_id, tti, flowId)) != 0) {
                            rte_panic("app_io_xran_iq_content_init_up_prach");

                /* Dump the compressed PRACH buffers for offline inspection. */
                for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {

                    snprintf(fname, sizeof(fname), "./logs/aftercomp-%d.bin", sym_id);
                    sys_save_buf_to_file(fname,
                        "Compressed PRACH IQ Samples in binary format",
                        psIoCtrl->sFHPrachRxBbuIoBufCtrl[0][0][0].sBufferList.pBuffers[sym_id].pData,
                        RTE_MIN(PRACH_PLAYBACK_BUFFER_BYTES, p_iq->tx_prach_play_buffer_size[0]),

                    snprintf(fname, sizeof(fname), "./logs/aftercomp-%d.txt", sym_id);
                    sys_save_buf_to_file_txt(fname,
                        "Compressed PRACH IQ Samples in human readable format",
                        psIoCtrl->sFHPrachRxBbuIoBufCtrl[0][0][0].sBufferList.pBuffers[sym_id].pData,
                        RTE_MIN(PRACH_PLAYBACK_BUFFER_BYTES, p_iq->tx_prach_play_buffer_size[0]),

            /* SRS TX for RU only */
            if(p_o_xu_cfg->appMode == APP_O_RU && p_o_xu_cfg->enableSrs) {
                for(ant_id = 0; ant_id < xran_max_ant_array_elm_nr; ant_id++) {
                    for(sym_id = 0; sym_id < XRAN_MAX_NUM_OF_SRS_SYMBOL_PER_SLOT; sym_id++) {
                        flowId = p_o_xu_cfg->antElmTRx*cc_id + ant_id;
                        if ((status = app_io_xran_iq_content_init_up_srs(p_o_xu_cfg->appMode, pXranConf,
                                psBbuIo, psIoCtrl, p_iq,
                                cc_id, ant_id, sym_id, tti, flowId)) != 0){
                            rte_panic("app_io_xran_iq_content_init_up_srs");
1680 void app_io_xran_if_stop(void)
1682 xran_status_t status = 0;
1684 status += xran_mm_destroy(app_io_xran_handle)*2;
1686 if(XRAN_STATUS_SUCCESS != status) {
1687 printf("Failed at xran_mm_destroy, status %d\n",status);
1688 iAssert(status == XRAN_STATUS_SUCCESS);
/**
 * Copy received (and, if needed, decompress) PRACH U-Plane IQ for one
 * (cc, ant, sym, tti) tuple out of the decompression ring buffer into the
 * per-flow PRACH log buffer.
 *
 * @param pXranConf  per-port FH config (iqWidth_PRACH, compMeth_PRACH,
 *                   prach_conf.numPrbc)
 * @param p_iq       log-buffer destinations per flow
 * @param flowId     index into p_iq buffers
 *
 * NOTE(review): brace lines and the switch 'default' arm are missing from
 * this capture; annotations are best-effort.
 */
app_io_xran_iq_content_get_up_prach(uint8_t appMode, struct xran_fh_config *pXranConf,
    struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
    int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t target_tti, int32_t flowId)
    xran_status_t status = 0;
    int32_t prach_len = 0;

    int32_t tti_src = target_tti % XRAN_N_FE_BUF_LEN;   /* source ring slot */
    int32_t tti_dst = target_tti % p_iq->numSlots;      /* destination log slot */
    int32_t prach_log_buffer_position;

    prach_len = (3 * pXranConf->ru_conf.iqWidth_PRACH) * pXranConf->prach_conf.numPrbc; /* 12RE*2pairs/8bits (12*2/8=3)*/
    prach_log_buffer_position = tti_dst * (XRAN_NUM_OF_SYMBOL_PER_SLOT*prach_len) + (sym_id * prach_len);

    if(p_iq->p_prach_log_buffer[flowId]) {
        pos = ((char*)p_iq->p_prach_log_buffer[flowId]) + prach_log_buffer_position;
        ptr = psIoCtrl->sFHPrachRxBbuIoBufCtrlDecomp[tti_src][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;

        int32_t compMethod = pXranConf->ru_conf.compMeth_PRACH;
        if(compMethod == XRAN_COMPMETHOD_NONE) {
            /* Already raw IQ: copy straight into the log buffer. */
            memcpy(pos, (uint32_t *)(ptr), prach_len);

            /* (NOTE(review): 'else' dropped in capture) decompress first. */
            struct xranlib_decompress_request decomp_req;
            struct xranlib_decompress_response decomp_rsp;

            memset(&decomp_req, 0, sizeof(struct xranlib_decompress_request));
            memset(&decomp_rsp, 0, sizeof(struct xranlib_decompress_response));

            /* BFP carries one exponent byte per PRB; modulation compression none. */
            switch(compMethod) {
                case XRAN_COMPMETHOD_BLKFLOAT: parm_size = 1; break;
                case XRAN_COMPMETHOD_MODULATION: parm_size = 0; break;

            decomp_req.data_in = (int8_t *)ptr;
            decomp_req.numRBs = pXranConf->prach_conf.numPrbc;
            decomp_req.len = (3 * pXranConf->ru_conf.iqWidth_PRACH + parm_size) * pXranConf->prach_conf.numPrbc; /* 12RE*2pairs/8bits (12*2/8=3)*/
            decomp_req.compMethod = compMethod;
            decomp_req.iqWidth = pXranConf->ru_conf.iqWidth_PRACH;
            decomp_req.ScaleFactor = 0; /* TODO */
            decomp_req.reMask = 0xfff; /* TODO */

            decomp_rsp.data_out = (int16_t *)pos;

            xranlib_decompress(&decomp_req, &decomp_rsp);

    } /* if(p_iq->p_prach_log_buffer[flowId]) */
/**
 * Copy one symbol of received U-plane SRS IQ data for (cc_id, ant_id, flowId)
 * into the application's SRS capture buffer (p_iq->p_srs_log_buffer[flowId]).
 * Walks every PRB element of the SRS RX PRB map for the symbol; each section's
 * payload is either memcpy'd (uncompressed) or run through
 * xranlib_decompress() into the capture buffer at the PRB offset given by
 * pRbElm->nRBStart.
 */
app_io_xran_iq_content_get_up_srs(uint8_t appMode, struct xran_fh_config *pXranConf,
    struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
    int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t target_tti, int32_t flowId)
    xran_status_t status = 0;

    struct xran_prb_map *pRbMap = NULL;
    struct xran_prb_elm *pRbElm = NULL;
    struct xran_section_desc *p_sec_desc = NULL;

    /* FH buffers are indexed modulo the FH buffer depth; the capture buffer
       modulo the number of slots it holds.  Each (slot, symbol) occupies
       nULRBs PRBs * 12 subcarriers * 4 bytes (16-bit I + 16-bit Q). */
    int32_t tti_src = target_tti % XRAN_N_FE_BUF_LEN;
    int32_t tti_dst = target_tti % p_iq->numSlots;
    int32_t srs_log_buffer_position = tti_dst * (XRAN_NUM_OF_SYMBOL_PER_SLOT*pXranConf->nULRBs*N_SC_PER_PRB*4) + (sym_id * pXranConf->nULRBs*N_SC_PER_PRB*4);

    pRbMap = (struct xran_prb_map *) psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[tti_src][cc_id][ant_id].sBufferList.pBuffers->pData;
        rte_panic("pRbMap == NULL\n");

    if(p_iq->p_srs_log_buffer[flowId]) {
        pRbElm = &pRbMap->prbMap[0];
        /* Legacy single-element fast path kept for reference (disabled): */
        /*if(pRbMap->nPrbElm == 1) {
            if(sym_id >= pRbElm->nStartSymb && sym_id < pRbElm->nStartSymb + pRbElm->numSymb) {
                pos = ((char*)p_iq->p_srs_log_buffer[flowId]) + p_iq->srs_log_buffer_position[flowId];
                ptr = psIoCtrl->sFHSrsRxBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;

                int32_t payload_len = 0;
                u32dptr = (uint32_t*)(ptr);
                if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE){
                    struct xranlib_decompress_request bfp_decom_req;
                    struct xranlib_decompress_response bfp_decom_rsp;

                    memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
                    memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));
                    switch(pRbElm->compMethod) {
                        case XRAN_COMPMETHOD_BLKFLOAT:
                        case XRAN_COMPMETHOD_MODULATION:

                    bfp_decom_req.data_in    = (int8_t *)u32dptr;
                    bfp_decom_req.numRBs     = pRbElm->nRBSize;
                    bfp_decom_req.len        = (3 * pRbElm->iqWidth + parm_size) * pRbElm->nRBSize;
                    bfp_decom_req.compMethod = pRbElm->compMethod;
                    bfp_decom_req.iqWidth    = pRbElm->iqWidth;

                    bfp_decom_rsp.data_out   = (int16_t *)(pos + pRbElm->nRBStart*N_SC_PER_PRB*4);
                    bfp_decom_rsp.len        = 0;

                    xranlib_decompress(&bfp_decom_req, &bfp_decom_rsp);
                    payload_len = bfp_decom_rsp.len;

                    u32dptr = (uint32_t*)(ptr);
                    memcpy(pos + pRbElm->nRBStart*N_SC_PER_PRB*4L , u32dptr, pRbElm->nRBSize*N_SC_PER_PRB*4L);
                printf("[%d][%d][%d][%d]ptr ==NULL\n",tti,cc_id,ant_id, sym_id);

        /* Current path: iterate all PRB elements of the SRS PRB map. */
        for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++ ) {
            pRbElm = &pRbMap->prbMap[idxElm];
            p_sec_desc = &pRbElm->sec_desc[sym_id][0];
            if(p_iq->p_srs_log_buffer[flowId] && p_sec_desc) {
                /* only symbols inside this element's symbol span carry data */
                if(sym_id >= pRbElm->nStartSymb && sym_id < pRbElm->nStartSymb + pRbElm->numSymb){
                    pos = ((char*)p_iq->p_srs_log_buffer[flowId]) + srs_log_buffer_position;
                    ptr = p_sec_desc->pData;

                    u32dptr = (uint32_t*)(ptr);
                    if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE){
                        struct xranlib_decompress_request bfp_decom_req;
                        struct xranlib_decompress_response bfp_decom_rsp;

                        memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
                        memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));
                        /* parm_size = per-PRB compression parameter bytes */
                        switch(pRbElm->compMethod) {
                            case XRAN_COMPMETHOD_BLKFLOAT:
                            case XRAN_COMPMETHOD_MODULATION:

                        bfp_decom_req.data_in    = (int8_t *)u32dptr;
                        bfp_decom_req.numRBs     = pRbElm->nRBSize;
                        bfp_decom_req.len        = (3 * pRbElm->iqWidth + parm_size)*pRbElm->nRBSize;
                        bfp_decom_req.compMethod = pRbElm->compMethod;
                        bfp_decom_req.iqWidth    = pRbElm->iqWidth;

                        /* land decompressed IQ at this element's PRB offset */
                        bfp_decom_rsp.data_out   = (int16_t *)(pos + pRbElm->nRBStart*N_SC_PER_PRB*4);
                        bfp_decom_rsp.len        = 0;

                        xranlib_decompress(&bfp_decom_req, &bfp_decom_rsp);
                        /* uncompressed: direct copy at the element's PRB offset */
                        memcpy(pos + pRbElm->nRBStart*N_SC_PER_PRB*4 , u32dptr, pRbElm->nRBSize*N_SC_PER_PRB*4);
                printf("(%d : %d : %d) flowid %d, p_sec_desc is empty\n", target_tti, sym_id, ant_id,flowId);
/**
 * Copy one symbol of received U-plane (PUSCH) IQ data for
 * (cc_id, ant_id, flowId) into the application RX capture buffer
 * (p_iq->p_rx_log_buffer[flowId]).
 *
 * Two traversal modes over the RX PRB map, selected by
 * pXranConf->RunSlotPrbMapBySymbolEnable:
 *  - 0: walk each PRB element and each of its XRAN_MAX_FRAGMENT section
 *       descriptors for this symbol;
 *  - else: per-symbol PRB map — for each section, additionally scan forward
 *       over the remaining elements that cover this symbol.
 * In both modes each received section is walked section-header by
 * section-header (data_section_hdr), copying (or decompressing via
 * xranlib_decompress) num_prbu PRBs at start_prbu into the capture buffer.
 */
app_io_xran_iq_content_get_up_rx(uint8_t appMode, struct xran_fh_config *pXranConf,
    struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
    int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t target_tti, int32_t flowId)
    xran_status_t status = 0;

    int32_t idxSection = 0;
    struct xran_prb_map *pRbMap = NULL;
    struct xran_prb_elm *pRbElm = NULL;
    struct xran_prb_elm *pRbElmRx = NULL;
    struct xran_section_desc *p_sec_desc = NULL;

    uint16_t idxDesc = 0;

    struct data_section_hdr* data_hdr;
    uint16_t num_prbu = 0, start_prbu = 0, prb_idx;

    /* on-the-wire header sizes used to step over section / compression headers */
    const int16_t data_size = sizeof(struct data_section_hdr);
    const int16_t compr_size = sizeof(struct data_section_compression_hdr);

    /* FH buffers indexed modulo buffer depth; capture buffer modulo its slot
       count; 4 bytes per RE = 16-bit I + 16-bit Q */
    int32_t tti_src = target_tti % XRAN_N_FE_BUF_LEN;
    int32_t tti_dst = target_tti % p_iq->numSlots;
    int32_t rx_log_buffer_position = tti_dst * (XRAN_NUM_OF_SYMBOL_PER_SLOT*pXranConf->nULRBs*N_SC_PER_PRB*4) + (sym_id * pXranConf->nULRBs*N_SC_PER_PRB*4);

    pRbMap = (struct xran_prb_map *) psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[tti_src][cc_id][ant_id].sBufferList.pBuffers->pData;
    if(pRbMap == NULL) {
        printf("pRbMap == NULL\n");

    if(0 == pXranConf->RunSlotPrbMapBySymbolEnable)

        /* mode 0: straightforward walk of PRB elements x fragments */
        for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++ ) {
            pRbElm = &pRbMap->prbMap[idxElm];
            for (idxDesc = 0; idxDesc < XRAN_MAX_FRAGMENT; idxDesc++) {
                p_sec_desc = &pRbElm->sec_desc[sym_id][idxDesc];
                if(p_iq->p_rx_log_buffer[flowId] && p_sec_desc){
                    if(sym_id >= pRbElm->nStartSymb && sym_id < pRbElm->nStartSymb + pRbElm->numSymb){
                        /* no packet received for this descriptor */
                        if (!p_sec_desc->pCtrl)
                        pos = ((char*)p_iq->p_rx_log_buffer[flowId]) + rx_log_buffer_position;
                        ptr = p_sec_desc->pData;

                        data_hdr = (struct data_section_hdr *)src;
                        num_prbu = p_sec_desc->num_prbu;
                        start_prbu = p_sec_desc->start_prbu;
                        prb_idx = start_prbu;
                        /* consume chained sections until this element's PRB range is covered */
                        while(prb_idx < (pRbElm->UP_nRBStart + pRbElm->UP_nRBSize) && num_prbu != 0){

                            u32dptr = (uint32_t*)(src);
                            if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE){
                                struct xranlib_decompress_request bfp_decom_req;
                                struct xranlib_decompress_response bfp_decom_rsp;
                                int32_t parm_size = 0;

                                memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
                                memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));
                                switch(pRbElm->compMethod) {
                                    case XRAN_COMPMETHOD_BLKFLOAT:
                                    case XRAN_COMPMETHOD_MODULATION:

                                bfp_decom_req.data_in    = (int8_t *)u32dptr;
                                bfp_decom_req.numRBs     = num_prbu;
                                bfp_decom_req.len        = (3 * pRbElm->iqWidth + parm_size)*num_prbu;
                                bfp_decom_req.compMethod = pRbElm->compMethod;
                                bfp_decom_req.iqWidth    = pRbElm->iqWidth;
                                bfp_decom_req.reMask     = pRbElm->reMask;
                                bfp_decom_req.ScaleFactor= pRbElm->ScaleFactor;

                                bfp_decom_rsp.data_out   = (int16_t *)(pos + start_prbu*N_SC_PER_PRB*4);
                                bfp_decom_rsp.len        = 0;

                                xranlib_decompress(&bfp_decom_req, &bfp_decom_rsp);
                                /* advance past this section's compressed payload */
                                src += (3 * pRbElm->iqWidth + parm_size)*num_prbu;

                                memcpy(pos + start_prbu*N_SC_PER_PRB*4 , u32dptr, num_prbu*N_SC_PER_PRB*4);
                                src += num_prbu*N_SC_PER_PRB*4;

                            // printf("%s:%d [%d][%d][%d][%d]src ==NULL\n", __FUNCTION__, __LINE__, tti,cc_id,ant_id, sym_id);

                            /* read the next chained section header (big-endian on the wire) */
                            data_hdr = (struct data_section_hdr *)src;
                            if(pRbElm->bf_weight.extType == 1 && data_hdr != NULL)

                                data_hdr->fields.all_bits = rte_be_to_cpu_32(data_hdr->fields.all_bits);
                                num_prbu = data_hdr->fields.num_prbu;
                                start_prbu = data_hdr->fields.start_prbu;
                                prb_idx += num_prbu;

                            /* dynamic compression header precedes payload for compressed sections */
                            if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE && pXranConf->ru_conf.xranCompHdrType == XRAN_COMP_HDR_TYPE_DYNAMIC)

                        printf("(%d : %d : %d) flowid %d, p_sec_desc is empty\n", target_tti, sym_id, ant_id,flowId);

        /* per-symbol PRB map mode: scan forward over elements covering this symbol */
        for(idxSection = 0; idxSection < pRbMap->nPrbElm; idxSection++ ) {
            pRbElmRx = &pRbMap->prbMap[idxSection];
            for (idxDesc = 0; idxDesc < XRAN_MAX_FRAGMENT; idxDesc++) {
                p_sec_desc = &pRbElmRx->sec_desc[sym_id][idxDesc];
                if(p_iq->p_rx_log_buffer[flowId] && p_sec_desc){
                    if(!p_sec_desc->pCtrl)
                    for(idxElm = idxSection; idxElm < pRbMap->nPrbElm; idxElm++ )

                        pRbElm = &pRbMap->prbMap[idxElm];
                        if(sym_id >= pRbElm->nStartSymb && sym_id < pRbElm->nStartSymb + pRbElm->numSymb) {
                            pos = ((char*)p_iq->p_rx_log_buffer[flowId]) + rx_log_buffer_position;
                            ptr = p_sec_desc->pData;

                            data_hdr = (struct data_section_hdr *)src;
                            num_prbu = p_sec_desc->num_prbu;
                            start_prbu = p_sec_desc->start_prbu;
                            prb_idx = start_prbu;
                            while(prb_idx < (pRbElm->UP_nRBStart + pRbElm->UP_nRBSize) && num_prbu != 0){
                            // while(prb_idx < (pRbElm->nRBStart + pRbElm->nRBSize) && num_prbu != 0){

                                u32dptr = (uint32_t*)(src);
                                if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE){
                                    struct xranlib_decompress_request bfp_decom_req;
                                    struct xranlib_decompress_response bfp_decom_rsp;
                                    int32_t parm_size = 0;

                                    memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
                                    memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));
                                    switch(pRbElm->compMethod) {
                                        case XRAN_COMPMETHOD_BLKFLOAT:
                                        case XRAN_COMPMETHOD_MODULATION:

                                    bfp_decom_req.data_in    = (int8_t *)u32dptr;
                                    bfp_decom_req.numRBs     = num_prbu;
                                    bfp_decom_req.len        = (3 * pRbElm->iqWidth + parm_size)*num_prbu;
                                    bfp_decom_req.compMethod = pRbElm->compMethod;
                                    bfp_decom_req.iqWidth    = pRbElm->iqWidth;
                                    bfp_decom_req.reMask     = pRbElm->reMask;
                                    bfp_decom_req.ScaleFactor= pRbElm->ScaleFactor;

                                    bfp_decom_rsp.data_out   = (int16_t *)(pos + start_prbu*N_SC_PER_PRB*4);
                                    bfp_decom_rsp.len        = 0;

                                    xranlib_decompress(&bfp_decom_req, &bfp_decom_rsp);
                                    src += (3 * pRbElm->iqWidth + parm_size)*num_prbu;

                                    memcpy(pos + start_prbu*N_SC_PER_PRB*4 , u32dptr, num_prbu*N_SC_PER_PRB*4);
                                    src += num_prbu*N_SC_PER_PRB*4;

                                // printf("%s:%d [%d][%d][%d][%d]src ==NULL\n", __FUNCTION__, __LINE__, tti,cc_id,ant_id, sym_id);

                                data_hdr = (struct data_section_hdr *)src;
                                if(pRbElm->bf_weight.extType == 1 && data_hdr != NULL)

                                    data_hdr->fields.all_bits = rte_be_to_cpu_32(data_hdr->fields.all_bits);
                                    num_prbu = data_hdr->fields.num_prbu;
                                    start_prbu = data_hdr->fields.start_prbu;
                                    prb_idx += num_prbu;

                                if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE && pXranConf->ru_conf.xranCompHdrType == XRAN_COMP_HDR_TYPE_DYNAMIC)

                            printf("(%d : %d : %d) flowid %d, p_sec_desc is empty\n", target_tti, sym_id, ant_id,flowId);
/**
 * Top-level IQ capture driver for one O-xU port: for every (cc, tti, antenna,
 * symbol) it pulls received U-plane data (PUSCH, and for O-DU also PRACH and
 * SRS) into the application log buffers via the get_up_* helpers, and for an
 * O-RU (Cat B, extType 1) it additionally extracts DL/UL beamforming weights
 * from received C-plane section extension 1 into the BFW log buffers.
 *
 * @param o_xu_id     O-xU port index (selects IO ctrl block and FH config)
 * @param p_o_xu_cfg  runtime configuration for this port
 */
app_io_xran_iq_content_get(uint32_t o_xu_id, RuntimeConfig *p_o_xu_cfg)
    struct bbu_xran_io_if *psBbuIo = app_io_xran_if_get();
    struct xran_io_shared_ctrl *psIoCtrl = app_io_xran_if_ctrl_get(o_xu_id);
    xran_status_t status;
    int32_t nSectorIndex[XRAN_MAX_SECTOR_NR];

    int32_t cc_id, ant_id, sym_id, tti;

    struct xran_fh_config *pXranConf = &app_io_xran_fh_config[o_xu_id];

    /* antenna counts: DL/UL max, full antenna-array size (SRS), and the
       PRACH antenna count capped at XRAN_MAX_PRACH_ANT_NUM */
    uint32_t xran_max_antenna_nr = RTE_MAX(p_o_xu_cfg->numAxc, p_o_xu_cfg->numUlAxc);
    uint32_t xran_max_ant_array_elm_nr = RTE_MAX(p_o_xu_cfg->antElmTRx, xran_max_antenna_nr);
    uint32_t xran_max_antenna_nr_prach = RTE_MIN(xran_max_antenna_nr, XRAN_MAX_PRACH_ANT_NUM);

    struct o_xu_buffers *p_iq = NULL;

        rte_panic("psBbuIo == NULL\n");

    if(psIoCtrl == NULL)
        rte_panic("psIoCtrl == NULL\n");

    for (nSectorNum = 0; nSectorNum < XRAN_MAX_SECTOR_NR; nSectorNum++) {
        nSectorIndex[nSectorNum] = nSectorNum;

    nSectorNum = p_o_xu_cfg->numCC;
    printf ("app_io_xran_iq_content_get\n");

    /* resolve the per-port IQ buffer set; NOTE(review): the two near-identical
       p_buff checks below appear to come from alternate build paths — confirm */
    if(p_o_xu_cfg->p_buff) {
        p_iq = p_o_xu_cfg->p_buff;
        printf("Error p_o_xu_cfg->p_buff\n");

    if(p_o_xu_cfg->p_buff) {
        p_iq = p_o_xu_cfg->p_buff;
        rte_panic("Error p_o_xu_cfg->p_buff\n");

    /* capture is done inline only when BBU offload is disabled */
    if(psBbuIo->bbu_offload == 0) {
    for(cc_id = 0; cc_id <nSectorNum; cc_id++) {
        for(tti = 0; tti < XRAN_N_FE_BUF_LEN; tti++) {
            for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++) {
                /* flow id: O-RU indexes by DL AxC count, O-DU by UL AxC count */
                if(p_o_xu_cfg->appMode == APP_O_RU)
                    flowId = p_o_xu_cfg->numAxc * cc_id + ant_id;
                    flowId = p_o_xu_cfg->numUlAxc * cc_id + ant_id;

                /* U-plane RX (PUSCH) capture, every symbol of the slot */
                for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
                    if ((status = app_io_xran_iq_content_get_up_rx(p_o_xu_cfg->appMode, pXranConf,
                                    psBbuIo, psIoCtrl, p_iq,
                                    cc_id, ant_id, sym_id, tti, flowId)) != 0) {
                        rte_panic("app_io_xran_iq_content_get_up_rx");

                /* PRACH capture for O-DU only, limited to the PRACH antennas */
                if(p_o_xu_cfg->appMode == APP_O_DU && p_o_xu_cfg->enablePrach && (ant_id < xran_max_antenna_nr_prach)) {
                    flowId = xran_max_antenna_nr_prach * cc_id + ant_id;
                    for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
                        if ((status = app_io_xran_iq_content_get_up_prach(p_o_xu_cfg->appMode, pXranConf,
                                        psBbuIo, psIoCtrl, p_iq,
                                        cc_id, ant_id, sym_id, tti, flowId)) != 0) {
                            rte_panic("app_io_xran_iq_content_get_up_prach");

            } /* for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++) */

            /* SRS RX for O-DU only */
            if(p_o_xu_cfg->appMode == APP_O_DU && p_o_xu_cfg->enableSrs) {
                for(ant_id = 0; ant_id < xran_max_ant_array_elm_nr; ant_id++) {
                    flowId = p_o_xu_cfg->antElmTRx*cc_id + ant_id;
                    for(sym_id = 0; sym_id < XRAN_MAX_NUM_OF_SRS_SYMBOL_PER_SLOT; sym_id++) {
                        if ((status = app_io_xran_iq_content_get_up_srs(p_o_xu_cfg->appMode, pXranConf,
                                        psBbuIo, psIoCtrl, p_iq,
                                        cc_id, ant_id, sym_id, tti, flowId)) != 0) {
                            rte_panic("app_io_xran_iq_content_get_up_srs");

            /* CP - DL for O-RU only */
            if(p_o_xu_cfg->appMode == APP_O_RU && p_o_xu_cfg->xranCat == 1 && p_o_xu_cfg->extType == 1) {
                for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++) {
                    uint16_t idxElm = 0;

                    struct xran_prb_map *pRbMap = NULL;
                    struct xran_prb_elm *pRbElm = NULL;
                    int8_t *iq_data = NULL;
                    /* N = antenna-array elements per BFW set */
                    uint16_t N = pXranConf->nAntElmTRx;

                    int32_t tti_dst = tti % p_iq->numSlots ;
                    int32_t tx_dl_bfw_buffer_position = tti_dst * (pXranConf->nDLRBs*pXranConf->nAntElmTRx)*4;

                    struct xran_cp_radioapp_section_ext1 * ext1;

                    uint8_t total_ext1_len = 0;
                    pRbMap = (struct xran_prb_map *) psIoCtrl->sFHCpRxPrbMapBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers->pData;
                    if(pRbMap == NULL) {
                        printf("pRbMap == NULL\n");

                    flowId = p_o_xu_cfg->numAxc * cc_id + ant_id;
                    pos = (char*)p_iq->p_tx_dl_bfw_log_buffer[flowId] + tx_dl_bfw_buffer_position;
                    for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++ ) {
                        pRbElm = &pRbMap->prbMap[idxElm];
                        bfwIqWidth = pRbElm->bf_weight.bfwIqWidth;
                        if(p_iq->p_tx_dl_bfw_log_buffer[flowId]) {
                            src = (char *)pRbElm->bf_weight.p_ext_section;
                            /* no ext1 payload received for this element */
                            if(!pRbElm->bf_weight.p_ext_start)

                            /* one ext1 per BFW set: header, then packed weights */
                            for(i = 0; i < (pRbElm->bf_weight.numSetBFWs); i++) {

                                src_buf = (uint8_t *)src;
                                ext1 = (struct xran_cp_radioapp_section_ext1 *)src_buf;
                                src_buf += sizeof(struct xran_cp_radioapp_section_ext1);

                                iq_data = (int8_t *)(src_buf);
                                /* extLen is in units of XRAN_SECTIONEXT_ALIGN bytes */
                                total_ext1_len = ext1->extLen * XRAN_SECTIONEXT_ALIGN;
                                if (pRbElm->bf_weight.bfwCompMeth == XRAN_COMPMETHOD_NONE){
                                    iq_size = N * bfwIqWidth * 2;  // total in bits
                                    parm_size = iq_size>>3;        // total in bytes (/8)
                                    if(iq_size%8) parm_size++;     // round up

                                    memcpy(pos,iq_data,len);

                                    /* parm_size = per-set compression parameter bytes */
                                    switch(pRbElm->bf_weight.bfwCompMeth) {
                                        case XRAN_BFWCOMPMETHOD_BLKFLOAT:
                                        case XRAN_BFWCOMPMETHOD_BLKSCALE:
                                        case XRAN_BFWCOMPMETHOD_ULAW:
                                        case XRAN_BFWCOMPMETHOD_BEAMSPACE:
                                            parm_size = N>>3; if(N%8) parm_size++; parm_size *= 8;

                                    /* Get BF weights */
                                    iq_size = N * bfwIqWidth * 2;  // total in bits
                                    parm_size = iq_size>>3;        // total in bytes (/8)
                                    if(iq_size%8) parm_size++;     // round up

                                    struct xranlib_decompress_request bfp_decom_req;
                                    struct xranlib_decompress_response bfp_decom_rsp;

                                    memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
                                    memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));

                                    bfp_decom_req.data_in         = (int8_t*)iq_data;
                                    bfp_decom_req.numRBs          = 1;
                                    bfp_decom_req.numDataElements = N*2;
                                    bfp_decom_req.len             = len;
                                    bfp_decom_req.compMethod      = pRbElm->bf_weight.bfwCompMeth;
                                    bfp_decom_req.iqWidth         = bfwIqWidth;

                                    bfp_decom_rsp.data_out        = (int16_t *)(pos);
                                    bfp_decom_rsp.len             = 0;
                                    xranlib_decompress_bfw(&bfp_decom_req, &bfp_decom_rsp);

                                /* step to the next ext1 + section header pair */
                                src += (total_ext1_len + sizeof(struct xran_cp_radioapp_section1));

                } /* for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++) */
            } /* if(p_o_xu_cfg->appMode == APP_O_RU) */

            /* CP - UL for O-RU only */
            if(p_o_xu_cfg->appMode == APP_O_RU && p_o_xu_cfg->xranCat == 1 && p_o_xu_cfg->extType == 1) {
                for(ant_id = 0; ant_id < p_o_xu_cfg->numUlAxc; ant_id++) {
                    uint16_t idxElm = 0;

                    struct xran_prb_map *pRbMap = NULL;
                    struct xran_prb_elm *pRbElm = NULL;
                    int8_t *iq_data = NULL;
                    uint16_t N = pXranConf->nAntElmTRx;

                    struct xran_cp_radioapp_section_ext1 * ext1;

                    uint8_t total_ext1_len = 0;
                    int32_t tti_dst = tti % p_iq->numSlots;
                    int32_t tx_ul_bfw_buffer_position = tti_dst * (pXranConf->nULRBs*pXranConf->nAntElmTRx)*4;
                    pRbMap = (struct xran_prb_map *) psIoCtrl->sFHCpTxPrbMapBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers->pData;
                    if(pRbMap == NULL) {
                        printf("pRbMap == NULL\n");

                    flowId = p_o_xu_cfg->numUlAxc * cc_id + ant_id;
                    pos = ((char*)p_iq->p_tx_ul_bfw_log_buffer[flowId]) + tx_ul_bfw_buffer_position;
                    for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++ ) {
                        pRbElm = &pRbMap->prbMap[idxElm];
                        bfwIqWidth = pRbElm->bf_weight.bfwIqWidth;
                        if(p_iq->p_tx_ul_bfw_log_buffer[flowId]) {
                            src = (char *)pRbElm->bf_weight.p_ext_section;
                            if(!pRbElm->bf_weight.p_ext_start)

                            for(i = 0; i < (pRbElm->bf_weight.numSetBFWs); i++) {

                                src_buf = (uint8_t *)src;
                                ext1 = (struct xran_cp_radioapp_section_ext1 *)src_buf;
                                src_buf += sizeof(struct xran_cp_radioapp_section_ext1);

                                iq_data = (int8_t *)(src_buf);
                                total_ext1_len = ext1->extLen * XRAN_SECTIONEXT_ALIGN;
                                if (pRbElm->bf_weight.bfwCompMeth == XRAN_COMPMETHOD_NONE){
                                    iq_size = N * bfwIqWidth * 2;  // total in bits
                                    parm_size = iq_size>>3;        // total in bytes (/8)
                                    if(iq_size%8) parm_size++;     // round up

                                    memcpy(pos,iq_data,len);

                                    switch(pRbElm->bf_weight.bfwCompMeth) {
                                        case XRAN_BFWCOMPMETHOD_BLKFLOAT:
                                        case XRAN_BFWCOMPMETHOD_BLKSCALE:
                                        case XRAN_BFWCOMPMETHOD_ULAW:
                                        case XRAN_BFWCOMPMETHOD_BEAMSPACE:
                                            parm_size = N>>3; if(N%8) parm_size++; parm_size *= 8;

                                    /* Get BF weights */
                                    iq_size = N * bfwIqWidth * 2;  // total in bits
                                    parm_size = iq_size>>3;        // total in bytes (/8)
                                    if(iq_size%8) parm_size++;     // round up

                                    struct xranlib_decompress_request bfp_decom_req;
                                    struct xranlib_decompress_response bfp_decom_rsp;

                                    memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
                                    memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));

                                    bfp_decom_req.data_in         = (int8_t*)iq_data;
                                    bfp_decom_req.numRBs          = 1;
                                    bfp_decom_req.numDataElements = N*2;
                                    bfp_decom_req.len             = len;
                                    bfp_decom_req.compMethod      = pRbElm->bf_weight.bfwCompMeth;
                                    bfp_decom_req.iqWidth         = bfwIqWidth;

                                    bfp_decom_rsp.data_out        = (int16_t *)(pos);
                                    bfp_decom_rsp.len             = 0;
                                    xranlib_decompress_bfw(&bfp_decom_req, &bfp_decom_rsp);

                                src += (total_ext1_len + sizeof(struct xran_cp_radioapp_section1));

                } /* for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++) */
            } /* if(p_o_xu_cfg->appMode == APP_O_RU) */
        } /*for(tti = 0; tti < XRAN_N_FE_BUF_LEN; tti++)*/
    } /*for(cc_id = 0; cc_id <nSectorNum; cc_id++)*/
/**
 * Build the eAxC ID field layout (bit positions and masks for cuPortId,
 * bandSectorId, ccId, ruPortId) for the xran library.
 *
 * If the runtime config supplies explicit bit widths that sum to the full
 * 16-bit eAxC ID, masks are constructed from those widths (ruPortId in the
 * LSBs, then ccId, bandSectorId, cuPortId).  Otherwise a per-category
 * default split is used: Cat A = 4/4/4/4, Cat B = 4/2/2/8 (8 ruPortId bits
 * to allow more than 127 eAxCs).
 *
 * @param p_eAxC_cfg  output: eAxC ID bit/mask configuration
 * @param p_s_cfg     input runtime configuration (may be adjusted: Cat A
 *                    forces numUlAxc = numAxc)
 */
app_io_xran_eAxCid_conf_set(struct xran_eaxcid_config *p_eAxC_cfg, RuntimeConfig * p_s_cfg)

    if(p_s_cfg->DU_Port_ID_bitwidth && p_s_cfg->BandSector_ID_bitwidth && p_s_cfg->CC_ID_bitwidth
        && p_s_cfg->RU_Port_ID_bitwidth &&
        (p_s_cfg->DU_Port_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth + p_s_cfg->CC_ID_bitwidth
            + p_s_cfg->RU_Port_ID_bitwidth) == 16 /* eAxC ID subfields are 16 bits */
        ){ /* bit mask provided */

        /* ruPortId occupies the least-significant bits */
        p_eAxC_cfg->bit_ruPortId = 0;
        for (shift = 0; shift < p_s_cfg->RU_Port_ID_bitwidth; shift++){
        p_eAxC_cfg->mask_ruPortId = mask;

        /* ccId sits immediately above ruPortId */
        p_eAxC_cfg->bit_ccId = p_s_cfg->RU_Port_ID_bitwidth;

        for (shift = p_s_cfg->RU_Port_ID_bitwidth; shift < p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth; shift++){
        p_eAxC_cfg->mask_ccId = mask;

        /* bandSectorId above ccId */
        p_eAxC_cfg->bit_bandSectorId = p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth;

        for (shift = p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth; shift < p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth; shift++){
        p_eAxC_cfg->mask_bandSectorId = mask;

        /* cuPortId (DU port) occupies the most-significant bits */
        p_eAxC_cfg->bit_cuPortId = p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth;

        for (shift = p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth;
                shift < p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth + p_s_cfg->DU_Port_ID_bitwidth; shift++){
        p_eAxC_cfg->mask_cuPortId = mask;

    } else { /* bit mask config is not provided */
        switch (p_s_cfg->xranCat){
            case XRAN_CATEGORY_A: {
                /* default 4/4/4/4 split */
                p_eAxC_cfg->mask_cuPortId     = 0xf000;
                p_eAxC_cfg->mask_bandSectorId = 0x0f00;
                p_eAxC_cfg->mask_ccId         = 0x00f0;
                p_eAxC_cfg->mask_ruPortId     = 0x000f;
                p_eAxC_cfg->bit_cuPortId      = 12;
                p_eAxC_cfg->bit_bandSectorId  = 8;
                p_eAxC_cfg->bit_ccId          = 4;
                p_eAxC_cfg->bit_ruPortId      = 0;

            case XRAN_CATEGORY_B: {
                /* default 4/2/2/8 split */
                p_eAxC_cfg->mask_cuPortId     = 0xf000;
                p_eAxC_cfg->mask_bandSectorId = 0x0c00;
                p_eAxC_cfg->mask_ccId         = 0x0300;
                p_eAxC_cfg->mask_ruPortId     = 0x00ff; /* more than [0-127] eAxC */
                p_eAxC_cfg->bit_cuPortId      = 12;
                p_eAxC_cfg->bit_bandSectorId  = 10;
                p_eAxC_cfg->bit_ccId          = 8;
                p_eAxC_cfg->bit_ruPortId      = 0;

                rte_panic("Incorrect Category\n");

    /* Category A has no separate UL AxC count */
    if(p_s_cfg->xranCat == XRAN_CATEGORY_A)
        p_s_cfg->numUlAxc = p_s_cfg->numAxc;

    printf("bit_cuPortId     %2d mask 0x%04x\n",p_eAxC_cfg->bit_cuPortId, p_eAxC_cfg->mask_cuPortId);
    printf("bit_bandSectorId %2d mask 0x%04x\n",p_eAxC_cfg->bit_bandSectorId, p_eAxC_cfg->mask_bandSectorId);
    printf("bit_ccId         %2d mask 0x%04x\n",p_eAxC_cfg->bit_ccId, p_eAxC_cfg->mask_ccId);
    printf("ruPortId         %2d mask 0x%04x\n",p_eAxC_cfg->bit_ruPortId, p_eAxC_cfg->mask_ruPortId);
/**
 * Populate the xran fronthaul per-port configuration (p_xran_fh_cfg) from the
 * use-case and runtime configuration: RB counts, static PRB maps (when
 * dynamic sections are disabled), frame/TDD structure, PRACH and SRS
 * parameters, RU compression settings, center-frequency ARFCNs, and the
 * C/U-plane timing windows (T1a/T2a/Ta3/Ta4).
 *
 * @param p_use_cfg       use-case level configuration
 * @param p_o_xu_cfg      per-O-xU runtime configuration
 * @param p_xran_fh_init  xran init structure (for port-level context)
 * @param p_xran_fh_cfg   output: fully reset and repopulated FH config
 */
app_io_xran_fh_config_init(UsecaseConfig* p_use_cfg, RuntimeConfig* p_o_xu_cfg, struct xran_fh_init* p_xran_fh_init, struct xran_fh_config* p_xran_fh_cfg)

    int32_t o_xu_id = 0;
    uint32_t nCenterFreq = 0;
    struct xran_prb_map* pRbMap = NULL;

    /* start from a clean config */
    memset(p_xran_fh_cfg, 0, sizeof(struct xran_fh_config));

    o_xu_id = p_o_xu_cfg->o_xu_id;

    /* derive RB counts from bandwidth/numerology */
    p_xran_fh_cfg->nDLRBs = app_xran_get_num_rbs(p_o_xu_cfg->xranTech, p_o_xu_cfg->mu_number, p_o_xu_cfg->nDLBandwidth, p_o_xu_cfg->nDLAbsFrePointA);
    p_xran_fh_cfg->nULRBs = app_xran_get_num_rbs(p_o_xu_cfg->xranTech, p_o_xu_cfg->mu_number, p_o_xu_cfg->nULBandwidth, p_o_xu_cfg->nULAbsFrePointA);

    if(p_o_xu_cfg->DynamicSectionEna == 0){
        /* static sections: one full-band, uncompressed PRB element per direction */
        pRbMap = p_o_xu_cfg->p_PrbMapDl;

        pRbMap->dir = XRAN_DIR_DL;
        pRbMap->xran_port = 0;
        pRbMap->band_id = 0;
        pRbMap->ru_port_id = 0;
        pRbMap->start_sym_id = 0;
        pRbMap->nPrbElm = 1;
        pRbMap->prbMap[0].nStartSymb = 0;
        pRbMap->prbMap[0].numSymb = 14;
        pRbMap->prbMap[0].nRBStart = 0;
        pRbMap->prbMap[0].nRBSize = p_xran_fh_cfg->nDLRBs;
        pRbMap->prbMap[0].nBeamIndex = 0;
        pRbMap->prbMap[0].compMethod = XRAN_COMPMETHOD_NONE;
        pRbMap->prbMap[0].iqWidth = 16;

        pRbMap = p_o_xu_cfg->p_PrbMapUl;
        pRbMap->dir = XRAN_DIR_UL;
        pRbMap->xran_port = 0;
        pRbMap->band_id = 0;
        pRbMap->ru_port_id = 0;
        pRbMap->start_sym_id = 0;
        pRbMap->nPrbElm = 1;
        pRbMap->prbMap[0].nStartSymb = 0;
        pRbMap->prbMap[0].numSymb = 14;
        pRbMap->prbMap[0].nRBStart = 0;
        pRbMap->prbMap[0].nRBSize = p_xran_fh_cfg->nULRBs;
        pRbMap->prbMap[0].nBeamIndex = 0;
        pRbMap->prbMap[0].compMethod = XRAN_COMPMETHOD_NONE;
        pRbMap->prbMap[0].iqWidth = 16;

        /* dynamic sections: only tag the preconfigured maps with identity fields */
        pRbMap = p_o_xu_cfg->p_PrbMapDl;

        pRbMap->dir = XRAN_DIR_DL;
        pRbMap->xran_port = 0;
        pRbMap->band_id = 0;
        pRbMap->ru_port_id = 0;
        pRbMap->start_sym_id = 0;

        pRbMap = p_o_xu_cfg->p_PrbMapUl;
        pRbMap->dir = XRAN_DIR_UL;
        pRbMap->xran_port = 0;
        pRbMap->band_id = 0;
        pRbMap->ru_port_id = 0;
        pRbMap->start_sym_id = 0;

        pRbMap = p_o_xu_cfg->p_PrbMapSrs;
        pRbMap->dir = XRAN_DIR_UL;
        pRbMap->xran_port = 0;
        pRbMap->band_id = 0;
        pRbMap->ru_port_id = 0;
        pRbMap->start_sym_id = 0;

    /* basic port identity and antenna configuration */
    p_xran_fh_cfg->sector_id                        = 0;
    p_xran_fh_cfg->dpdk_port                        = o_xu_id;
    p_xran_fh_cfg->nCC                              = p_o_xu_cfg->numCC;
    p_xran_fh_cfg->neAxc                            = p_o_xu_cfg->numAxc;
    p_xran_fh_cfg->neAxcUl                          = p_o_xu_cfg->numUlAxc;
    p_xran_fh_cfg->nAntElmTRx                       = p_o_xu_cfg->antElmTRx;

    p_xran_fh_cfg->frame_conf.nFrameDuplexType      = p_o_xu_cfg->nFrameDuplexType;
    p_xran_fh_cfg->frame_conf.nNumerology           = p_o_xu_cfg->mu_number;
    p_xran_fh_cfg->frame_conf.nTddPeriod            = p_o_xu_cfg->nTddPeriod;

    for (i = 0; i < p_o_xu_cfg->nTddPeriod; i++){
        p_xran_fh_cfg->frame_conf.sSlotConfig[i] = p_o_xu_cfg->sSlotConfig[i];

    p_xran_fh_cfg->prach_conf.nPrachSubcSpacing     = p_o_xu_cfg->mu_number;
    p_xran_fh_cfg->prach_conf.nPrachFreqStart       = 0;
    p_xran_fh_cfg->prach_conf.nPrachFilterIdx       = XRAN_FILTERINDEX_PRACH_ABC;
    p_xran_fh_cfg->prach_conf.nPrachConfIdx         = p_o_xu_cfg->prachConfigIndex;
    p_xran_fh_cfg->prach_conf.nPrachConfIdxLTE      = p_o_xu_cfg->prachConfigIndexLTE; //will be used in case of dss only
    p_xran_fh_cfg->prach_conf.nPrachFreqOffset      = -792;

    p_xran_fh_cfg->srs_conf.symbMask                = p_o_xu_cfg->srsSymMask; // deprecated

    /* SRS eAxC ids follow PUSCH + PRACH flows */
    if(p_o_xu_cfg->numAxc > XRAN_MAX_PRACH_ANT_NUM)
        p_xran_fh_cfg->srs_conf.eAxC_offset         = p_o_xu_cfg->numAxc + XRAN_MAX_PRACH_ANT_NUM; /* PUSCH, PRACH, SRS */
        p_xran_fh_cfg->srs_conf.eAxC_offset         = 2 * p_o_xu_cfg->numAxc; /* PUSCH, PRACH, SRS */
    p_xran_fh_cfg->srs_conf.slot                    = p_o_xu_cfg->srsSlot;
    p_xran_fh_cfg->srs_conf.ndm_offset              = p_o_xu_cfg->srsNdmOffset;
    p_xran_fh_cfg->srs_conf.ndm_txduration          = p_o_xu_cfg->srsNdmTxDuration;

    p_xran_fh_cfg->ru_conf.xranTech                 = p_o_xu_cfg->xranTech;
    p_xran_fh_cfg->ru_conf.xranCompHdrType          = p_o_xu_cfg->CompHdrType;
    p_xran_fh_cfg->ru_conf.xranCat                  = p_o_xu_cfg->xranCat;

    /* Cat A: UL AxC count mirrors DL */
    if (p_xran_fh_cfg->ru_conf.xranCat == XRAN_CATEGORY_A)
        p_xran_fh_cfg->neAxcUl = p_xran_fh_cfg->neAxc;

    p_xran_fh_cfg->ru_conf.iqWidth                  = p_o_xu_cfg->p_PrbMapDl->prbMap[0].iqWidth;

    if (p_o_xu_cfg->compression == 0)
        p_xran_fh_cfg->ru_conf.compMeth             = XRAN_COMPMETHOD_NONE;
        p_xran_fh_cfg->ru_conf.compMeth             = XRAN_COMPMETHOD_BLKFLOAT;

    p_xran_fh_cfg->ru_conf.compMeth_PRACH           = p_o_xu_cfg->prachCompMethod;
    /* uncompressed PRACH always uses full 16-bit IQ */
    if (p_o_xu_cfg->prachCompMethod == 0)
        p_o_xu_cfg->prachiqWidth = 16;
    p_xran_fh_cfg->ru_conf.iqWidth_PRACH            = p_o_xu_cfg->prachiqWidth;

    /* fftSize = log2(nULFftSize) */
    p_xran_fh_cfg->ru_conf.fftSize                  = 0;
    while (p_o_xu_cfg->nULFftSize >>= 1)
        ++p_xran_fh_cfg->ru_conf.fftSize;

    p_xran_fh_cfg->ru_conf.byteOrder = (p_o_xu_cfg->nebyteorderswap == 1) ? XRAN_NE_BE_BYTE_ORDER : XRAN_CPU_LE_BYTE_ORDER ;
    p_xran_fh_cfg->ru_conf.iqOrder   = (p_o_xu_cfg->iqswap == 1) ? XRAN_Q_I_ORDER : XRAN_I_Q_ORDER;

    printf("FFT Order %d\n", p_xran_fh_cfg->ru_conf.fftSize);

    /* center frequency = point A + half the occupied bandwidth */
    nCenterFreq = p_o_xu_cfg->nDLAbsFrePointA + (((p_xran_fh_cfg->nDLRBs * N_SC_PER_PRB) / 2) * app_xran_get_scs(p_o_xu_cfg->mu_number));
    p_xran_fh_cfg->nDLCenterFreqARFCN = app_xran_cal_nrarfcn(nCenterFreq);
    printf("DL center freq %d DL NR-ARFCN  %d\n", nCenterFreq, p_xran_fh_cfg->nDLCenterFreqARFCN);

    nCenterFreq = p_o_xu_cfg->nULAbsFrePointA + (((p_xran_fh_cfg->nULRBs * N_SC_PER_PRB) / 2) * app_xran_get_scs(p_o_xu_cfg->mu_number));
    p_xran_fh_cfg->nULCenterFreqARFCN = app_xran_cal_nrarfcn(nCenterFreq);
    printf("UL center freq %d UL NR-ARFCN  %d\n", nCenterFreq, p_xran_fh_cfg->nULCenterFreqARFCN);

    p_xran_fh_cfg->bbdev_dec = NULL;
    p_xran_fh_cfg->bbdev_enc = NULL;

    p_xran_fh_cfg->log_level = 1;

    p_xran_fh_cfg->max_sections_per_slot   = RTE_MAX(p_o_xu_cfg->max_sections_per_slot, XRAN_MIN_SECTIONS_PER_SLOT);
    p_xran_fh_cfg->max_sections_per_symbol = RTE_MAX(p_o_xu_cfg->max_sections_per_symbol, XRAN_MIN_SECTIONS_PER_SLOT);
    p_xran_fh_cfg->RunSlotPrbMapBySymbolEnable = p_o_xu_cfg->RunSlotPrbMapBySymbolEnable;

    /* NOTE(review): argument order is (per_slot, per_symbol) while the labels
       read "per symb ... per slot" — confirm intended label/argument pairing */
    printf("Max Sections: %d per symb %d per slot\n", p_xran_fh_cfg->max_sections_per_slot, p_xran_fh_cfg->max_sections_per_symbol);
    if(p_o_xu_cfg->maxFrameId)
        p_xran_fh_cfg->ru_conf.xran_max_frame = p_o_xu_cfg->maxFrameId;

    /* C/U-plane transmit/receive timing windows */
    p_xran_fh_cfg->Tadv_cp_dl    = p_o_xu_cfg->Tadv_cp_dl;
    p_xran_fh_cfg->T2a_min_cp_dl = p_o_xu_cfg->T2a_min_cp_dl;
    p_xran_fh_cfg->T2a_max_cp_dl = p_o_xu_cfg->T2a_max_cp_dl;
    p_xran_fh_cfg->T2a_min_cp_ul = p_o_xu_cfg->T2a_min_cp_ul;
    p_xran_fh_cfg->T2a_max_cp_ul = p_o_xu_cfg->T2a_max_cp_ul;
    p_xran_fh_cfg->T2a_min_up    = p_o_xu_cfg->T2a_min_up;
    p_xran_fh_cfg->T2a_max_up    = p_o_xu_cfg->T2a_max_up;
    p_xran_fh_cfg->Ta3_min       = p_o_xu_cfg->Ta3_min;
    p_xran_fh_cfg->Ta3_max       = p_o_xu_cfg->Ta3_max;
    p_xran_fh_cfg->T1a_min_cp_dl = p_o_xu_cfg->T1a_min_cp_dl;
    p_xran_fh_cfg->T1a_max_cp_dl = p_o_xu_cfg->T1a_max_cp_dl;
    p_xran_fh_cfg->T1a_min_cp_ul = p_o_xu_cfg->T1a_min_cp_ul;
    p_xran_fh_cfg->T1a_max_cp_ul = p_o_xu_cfg->T1a_max_cp_ul;
    p_xran_fh_cfg->T1a_min_up    = p_o_xu_cfg->T1a_min_up;
    p_xran_fh_cfg->T1a_max_up    = p_o_xu_cfg->T1a_max_up;
    p_xran_fh_cfg->Ta4_min       = p_o_xu_cfg->Ta4_min;
    p_xran_fh_cfg->Ta4_max       = p_o_xu_cfg->Ta4_max;

    /* feature enables and debug controls */
    p_xran_fh_cfg->enableCP          = p_o_xu_cfg->enableCP;
    p_xran_fh_cfg->prachEnable       = p_o_xu_cfg->enablePrach;
    p_xran_fh_cfg->srsEnable         = p_o_xu_cfg->enableSrs;
    p_xran_fh_cfg->puschMaskEnable   = p_o_xu_cfg->puschMaskEnable;
    p_xran_fh_cfg->puschMaskSlot     = p_o_xu_cfg->puschMaskSlot;
    p_xran_fh_cfg->debugStop         = p_o_xu_cfg->debugStop;
    p_xran_fh_cfg->debugStopCount    = p_o_xu_cfg->debugStopCount;
    p_xran_fh_cfg->DynamicSectionEna = p_o_xu_cfg->DynamicSectionEna;
    p_xran_fh_cfg->GPS_Alpha         = p_o_xu_cfg->GPS_Alpha;
    p_xran_fh_cfg->GPS_Beta          = p_o_xu_cfg->GPS_Beta;

    p_xran_fh_cfg->cp_vlan_tag       = p_o_xu_cfg->cp_vlan_tag;
    p_xran_fh_cfg->up_vlan_tag       = p_o_xu_cfg->up_vlan_tag;

    /* DSS (dynamic spectrum sharing) slot pattern */
    p_xran_fh_cfg->dssEnable = p_o_xu_cfg->dssEnable;
    p_xran_fh_cfg->dssPeriod = p_o_xu_cfg->dssPeriod;
    for(i=0; i<p_o_xu_cfg->dssPeriod; i++) {
        p_xran_fh_cfg->technology[i] = p_o_xu_cfg->technology[i];
/**
 * @brief Populate struct xran_fh_init from the usecase configuration
 *        (cores, one-way delay measurement, VF/PCIe mapping, Ethernet link
 *        parameters) and the per-O-XU runtime configuration (MTU, MAC
 *        addresses, beamforming weight total) before the library is started.
 *
 * @param p_use_cfg      application usecase configuration (shared across O-XUs)
 * @param p_o_xu_cfg     per-O-XU runtime configuration
 * @param p_xran_fh_init xRAN front-haul init structure to fill (zeroed first)
 */
2669 app_io_xran_fh_init_init(UsecaseConfig* p_use_cfg, RuntimeConfig* p_o_xu_cfg, struct xran_fh_init* p_xran_fh_init)
2673 int32_t o_xu_id = 0;
2674 int32_t pf_link_id = 0;
/* Number of VFs per PF link when C-Plane and U-Plane use separate VFs. */
2675 int32_t num_vfs_cu_p = 2;
/* Start from a clean slate: every field not explicitly set below stays zero. */
2678 memset(p_xran_fh_init, 0, sizeof(struct xran_fh_init));
/* O-DU role: io_cfg.id = 0 selects O-DU behavior in the xRAN library. */
2680 if(p_o_xu_cfg->appMode == APP_O_DU) {
2681 printf("set O-DU\n");
2682 p_xran_fh_init->io_cfg.id = 0;/* O-DU */
2683 p_xran_fh_init->io_cfg.core = p_use_cfg->io_core;
2684 p_xran_fh_init->io_cfg.system_core = p_use_cfg->system_core;
2685 p_xran_fh_init->io_cfg.pkt_proc_core = p_use_cfg->io_worker; /* do not start */
2686 p_xran_fh_init->io_cfg.pkt_proc_core_64_127 = p_use_cfg->io_worker_64_127;
2687 p_xran_fh_init->io_cfg.pkt_aux_core = 0; /* do not start*/
/* Timing thread shares the same core as general IO. */
2688 p_xran_fh_init->io_cfg.timing_core = p_use_cfg->io_core;
2689 p_xran_fh_init->io_cfg.dpdkIoVaMode = p_use_cfg->iova_mode;
/* eOWD (owdm*) common parameters for the O-DU side of the link —
 * presumably eCPRI one-way delay measurement; confirm against xran docs. */
2690 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].initiator_en = p_use_cfg->owdmInitEn;
2691 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].measMethod = p_use_cfg->owdmMeasMeth;
2692 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].numberOfSamples = p_use_cfg->owdmNumSamps;
2693 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].filterType = p_use_cfg->owdmFltType;
2694 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].responseTo = p_use_cfg->owdmRspTo;
2695 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].measState = p_use_cfg->owdmMeasState;
2696 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].measId = p_use_cfg->owdmMeasId;
2697 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].owdm_enable = p_use_cfg->owdmEnable;
2698 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].owdm_PlLength = p_use_cfg->owdmPlLength;
/* DL C-Plane burst processing is configurable only on the O-DU side. */
2699 p_xran_fh_init->dlCpProcBurst = p_use_cfg->dlCpProcBurst;
/* O-RU role: io_cfg.id = 1; mirrors the O-DU setup but without
 * pkt_proc_core_64_127 and dlCpProcBurst. */
2702 printf("set O-RU\n");
2703 p_xran_fh_init->io_cfg.id = 1; /* O-RU*/
2704 p_xran_fh_init->io_cfg.core = p_use_cfg->io_core;
2705 p_xran_fh_init->io_cfg.system_core = p_use_cfg->system_core;
2706 p_xran_fh_init->io_cfg.pkt_proc_core = p_use_cfg->io_worker; /* do not start */
2707 p_xran_fh_init->io_cfg.pkt_aux_core = 0; /* do not start */
2708 p_xran_fh_init->io_cfg.timing_core = p_use_cfg->io_core;
2709 p_xran_fh_init->io_cfg.dpdkIoVaMode = p_use_cfg->iova_mode;
2710 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].initiator_en = p_use_cfg->owdmInitEn;
2711 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].measMethod = p_use_cfg->owdmMeasMeth;
2712 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].numberOfSamples = p_use_cfg->owdmNumSamps;
2713 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].filterType = p_use_cfg->owdmFltType;
2714 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].responseTo = p_use_cfg->owdmRspTo;
2715 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].measState = p_use_cfg->owdmMeasState;
2716 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].measId = p_use_cfg->owdmMeasId;
2717 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].owdm_enable = p_use_cfg->owdmEnable;
2718 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].owdm_PlLength = p_use_cfg->owdmPlLength;
/* BBU offload is honored only on the O-DU side; when enabled it forces
 * single-burst DL C-Plane processing. */
2721 if(p_use_cfg->bbu_offload) {
2722 if (p_xran_fh_init->io_cfg.id == 0) { /* O-DU */
2723 p_xran_fh_init->io_cfg.bbu_offload = 1;
2724 p_xran_fh_init->dlCpProcBurst = 1;
2725 p_xran_fh_init->io_cfg.bbu_offload = 0;
2729 p_xran_fh_init->io_cfg.bbu_offload = 0;
/* Without BBU offload the sample app needs at least 20 TTIs of front-end
 * buffering; abort early rather than fail at runtime. */
2732 if (p_xran_fh_init->io_cfg.bbu_offload == 0 && XRAN_N_FE_BUF_LEN < 20)
2733 rte_panic("Sample application with out BBU requires XRAN_N_FE_BUF_LEN to be at least 20 TTIs\n");
2735 p_xran_fh_init->io_cfg.io_sleep = p_use_cfg->io_sleep;
2736 p_xran_fh_init->io_cfg.dpdkMemorySize = p_use_cfg->dpdk_mem_sz;
/* This sample does not use a BBDev accelerator. */
2737 p_xran_fh_init->io_cfg.bbdev_mode = XRAN_BBDEV_NOT_USED;
2739 p_xran_fh_init->xran_ports = p_use_cfg->oXuNum;
2740 p_xran_fh_init->io_cfg.nEthLinePerPort = p_use_cfg->EthLinesNumber;
2741 p_xran_fh_init->io_cfg.nEthLineSpeed = p_use_cfg->EthLinkSpeed;
/* Note the inverted sense: config carries a "disable" flag, init an "enable". */
2743 if(p_use_cfg->mlogxrandisable == 1)
2744 p_xran_fh_init->mlogxranenable = 0;
2746 p_xran_fh_init->mlogxranenable = 1;
2748 app_io_xran_eAxCid_conf_set(&p_xran_fh_init->eAxCId_conf, p_o_xu_cfg);
2751 if(p_use_cfg->one_vf_cu_plane == 1){
/* Map each O-XU/PF link to DPDK VF slots: even slot carries U-Plane and,
 * when C/U planes use separate VFs, the odd slot carries C-Plane.
 * NOTE(review): 'i' is advanced on lines not visible in this extract —
 * confirm it counts VF slot pairs across the nested loops. */
2755 for(o_xu_id = 0; o_xu_id < p_use_cfg->oXuNum; o_xu_id++ ) { /* all O-XU */
2756 for(pf_link_id = 0; pf_link_id < p_use_cfg->EthLinesNumber && pf_link_id < XRAN_ETH_PF_LINKS_NUM; pf_link_id++ ) { /* all PF ports for each O-XU */
2757 if(num_vfs_cu_p*i < (XRAN_VF_MAX - 1)) {
2758 p_xran_fh_init->io_cfg.dpdk_dev[num_vfs_cu_p*i] = &p_use_cfg->o_xu_pcie_bus_addr[o_xu_id][num_vfs_cu_p*pf_link_id][0]; /* U-Plane */
2759 rte_ether_addr_copy(&p_use_cfg->remote_o_xu_addr[o_xu_id][num_vfs_cu_p*pf_link_id], &p_use_cfg->remote_o_xu_addr_copy[num_vfs_cu_p*i]);
2760 printf("VF[%d] %s\n",num_vfs_cu_p*i, p_xran_fh_init->io_cfg.dpdk_dev[num_vfs_cu_p*i]);
2761 if(p_use_cfg->one_vf_cu_plane == 0){
2762 p_xran_fh_init->io_cfg.dpdk_dev[num_vfs_cu_p*i+1] = &p_use_cfg->o_xu_pcie_bus_addr[o_xu_id][num_vfs_cu_p*pf_link_id+1][0]; /* C-Plane */
2763 rte_ether_addr_copy(&p_use_cfg->remote_o_xu_addr[o_xu_id][num_vfs_cu_p*pf_link_id+1], &p_use_cfg->remote_o_xu_addr_copy[num_vfs_cu_p*i+1]);
2764 printf("VF[%d] %s\n",num_vfs_cu_p*i+1, p_xran_fh_init->io_cfg.dpdk_dev[num_vfs_cu_p*i+1]);
2773 p_xran_fh_init->io_cfg.one_vf_cu_plane = p_use_cfg->one_vf_cu_plane;
/* One shared VF per link when C/U planes are combined, two otherwise. */
2775 if(p_xran_fh_init->io_cfg.one_vf_cu_plane) {
2776 p_use_cfg->num_vfs = i;
2778 p_use_cfg->num_vfs = 2*i;
2780 printf("p_use_cfg->num_vfs %d\n", p_use_cfg->num_vfs);
2781 printf("p_use_cfg->num_rxq %d\n", p_use_cfg->num_rxq);
2783 p_xran_fh_init->io_cfg.num_vfs = p_use_cfg->num_vfs;
2784 p_xran_fh_init->io_cfg.num_rxq = p_use_cfg->num_rxq;
2785 p_xran_fh_init->mtu = p_o_xu_cfg->mtu;
/* Local MAC comes from the runtime config, remote MAC from the copy built
 * in the VF-mapping loop above; which is which depends on the app role. */
2786 if(p_use_cfg->appMode == APP_O_DU){
2787 p_xran_fh_init->p_o_du_addr = (int8_t *)p_o_xu_cfg->o_du_addr;
2788 p_xran_fh_init->p_o_ru_addr = (int8_t *)p_use_cfg->remote_o_xu_addr_copy;
2790 p_xran_fh_init->p_o_du_addr = (int8_t *)p_use_cfg->remote_o_xu_addr_copy;
2791 p_xran_fh_init->p_o_ru_addr = (int8_t *)p_o_xu_cfg->o_ru_addr;
/* Per-instance shared file prefix ("wls_<id>") so multiple app instances
 * do not collide. */
2794 snprintf(p_use_cfg->prefix_name, sizeof(p_use_cfg->prefix_name), "wls_%d",p_use_cfg->instance_id);
2795 p_xran_fh_init->filePrefix = p_use_cfg->prefix_name;
2796 p_xran_fh_init->totalBfWeights = p_o_xu_cfg->totalBfWeights;
/* Lazily allocate one cache/SIMD-aligned buffer context per O-XU port. */
2799 for(o_xu_id = 0; o_xu_id < p_use_cfg->oXuNum && o_xu_id < XRAN_PORTS_NUM; o_xu_id++ ) { /* all O-XU */
2800 if(p_o_xu_buff[o_xu_id] == NULL) {
2801 ptr = _mm_malloc(sizeof(struct o_xu_buffers), 256);
2803 rte_panic("_mm_malloc: Can't allocate %lu bytes\n", sizeof(struct o_xu_buffers));
2805 p_o_xu_buff[o_xu_id] = (struct o_xu_buffers*)ptr;
2808 p_o_xu_cfg->p_buff = p_o_xu_buff[o_xu_id];
/**
 * @brief Compute the global front-haul RX/TX buffer sizes
 *        (nFpgaToSW_FTH_RxBufferLen, nFpgaToSW_PRACH_RxBufferLen,
 *        nSW_ToFpga_FTH_TxBufferLen) from the configured numerology
 *        (mu_number) and MTU.
 *
 * @param p_o_xu_cfg per-O-XU runtime configuration (reads mu_number, mtu,
 *                   max_sections_per_slot)
 */
2816 app_io_xran_buffers_max_sz_set (RuntimeConfig* p_o_xu_cfg)
/* Enforce the library minimum for sections per slot. */
2818 uint32_t xran_max_sections_per_slot = RTE_MAX(p_o_xu_cfg->max_sections_per_slot, XRAN_MIN_SECTIONS_PER_SLOT);
/* Numerology 0/1 case. */
2820 if (p_o_xu_cfg->mu_number <= 1){
/* Jumbo-frame path: RX buffers hold a full symbol (13168 = 273*12*4 + 64
 * per the in-line comment). */
2821 if (p_o_xu_cfg->mtu > XRAN_MTU_DEFAULT) {
2822 nFpgaToSW_FTH_RxBufferLen = 13168; /* 273*12*4 + 64*/
2823 nFpgaToSW_PRACH_RxBufferLen = 8192;
/* TX buffer adds worst-case per-section header overhead (mbuf headroom +
 * Ethernet + eCPRI + radio-app + section headers) on top of the payload. */
2824 nSW_ToFpga_FTH_TxBufferLen = 13168 + /* 273*12*4 + 64* + ETH AND ORAN HDRs */
2825 xran_max_sections_per_slot* (RTE_PKTMBUF_HEADROOM + sizeof(struct rte_ether_hdr) +
2826 sizeof(struct xran_ecpri_hdr) +
2827 sizeof(struct radio_app_common_hdr) +
2828 sizeof(struct data_section_hdr));
/* Default-MTU path: RX buffers capped at one standard MTU; TX sizing is
 * unchanged from the jumbo case. */
2830 nFpgaToSW_FTH_RxBufferLen = XRAN_MTU_DEFAULT; /* 273*12*4 + 64*/
2831 nFpgaToSW_PRACH_RxBufferLen = XRAN_MTU_DEFAULT;
2832 nSW_ToFpga_FTH_TxBufferLen = 13168 + /* 273*12*4 + 64* + ETH AND ORAN HDRs */
2833 xran_max_sections_per_slot* (RTE_PKTMBUF_HEADROOM + sizeof(struct rte_ether_hdr) +
2834 sizeof(struct xran_ecpri_hdr) +
2835 sizeof(struct radio_app_common_hdr) +
2836 sizeof(struct data_section_hdr));
/* Numerology 3 case: smaller 3328-byte symbol payload, same header math. */
2838 } else if (p_o_xu_cfg->mu_number == 3) {
2839 if (p_o_xu_cfg->mtu > XRAN_MTU_DEFAULT) {
2840 nFpgaToSW_FTH_RxBufferLen = 3328;
2841 nFpgaToSW_PRACH_RxBufferLen = 8192;
2842 nSW_ToFpga_FTH_TxBufferLen = 3328 +
2843 xran_max_sections_per_slot * (RTE_PKTMBUF_HEADROOM + sizeof(struct rte_ether_hdr) +
2844 sizeof(struct xran_ecpri_hdr) +
2845 sizeof(struct radio_app_common_hdr) +
2846 sizeof(struct data_section_hdr));
2848 nFpgaToSW_FTH_RxBufferLen = XRAN_MTU_DEFAULT;
2849 nFpgaToSW_PRACH_RxBufferLen = XRAN_MTU_DEFAULT;
2850 nSW_ToFpga_FTH_TxBufferLen = 3328 +
2851 xran_max_sections_per_slot * (RTE_PKTMBUF_HEADROOM + sizeof(struct rte_ether_hdr) +
2852 sizeof(struct xran_ecpri_hdr) +
2853 sizeof(struct radio_app_common_hdr) +
2854 sizeof(struct data_section_hdr));
/* Any other numerology is reported but not configured. */
2857 printf("given numerology is not supported %d\n", p_o_xu_cfg->mu_number);
2860 printf("nSW_ToFpga_FTH_TxBufferLen %d\n", nSW_ToFpga_FTH_TxBufferLen);
/**
 * @brief Resolve a global cell id to its xRAN port by scanning the
 *        map_cell_id2port[port][cc] table for a matching entry, bounded by
 *        the number of O-RUs and CCs actually configured.
 *
 * @param p_xran_io  BBU/xRAN IO context holding the cell-id mapping table
 * @param cell_id    global cell id to look up (valid range checked against
 *                   XRAN_PORTS_NUM * XRAN_MAX_SECTOR_NR)
 * @param ret_cc_id  presumably receives the matching cc id on success —
 *                   the match/return statements are outside the visible
 *                   lines; confirm.
 */
2865 app_io_xran_map_cellid_to_port(struct bbu_xran_io_if * p_xran_io, uint32_t cell_id, uint32_t *ret_cc_id)
2871 if(cell_id < XRAN_PORTS_NUM*XRAN_MAX_SECTOR_NR) {
2872 for (port_id = 0 ; port_id < XRAN_PORTS_NUM && port_id < p_xran_io->num_o_ru; port_id++) {
2873 for(cc_id = 0; cc_id < XRAN_MAX_SECTOR_NR && cc_id < p_xran_io->num_cc_per_port[port_id]; cc_id++)
2874 if(cell_id == (uint32_t)p_xran_io->map_cell_id2port[port_id][cc_id]) {
/* No mapping found (or cell_id out of range): report the lookup failure. */
2884 printf("%s error [cell_id %d]\n", __FUNCTION__, cell_id);
/**
 * @brief BBU-offload RX indication: thin adapter that forwards the tag and
 *        status to the common application RX handler.
 */
2890 app_io_xran_fh_bbu_rx_callback(void *pCallbackTag, xran_status_t status)
2892 app_io_xran_fh_rx_callback(pCallbackTag, status);
/**
 * @brief BBU-offload beamforming-weights RX indication: forwards to the
 *        common application BFW RX handler.
 */
2896 app_io_xran_fh_bbu_rx_bfw_callback(void *pCallbackTag, xran_status_t status)
2898 app_io_xran_fh_rx_bfw_callback(pCallbackTag, status);
/**
 * @brief BBU-offload PRACH RX indication: forwards to the common
 *        application PRACH RX handler.
 */
2902 app_io_xran_fh_bbu_rx_prach_callback(void *pCallbackTag, xran_status_t status)
2904 app_io_xran_fh_rx_prach_callback(pCallbackTag, status);
/**
 * @brief BBU-offload SRS RX indication: forwards to the common application
 *        SRS RX handler.
 */
2908 app_io_xran_fh_bbu_rx_srs_callback(void *pCallbackTag, xran_status_t status)
2910 app_io_xran_fh_rx_srs_callback(pCallbackTag, status);