1 /******************************************************************************
3 * Copyright (c) 2020 Intel.
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
9 * http://www.apache.org/licenses/LICENSE-2.0
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
17 *******************************************************************************/
20 * @brief This module provides interface implementation to ORAN FH from Application side
21 * @file app_iof_fh_xran.c
23 * @author Intel Corporation
28 #include <sys/syscall.h>
29 #include <sys/sysinfo.h>
30 #include <immintrin.h>
33 #include "xran_mlog_lnx.h"
35 #include "xran_fh_o_du.h"
36 #include "xran_compression.h"
37 #include "xran_cp_api.h"
38 #include "xran_sync_api.h"
39 #include "xran_mlog_task_id.h"
40 #include "app_io_fh_xran.h"
/* Per-direction fronthaul buffer sizes in bytes; set during app init and
 * used below when creating the xran buffer-manager pools. */
uint32_t nFpgaToSW_FTH_RxBufferLen;
uint32_t nFpgaToSW_PRACH_RxBufferLen;
uint32_t nSW_ToFpga_FTH_TxBufferLen;
/* Singleton application<->xRAN I/O context; managed by
 * app_io_xran_if_alloc()/app_io_xran_if_get()/app_io_xran_if_free(). */
static struct bbu_xran_io_if *p_app_io_xran_if;
/* Opaque xRAN library handle plus per-O-xU-port init/runtime configuration. */
void * app_io_xran_handle = NULL;
struct xran_fh_init app_io_xran_fh_init;
struct xran_fh_config app_io_xran_fh_config[XRAN_PORTS_NUM];
53 void app_io_xran_fh_rx_callback(void *pCallbackTag, int32_t status);
54 void app_io_xran_fh_rx_prach_callback(void *pCallbackTag, int32_t status);
55 void app_io_xran_fh_rx_srs_callback(void *pCallbackTag, xran_status_t status);
/**
 * Allocate the singleton application<->xRAN I/O context.
 *
 * The backing store is 256-byte aligned (_mm_malloc). Allocation failure is
 * fatal for the sample app (rte_panic). Returns the cached singleton pointer.
 *
 * NOTE(review): this excerpt elides the NULL-check guard that should wrap the
 * rte_panic call — confirm against the full source.
 */
struct bbu_xran_io_if *
app_io_xran_if_alloc(void)
    ptr = _mm_malloc(sizeof(struct bbu_xran_io_if), 256);
        rte_panic("_mm_malloc: Can't allocate %lu bytes\n", sizeof(struct bbu_xran_io_if));
    /* Cache the allocation in the file-scope singleton and return it. */
    p_app_io_xran_if = (struct bbu_xran_io_if *)ptr;
    return p_app_io_xran_if;
70 struct bbu_xran_io_if *
71 app_io_xran_if_get(void)
73 return p_app_io_xran_if;
/* Release the singleton I/O context allocated by app_io_xran_if_alloc().
 * Freeing before allocation is treated as a fatal programming error. */
app_io_xran_if_free(void)
    if (p_app_io_xran_if == NULL) {
        rte_panic("_mm_free: Can't free p_app_io_xran_if\n");
    /* NOTE(review): consider resetting p_app_io_xran_if to NULL after the
     * free to guard against use-after-free — confirm against full source. */
    _mm_free(p_app_io_xran_if);
86 struct xran_io_shared_ctrl *
87 app_io_xran_if_ctrl_get(uint32_t o_xu_id)
89 if(o_xu_id >= 0 && o_xu_id < XRAN_PORTS_NUM) {
90 return &p_app_io_xran_if->ioCtrl[o_xu_id];
/* Derive the absolute subframe/slot index from the current xRAN timing.
 *
 * @param nNrOfSlotInSf  slots per subframe (1 << numerology).
 *
 * Queries the library's current frame/subframe/slot position for port 0 and
 * folds it into a flat slot counter. NOTE(review): the declarations of
 * nSfIdx/nFrameIdx/nSlotIdx/nSecond, the final `+ nSlotIdx` term, and the
 * return statement are elided in this excerpt — confirm against full source. */
app_io_xran_sfidx_get(uint8_t nNrOfSlotInSf)
    uint32_t nSubframeIdx;
    uint32_t nXranTime = xran_get_slot_idx(0, &nFrameIdx, &nSubframeIdx, &nSlotIdx, &nSecond);
    nSfIdx = nFrameIdx*NUM_OF_SUBFRAME_PER_FRAME*nNrOfSlotInSf
           + nSubframeIdx*nNrOfSlotInSf
    /* Debug trace of the raw timing readout. */
    printf("\nxranTime is %d, return is %d, radio frame is %d, subframe is %d slot is %d tsc is %llu us",
/* PUSCH (fronthaul RX) symbol callback invoked by the xRAN library.
 *
 * @param pCallbackTag  points at the xran_cb_tag registered per cell in
 *                      app_io_xran_interface() (cellId/symbol/slotiId).
 * @param status        library status word (unused in the visible lines).
 *
 * Logs cell/symbol/slot context into MLog for timing analysis. */
app_io_xran_fh_rx_callback(void *pCallbackTag, xran_status_t status)
    uint64_t t1 = MLogTick();
    uint32_t mlogVar[10];
    uint32_t mlogVarCnt = 0;
    /* Slots per subframe from O-xU port 0's numerology. */
    uint8_t Numerlogy = app_io_xran_fh_config[0].frame_conf.nNumerology;
    uint8_t nNrOfSlotInSf = 1<<Numerlogy;
    int32_t sfIdx = app_io_xran_sfidx_get(nNrOfSlotInSf);
    int32_t sym, nSlotIdx;
    uint64_t mlog_start, mlog_end;
    struct xran_cb_tag *pTag = (struct xran_cb_tag *) pCallbackTag;
    mlog_start = MLogTick();
    /* Unpack cell / slot / symbol from the registered tag. */
    nCellIdx = pTag->cellId;
    nSlotIdx = pTag->slotiId; ///((status >> 16) & 0xFFFF); /** TTI aka slotIdx */
    sym = pTag->symbol & 0xFF; /* sym */
    /* MLog record: 0xbcbcbcbc marker + cell/sym/slot context. */
    mlogVar[mlogVarCnt++] = 0xbcbcbcbc;
    mlogVar[mlogVarCnt++] = nCellIdx;
    mlogVar[mlogVarCnt++] = sym;
    mlogVar[mlogVarCnt++] = nSlotIdx;
    //mlogVar[mlogVarCnt++] = nSlotIdx % gNumSlotPerSfn[nCellIdx];
    //mlogVar[mlogVarCnt++] = get_slot_type(nCellIdx, nSlotIdx, SLOT_TYPE_UL);
    MLogAddVariables(mlogVarCnt, mlogVar, mlog_start);
    /* NOTE(review): sfIdx and mlog_end are unused in the visible lines —
     * their consumers may be on lines elided from this excerpt. */
    MLogTask(PID_GNB_SYM_CB, t1, MLogTick());
/* PRACH RX callback invoked by the xRAN library.
 *
 * Records a 0xDDDDDDDD-tagged MLog entry with TTI (status high half) and
 * symbol (status low byte), then closes the PID_GNB_PRACH_CB task span. */
app_io_xran_fh_rx_prach_callback(void *pCallbackTag, xran_status_t status)
    uint64_t t1 = MLogTick();
    uint32_t mlogVar[10];
    uint32_t mlogVarCnt = 0;
    mlogVar[mlogVarCnt++] = 0xDDDDDDDD;
    mlogVar[mlogVarCnt++] = status >> 16; /* tti */
    mlogVar[mlogVarCnt++] = status & 0xFF; /* sym */
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
    MLogTask(PID_GNB_PRACH_CB, t1, MLogTick());
/* SRS RX callback invoked by the xRAN library.
 *
 * Same shape as the PRACH callback: 0xCCCCCCCC-tagged MLog entry with TTI
 * and symbol decoded from the packed status word, then the task span. */
app_io_xran_fh_rx_srs_callback(void *pCallbackTag, xran_status_t status)
    uint64_t t1 = MLogTick();
    uint32_t mlogVar[10];
    uint32_t mlogVarCnt = 0;
    mlogVar[mlogVarCnt++] = 0xCCCCCCCC;
    mlogVar[mlogVarCnt++] = status >> 16; /* tti */
    mlogVar[mlogVarCnt++] = status & 0xFF; /* sym */
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
    MLogTask(PID_GNB_SRS_CB, t1, MLogTick());
/* DL TTI tick callback: only marks a PID_GNB_PROC_TIMING span in MLog.
 * NOTE(review): any body statements between the ticks are elided here. */
app_io_xran_dl_tti_call_back(void * param)
    uint64_t t1 = MLogTick();
    MLogTask(PID_GNB_PROC_TIMING, t1, MLogTick());
/* UL half-slot tick callback: marks a PID_GNB_PROC_TIMING span in MLog.
 * NOTE(review): any body statements between the ticks are elided here. */
app_io_xran_ul_half_slot_call_back(void * param)
    uint64_t t1 = MLogTick();
    MLogTask(PID_GNB_PROC_TIMING, t1, MLogTick());
/* UL full-slot tick callback: marks a PID_GNB_PROC_TIMING span in MLog.
 * NOTE(review): any body statements between the ticks are elided here. */
app_io_xran_ul_full_slot_call_back(void * param)
    uint64_t t1 = MLogTick();
    MLogTask(PID_GNB_PROC_TIMING, t1, MLogTick());
/* Custom per-symbol UL callback carrying the library's sense-of-time.
 *
 * Dumps the full timing snapshot (event type, symbol, TTI, frame, subframe,
 * slot, 64-bit seconds split into two 32-bit words) into an MLog record
 * tagged 0xDEADDEAD, then closes a task span offset by the symbol index so
 * each symbol gets a distinct MLog task id. */
app_io_xran_ul_custom_sym_call_back(void * param, struct xran_sense_of_time* time)
    uint64_t t1 = MLogTick();
    uint32_t mlogVar[15];
    uint32_t mlogVarCnt = 0;
    uint32_t sym_idx = 0;
    mlogVar[mlogVarCnt++] = 0xDEADDEAD;
    mlogVar[mlogVarCnt++] = time->type_of_event;
    mlogVar[mlogVarCnt++] = time->nSymIdx;
    mlogVar[mlogVarCnt++] = time->tti_counter;
    mlogVar[mlogVarCnt++] = time->nFrameIdx;
    mlogVar[mlogVarCnt++] = time->nSubframeIdx;
    mlogVar[mlogVarCnt++] = time->nSlotIdx;
    /* 64-bit epoch seconds logged as low word then high word. */
    mlogVar[mlogVarCnt++] = (uint32_t)(time->nSecond);
    mlogVar[mlogVarCnt++] = (uint32_t)(time->nSecond >> 32);
    sym_idx = time->nSymIdx;
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
    /* Task id is offset by symbol so traces separate per symbol. */
    MLogTask(PID_GNB_SYM_CB + sym_idx, t1, MLogTick());
/* Build the full application<->xRAN buffer/callback plumbing for one O-xU.
 *
 * For each component carrier (CC) this allocates buffer-manager pools and
 * wires per-TTI / per-antenna / per-symbol buffer lists for:
 *   - DL (TX) IQ data + PRB maps + section descriptors,
 *   - UL (RX) IQ data + PRB maps + section descriptors,
 *   - PRACH RX (+ decompressed shadow buffers),
 *   - SRS RX (+ PRB maps + section descriptors, only when antenna array
 *     elements are configured),
 * then registers the PUSCH/PRACH/SRS callbacks with the xRAN library.
 *
 * @param o_xu_id    O-xU port index.
 * @param p_o_xu_cfg per-O-xU runtime configuration (CC/antenna counts,
 *                   PRB map templates, app mode).
 * @param p_use_cfg  use-case configuration (not referenced in visible lines).
 *
 * NOTE(review): this excerpt elides many closing braces, the local
 * declarations of ptr/mb/u32dptr/sd_ptr/sd_mb/elm_id/nSectorNum, and some
 * statements — structure below follows the visible lines only. */
app_io_xran_interface(uint32_t o_xu_id, RuntimeConfig *p_o_xu_cfg, UsecaseConfig* p_use_cfg)
    xran_status_t status;
    struct bbu_xran_io_if *psBbuIo = app_io_xran_if_get();
    struct xran_io_shared_ctrl *psIoCtrl = app_io_xran_if_ctrl_get(o_xu_id);
    int32_t nSectorIndex[XRAN_MAX_SECTOR_NR];
    int32_t i, j, k, m, z;
    /* Worst-case sizing across DL/UL antennas, SRS array elements, sections. */
    uint32_t xran_max_antenna_nr = RTE_MAX(p_o_xu_cfg->numAxc, p_o_xu_cfg->numUlAxc);
    uint32_t xran_max_ant_array_elm_nr = RTE_MAX(p_o_xu_cfg->antElmTRx, xran_max_antenna_nr);
    uint32_t xran_max_sections_per_slot = RTE_MAX(p_o_xu_cfg->max_sections_per_slot, XRAN_MIN_SECTIONS_PER_SLOT);
    /* xran_prb_map already embeds one xran_prb_elm, hence the (n - 1). */
    uint32_t size_of_prb_map = sizeof(struct xran_prb_map) + sizeof(struct xran_prb_elm)*(xran_max_sections_per_slot - 1);
    SWXRANInterfaceTypeEnum eInterfaceType;
    /* Scratch tables of buffer-list pointers, handed to the xRAN config calls
     * at the bottom; indexed [sector][antenna][buffer]. */
    struct xran_buffer_list *pFthTxBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
    struct xran_buffer_list *pFthTxPrbMapBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
    struct xran_buffer_list *pFthRxBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
    struct xran_buffer_list *pFthRxPrbMapBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
    struct xran_buffer_list *pFthRxRachBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
    struct xran_buffer_list *pFthRxRachBufferDecomp[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANTENNA_NR][XRAN_N_FE_BUF_LEN];
    struct xran_buffer_list *pFthRxSrsBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANT_ARRAY_ELM_NR][XRAN_N_FE_BUF_LEN];
    struct xran_buffer_list *pFthRxSrsPrbMapBuffer[XRAN_MAX_SECTOR_NR][XRAN_MAX_ANT_ARRAY_ELM_NR][XRAN_N_FE_BUF_LEN];
    /* Sanity: both context pointers must exist before any pool setup. */
        rte_panic("psBbuIo == NULL\n");
        rte_panic("psIoCtrl == NULL\n");
    for (nSectorNum = 0; nSectorNum < XRAN_MAX_SECTOR_NR; nSectorNum++)
        nSectorIndex[nSectorNum] = nSectorNum;
    nSectorNum = p_o_xu_cfg->numCC;
    /* --- xRAN memory-manager init and per-CC instance handles --- */
    printf ("XRAN front haul xran_mm_init \n");
    status = xran_mm_init (app_io_xran_handle, (uint64_t) SW_FPGA_FH_TOTAL_BUFFER_LEN, SW_FPGA_SEGMENT_BUFFER_LEN);
    if (status != XRAN_STATUS_SUCCESS)
        printf ("Failed at XRAN front haul xran_mm_init \n");
    psBbuIo->nInstanceNum[o_xu_id] = p_o_xu_cfg->numCC;
    if (o_xu_id < XRAN_PORTS_NUM) {
        status = xran_sector_get_instances (o_xu_id, app_io_xran_handle, psBbuIo->nInstanceNum[o_xu_id], &psBbuIo->nInstanceHandle[o_xu_id][0]);
        if (status != XRAN_STATUS_SUCCESS) {
            /* NOTE(review): 'k' is used before any assignment in the next two
             * printfs — confirm against the full source. */
            printf ("get sector instance failed %d for XRAN nInstanceNum[%d] %d\n",k, psBbuIo->nInstanceNum[o_xu_id], o_xu_id);
        for (i = 0; i < psBbuIo->nInstanceNum[o_xu_id]; i++) {
            printf("%s [%d]: CC %d handle %p\n", __FUNCTION__, k, i, psBbuIo->nInstanceHandle[o_xu_id][i]);
        printf ("Failed at XRAN front haul xran_mm_init \n");
    printf("Sucess xran_mm_init \n");
    printf("nSectorNum %d\n", nSectorNum);
    printf("xran_max_sections_per_slot %d\n", xran_max_sections_per_slot);
    /* --- DL (TX) pools per CC: IQ, section descriptors, PRB maps --- */
    for(i = 0; i < nSectorNum; i++)
        eInterfaceType = XRANFTHTX_OUT;
        printf("nSectorIndex[%d] = %d\n",i, nSectorIndex[i]);
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
            XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT, nSW_ToFpga_FTH_TxBufferLen);
        if(XRAN_STATUS_SUCCESS != status) {
            rte_panic("Failed at xran_bm_init , status %d\n", status);
        /* One buffer list per [TTI][CC][antenna]; one buffer per symbol. */
        for(j = 0; j < XRAN_N_FE_BUF_LEN; j++)
            for(z = 0; z < xran_max_antenna_nr; z++){
                psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].bValid = 0;
                psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
                psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
                psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
                psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
                psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulTxBuffers[j][i][z][0];
                for(k = 0; k < XRAN_NUM_OF_SYMBOL_PER_SLOT; k++)
                    psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nElementLenInBytes = nSW_ToFpga_FTH_TxBufferLen; // 14 symbols 3200bytes/symbol
                    psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nNumberOfElements = 1;
                    psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nOffsetInBytes = 0;
                    status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i], psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
                    if(XRAN_STATUS_SUCCESS != status){
                        rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
                    psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData = (uint8_t *)ptr;
                    psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pCtrl = (void *)mb;
                    /* Zero the freshly allocated symbol buffer. */
                    u32dptr = (uint32_t*)(ptr);
                    memset(u32dptr, 0x0, nSW_ToFpga_FTH_TxBufferLen);
                    // ptr_temp[0] = j; // TTI
                    // ptr_temp[1] = i; // Sec
                    // ptr_temp[2] = z; // Ant
                    // ptr_temp[3] = k; // sym
        /* DL section-descriptor pool (one desc per fragment per section). */
        eInterfaceType = XRANFTHTX_SEC_DESC_OUT;
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
            XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT*xran_max_sections_per_slot*XRAN_MAX_FRAGMENT, sizeof(struct xran_section_desc));
        if(XRAN_STATUS_SUCCESS != status) {
            rte_panic("Failed at xran_bm_init , status %d\n", status);
        printf("size_of_prb_map %d\n", size_of_prb_map);
        /* DL PRB-map pool; maps are seeded from the configured templates. */
        eInterfaceType = XRANFTHTX_PRB_MAP_OUT;
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
            XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT, size_of_prb_map);
        if(XRAN_STATUS_SUCCESS != status) {
            rte_panic("Failed at xran_bm_init , status %d\n", status);
        for(j = 0; j < XRAN_N_FE_BUF_LEN; j++)
            for(z = 0; z < xran_max_antenna_nr; z++){
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulTxPrbMapBuffers[j][i][z];
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nElementLenInBytes = size_of_prb_map;
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nNumberOfElements = 1;
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nOffsetInBytes = 0;
                status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i], psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
                if(XRAN_STATUS_SUCCESS != status) {
                    rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pData = (uint8_t *)ptr;
                psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pCtrl = (void *)mb;
                /* Seed the map: O-DU uses the DL template, otherwise UL
                 * (direction is mirrored for the O-RU side). */
                struct xran_prb_map * p_rb_map = (struct xran_prb_map *)ptr;
                if (p_o_xu_cfg->appMode == APP_O_DU) {
                    if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
                        memcpy(ptr, p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_DL][j][i][z], size_of_prb_map);
                        memcpy(ptr, p_o_xu_cfg->p_PrbMapDl, size_of_prb_map);
                    if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
                        memcpy(ptr, p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_UL][j][i][z], size_of_prb_map);
                        memcpy(ptr, p_o_xu_cfg->p_PrbMapUl, size_of_prb_map);
                /* Attach a zeroed section descriptor to every
                 * [symbol][fragment] slot of each PRB element. */
                for (elm_id = 0; elm_id < p_rb_map->nPrbElm; elm_id++){
                    struct xran_prb_elm *pPrbElem = &p_rb_map->prbMap[elm_id];
                    for(k = 0; k < XRAN_NUM_OF_SYMBOL_PER_SLOT; k++){
                        for(m = 0; m < XRAN_MAX_FRAGMENT; m++){
                            status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i], psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][XRANFTHTX_SEC_DESC_OUT],&sd_ptr, &sd_mb);
                            if(XRAN_STATUS_SUCCESS != status){
                                rte_panic("SD Failed at DESC_OUT xran_bm_allocate_buffer , m %d k %d elm_id %d\n",m,k, elm_id);
                            pPrbElem->p_sec_desc[k][m] = sd_ptr;
                            memset(sd_ptr,0,sizeof(struct xran_section_desc));
    /* --- UL (RX) pools per CC: IQ, section descriptors, PRB maps --- */
    for(i = 0; i<nSectorNum; i++)
        eInterfaceType = XRANFTHRX_IN;
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType], XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT, nSW_ToFpga_FTH_TxBufferLen);
        if(XRAN_STATUS_SUCCESS != status)
            printf("Failed at xran_bm_init, status %d\n", status);
        iAssert(status == XRAN_STATUS_SUCCESS);
        for(j = 0;j < XRAN_N_FE_BUF_LEN; j++)
            for(z = 0; z < xran_max_antenna_nr; z++){
                psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].bValid = 0;
                psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
                psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
                psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
                psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
                psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulRxBuffers[j][i][z][0];
                for(k = 0; k< XRAN_NUM_OF_SYMBOL_PER_SLOT; k++)
                    psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nElementLenInBytes = nFpgaToSW_FTH_RxBufferLen; // 1 symbols 3200bytes
                    psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nNumberOfElements = 1;
                    psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nOffsetInBytes = 0;
                    status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
                    if(XRAN_STATUS_SUCCESS != status) {
                        rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
                    psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData = (uint8_t *)ptr;
                    psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pCtrl = (void *) mb;
                    u32dptr = (uint32_t*)(ptr);
                    uint8_t *ptr_temp = (uint8_t *)ptr;
                    memset(u32dptr, 0x0, nFpgaToSW_FTH_RxBufferLen);
                    // ptr_temp[0] = j; // TTI
                    // ptr_temp[1] = i; // Sec
                    // ptr_temp[2] = z; // Ant
                    // ptr_temp[3] = k; // sym
        /* UL section-descriptor and PRB-map pools. */
        eInterfaceType = XRANFTHTX_SEC_DESC_IN;
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
            XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT*xran_max_sections_per_slot*XRAN_MAX_FRAGMENT, sizeof(struct xran_section_desc));
        if(XRAN_STATUS_SUCCESS != status) {
            rte_panic("Failed at xran_bm_init , status %d\n", status);
        eInterfaceType = XRANFTHRX_PRB_MAP_IN;
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
            XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT, size_of_prb_map);
        if(XRAN_STATUS_SUCCESS != status) {
            rte_panic("Failed at xran_bm_init, status %d\n", status);
        for(j = 0;j < XRAN_N_FE_BUF_LEN; j++) {
            for(z = 0; z < xran_max_antenna_nr; z++){
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFrontHaulRxPrbMapBuffers[j][i][z];
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nElementLenInBytes = size_of_prb_map;
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nNumberOfElements = 1;
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nOffsetInBytes = 0;
                status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
                if(XRAN_STATUS_SUCCESS != status) {
                    rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pData = (uint8_t *)ptr;
                psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pCtrl = (void *)mb;
                /* Mirror of the TX seeding: O-DU side uses UL template here. */
                struct xran_prb_map * p_rb_map = (struct xran_prb_map *)ptr;
                if (p_o_xu_cfg->appMode == APP_O_DU) {
                    if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
                        memcpy(ptr, p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_UL][j][i][z], size_of_prb_map);
                        memcpy(ptr, p_o_xu_cfg->p_PrbMapUl, size_of_prb_map);
                    if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
                        memcpy(ptr, p_o_xu_cfg->p_RunSlotPrbMap[XRAN_DIR_DL][j][i][z], size_of_prb_map);
                        memcpy(ptr, p_o_xu_cfg->p_PrbMapDl, size_of_prb_map);
                for (elm_id = 0; elm_id < p_rb_map->nPrbElm; elm_id++){
                    struct xran_prb_elm *pPrbElem = &p_rb_map->prbMap[elm_id];
                    for(k = 0; k < XRAN_NUM_OF_SYMBOL_PER_SLOT; k++){
                        for(m = 0; m < XRAN_MAX_FRAGMENT; m++){
                            status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i], psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][XRANFTHTX_SEC_DESC_IN],&sd_ptr, &sd_mb);
                            if(XRAN_STATUS_SUCCESS != status){
                                rte_panic("SD Failed at DESC_IN xran_bm_allocate_buffer , m %d k %d\n",m,k);
                            pPrbElem->p_sec_desc[k][m] = sd_ptr;
                            memset(sd_ptr,0,sizeof(struct xran_section_desc));
    // add prach rx buffer
    for(i = 0; i<nSectorNum; i++)
        eInterfaceType = XRANFTHRACH_IN;
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i],&psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],XRAN_N_FE_BUF_LEN*xran_max_antenna_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT, PRACH_PLAYBACK_BUFFER_BYTES);
        if(XRAN_STATUS_SUCCESS != status) {
            rte_panic("Failed at xran_bm_init, status %d\n", status);
        for(j = 0;j < XRAN_N_FE_BUF_LEN; j++)
            for(z = 0; z < xran_max_antenna_nr; z++){
                psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].bValid = 0;
                psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
                psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
                psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
                /* NOTE(review): nNumBuffers is the antenna count here while
                 * other lists use symbols-per-slot — matches upstream comment
                 * but worth confirming. */
                psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = xran_max_antenna_nr; // ant number.
                psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFHPrachRxBuffers[j][i][z][0];
                psIoCtrl->sFHPrachRxBbuIoBufCtrlDecomp[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFHPrachRxBuffersDecomp[j][i][z][0];
                for(k = 0; k< XRAN_NUM_OF_SYMBOL_PER_SLOT; k++)
                    psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nElementLenInBytes = PRACH_PLAYBACK_BUFFER_BYTES;
                    psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nNumberOfElements = 1;
                    psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nOffsetInBytes = 0;
                    /* Only the O-RU side owns PRACH payload buffers; the
                     * decomp list aliases the same allocation. */
                    if (p_o_xu_cfg->appMode == APP_O_RU) {
                        status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
                        if(XRAN_STATUS_SUCCESS != status) {
                            rte_panic("Failed at xran_bm_allocate_buffer, status %d\n",status);
                        psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData = (uint8_t *)ptr;
                        psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pCtrl = (void *)mb;
                        u32dptr = (uint32_t*)(ptr);
                        memset(u32dptr, 0x0, PRACH_PLAYBACK_BUFFER_BYTES);
                        psIoCtrl->sFHPrachRxBbuIoBufCtrlDecomp[j][i][z].sBufferList.pBuffers[k].pData= (uint8_t *)ptr;
    /* add SRS rx buffer */
    printf("%s:%d: xran_max_ant_array_elm_nr %d\n", __FUNCTION__, __LINE__, xran_max_ant_array_elm_nr);
    /* SRS setup is skipped entirely when no antenna array elements exist. */
    for(i = 0; i<nSectorNum && xran_max_ant_array_elm_nr; i++) {
        eInterfaceType = XRANSRS_IN;
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i],&psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
            XRAN_N_FE_BUF_LEN*xran_max_ant_array_elm_nr*XRAN_MAX_NUM_OF_SRS_SYMBOL_PER_SLOT, nSW_ToFpga_FTH_TxBufferLen);
        if(XRAN_STATUS_SUCCESS != status) {
            rte_panic("Failed at xran_bm_init, status %d\n", status);
        for(j = 0; j < XRAN_N_FE_BUF_LEN; j++) {
            for(z = 0; z < xran_max_ant_array_elm_nr; z++){
                psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].bValid = 0;
                psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
                psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
                psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
                psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = xran_max_ant_array_elm_nr; /* ant number */
                psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFHSrsRxBuffers[j][i][z][0];
                for(k = 0; k < XRAN_MAX_NUM_OF_SRS_SYMBOL_PER_SLOT; k++)
                    psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nElementLenInBytes = nSW_ToFpga_FTH_TxBufferLen;
                    psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nNumberOfElements = 1;
                    psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].nOffsetInBytes = 0;
                    status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
                    if(XRAN_STATUS_SUCCESS != status) {
                        rte_panic("Failed at xran_bm_allocate_buffer, status %d\n",status);
                    psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pData = (uint8_t *)ptr;
                    psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList.pBuffers[k].pCtrl = (void *)mb;
                    u32dptr = (uint32_t*)(ptr);
                    memset(u32dptr, 0x0, nSW_ToFpga_FTH_TxBufferLen);
        /* SRS section-descriptor and PRB-map pools. */
        eInterfaceType = XRANSRS_SEC_DESC_IN;
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
            XRAN_N_FE_BUF_LEN*xran_max_ant_array_elm_nr*xran_max_sections_per_slot*XRAN_NUM_OF_SYMBOL_PER_SLOT*XRAN_MAX_FRAGMENT, sizeof(struct xran_section_desc));
        if(XRAN_STATUS_SUCCESS != status) {
            rte_panic("Failed at xran_bm_init , status %d\n", status);
        eInterfaceType = XRANSRS_PRB_MAP_IN;
        status = xran_bm_init(psBbuIo->nInstanceHandle[o_xu_id][i], &psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],
            XRAN_N_FE_BUF_LEN*xran_max_ant_array_elm_nr*XRAN_NUM_OF_SYMBOL_PER_SLOT, size_of_prb_map);
        if(XRAN_STATUS_SUCCESS != status) {
            rte_panic("Failed at xran_bm_init, status %d\n", status);
        for(j = 0;j < XRAN_N_FE_BUF_LEN; j++) {
            for(z = 0; z < xran_max_ant_array_elm_nr; z++) {
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].bValid = 0;
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].nSegGenerated = -1;
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].nSegToBeGen = -1;
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].nSegTransferred = 0;
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.nNumBuffers = XRAN_NUM_OF_SYMBOL_PER_SLOT;
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers = &psIoCtrl->sFHSrsRxPrbMapBuffers[j][i][z];
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nElementLenInBytes = size_of_prb_map;
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nNumberOfElements = 1;
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->nOffsetInBytes = 0;
                status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i],psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][eInterfaceType],&ptr, &mb);
                if(XRAN_STATUS_SUCCESS != status) {
                    rte_panic("Failed at xran_bm_allocate_buffer , status %d\n",status);
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pData = (uint8_t *)ptr;
                psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList.pBuffers->pCtrl = (void *)mb;
                struct xran_prb_map * p_rb_map = (struct xran_prb_map *)ptr;
                if (p_o_xu_cfg->appMode == APP_O_DU) {
                    if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
                        memcpy(ptr, p_o_xu_cfg->p_RunSrsSlotPrbMap[XRAN_DIR_UL][j][i][z], size_of_prb_map);
                        memcpy(ptr, p_o_xu_cfg->p_PrbMapSrs, size_of_prb_map);
                    if(p_o_xu_cfg->RunSlotPrbMapEnabled) {
                        memcpy(ptr, p_o_xu_cfg->p_RunSrsSlotPrbMap[XRAN_DIR_DL][j][i][z], size_of_prb_map);
                        memcpy(ptr, p_o_xu_cfg->p_PrbMapSrs, size_of_prb_map);
                for (elm_id = 0; elm_id < p_rb_map->nPrbElm; elm_id++){
                    struct xran_prb_elm *pPrbElem = &p_rb_map->prbMap[elm_id];
                    for(k = 0; k < XRAN_NUM_OF_SYMBOL_PER_SLOT; k++){
                        for(m = 0; m < XRAN_MAX_FRAGMENT; m++){
                            status = xran_bm_allocate_buffer(psBbuIo->nInstanceHandle[o_xu_id][i], psBbuIo->nBufPoolIndex[o_xu_id][nSectorIndex[i]][XRANSRS_SEC_DESC_IN],&sd_ptr, &sd_mb);
                            if(XRAN_STATUS_SUCCESS != status){
                                rte_panic("SD Failed at SRS_SEC_DESC_IN xran_bm_allocate_buffer , m %d k %d\n",m,k);
                            pPrbElem->p_sec_desc[k][m] = sd_ptr;
                            memset(sd_ptr,0,sizeof(struct xran_section_desc));
    /* --- NULL-init the scratch pointer tables, then point them at the
     *     per-[TTI][CC][ant] buffer lists built above --- */
    for(i=0; i<nSectorNum; i++)
        for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
            for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
                pFthTxBuffer[i][z][j] = NULL;
                pFthTxPrbMapBuffer[i][z][j] = NULL;
                pFthRxBuffer[i][z][j] = NULL;
                pFthRxPrbMapBuffer[i][z][j] = NULL;
                pFthRxRachBuffer[i][z][j] = NULL;
                pFthRxRachBufferDecomp[i][z][j] = NULL;
            for(z = 0; z < XRAN_MAX_ANT_ARRAY_ELM_NR; z++){
                pFthRxSrsBuffer[i][z][j] = NULL;
                pFthRxSrsPrbMapBuffer[i][z][j] = NULL;
    for(i=0; i<nSectorNum; i++)
        for(j=0; j<XRAN_N_FE_BUF_LEN; j++)
            for(z = 0; z < XRAN_MAX_ANTENNA_NR; z++){
                pFthTxBuffer[i][z][j] = &(psIoCtrl->sFrontHaulTxBbuIoBufCtrl[j][i][z].sBufferList);
                pFthTxPrbMapBuffer[i][z][j] = &(psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[j][i][z].sBufferList);
                pFthRxBuffer[i][z][j] = &(psIoCtrl->sFrontHaulRxBbuIoBufCtrl[j][i][z].sBufferList);
                pFthRxPrbMapBuffer[i][z][j] = &(psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList);
                pFthRxRachBuffer[i][z][j] = &(psIoCtrl->sFHPrachRxBbuIoBufCtrl[j][i][z].sBufferList);
                pFthRxRachBufferDecomp[i][z][j] = &(psIoCtrl->sFHPrachRxBbuIoBufCtrlDecomp[j][i][z].sBufferList);
            for(z = 0; z < XRAN_MAX_ANT_ARRAY_ELM_NR && xran_max_ant_array_elm_nr; z++){
                pFthRxSrsBuffer[i][z][j] = &(psIoCtrl->sFHSrsRxBbuIoBufCtrl[j][i][z].sBufferList);
                pFthRxSrsPrbMapBuffer[i][z][j] = &(psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[j][i][z].sBufferList);
    /* --- Register the per-CC callbacks with the xRAN library --- */
    if(NULL != psBbuIo->nInstanceHandle[o_xu_id])
        /* add pusch callback */
        for (i = 0; i<nSectorNum; i++)
            psBbuIo->RxCbTag[o_xu_id][i].cellId = i;
            psBbuIo->RxCbTag[o_xu_id][i].symbol = 0;
            psBbuIo->RxCbTag[o_xu_id][i].slotiId = 0;
            /* NOTE(review): the TX/RX IQ buffer-list arguments appear on
             * lines elided from this excerpt. */
            xran_5g_fronthault_config (psBbuIo->nInstanceHandle[o_xu_id][i],
                pFthTxPrbMapBuffer[i],
                pFthRxPrbMapBuffer[i],
                app_io_xran_fh_rx_callback, &psBbuIo->RxCbTag[o_xu_id][i]);
        /* add prach callback here */
        for (i = 0; i<nSectorNum; i++)
            psBbuIo->PrachCbTag[o_xu_id][i].cellId = i;
            psBbuIo->PrachCbTag[o_xu_id][i].symbol = 0;
            psBbuIo->PrachCbTag[o_xu_id][i].slotiId = 0;
            xran_5g_prach_req(psBbuIo->nInstanceHandle[o_xu_id][i], pFthRxRachBuffer[i],pFthRxRachBufferDecomp[i],
                app_io_xran_fh_rx_prach_callback,&psBbuIo->PrachCbTag[o_xu_id][i]);
        /* add SRS callback here */
        for (i = 0; i<nSectorNum && xran_max_ant_array_elm_nr; i++) {
            psBbuIo->SrsCbTag[o_xu_id][i].cellId = i;
            psBbuIo->SrsCbTag[o_xu_id][i].symbol = 0;
            psBbuIo->SrsCbTag[o_xu_id][i].slotiId = 0;
            xran_5g_srs_req(psBbuIo->nInstanceHandle[o_xu_id][i], pFthRxSrsBuffer[i], pFthRxSrsPrbMapBuffer[i],
                app_io_xran_fh_rx_srs_callback,&psBbuIo->SrsCbTag[o_xu_id][i]);
/* Prepare section extension type-11 (beamforming weights) for one PRB element.
 *
 * @param p_pRbMapElm        PRB map element whose bf_weight block is filled.
 * @param p_tx_dl_bfw_buffer source IQ buffer holding the DL BFW sets.
 * @param mtu                packet MTU; becomes the external buffer cap.
 *
 * Allocates the external C-plane buffer, verifies the configured number of
 * BFW sets fits within the MTU, points each set at its slice of the source
 * buffer with sequential beam IDs (0x7000+i), then packs the BFWs via
 * xran_cp_prepare_ext11_bfws(). Any failure is fatal (rte_panic).
 *
 * NOTE(review): this excerpt elides the declarations of extbuf/i, the
 * NULL-check around the panic, trailing call arguments, and the return. */
app_io_xran_ext_type11_populate(struct xran_prb_elm* p_pRbMapElm, int16_t *p_tx_dl_bfw_buffer, uint32_t mtu)
    xran_status_t status = XRAN_STATUS_SUCCESS;
    int32_t n_max_set_bfw;
    p_pRbMapElm->bf_weight.maxExtBufSize = mtu; /* MAX_RX_LEN; */ /* Maximum space of external buffer */
    extbuf = (uint8_t*)xran_malloc(p_pRbMapElm->bf_weight.maxExtBufSize);
        rte_panic("xran_malloc return NULL\n");
    /* Check BFWs can be fit with MTU size */
    n_max_set_bfw = xran_cp_estimate_max_set_bfws(p_pRbMapElm->bf_weight.nAntElmTRx,
                                                  p_pRbMapElm->bf_weight.bfwIqWidth,
                                                  p_pRbMapElm->bf_weight.bfwCompMeth,
    if(p_pRbMapElm->bf_weight.numSetBFWs > n_max_set_bfw) {
        /* PRB elm doesn't fit into packet MTU size */
        rte_panic("BFWs are too large with MTU %d! (cfg:%d / max:%d)\n",
                  mtu, p_pRbMapElm->bf_weight.numSetBFWs, n_max_set_bfw);
    /* Configure source buffer and beam ID of BFWs */
    for(i = 0; i < p_pRbMapElm->bf_weight.numSetBFWs; i++) {
        /* Each set is nAntElmTRx complex (I/Q int16) samples wide. */
        p_pRbMapElm->bf_weight.bfw[i].pBFWs = (uint8_t *)(p_tx_dl_bfw_buffer + p_pRbMapElm->bf_weight.nAntElmTRx*2*i);
        p_pRbMapElm->bf_weight.bfw[i].beamId = 0x7000+i;
    /* Pack the BFW sets into the external buffer; returns packed size. */
    n_max_set_bfw = xran_cp_prepare_ext11_bfws(p_pRbMapElm->bf_weight.numSetBFWs,
                                               p_pRbMapElm->bf_weight.nAntElmTRx,
                                               p_pRbMapElm->bf_weight.bfwIqWidth,
                                               p_pRbMapElm->bf_weight.bfwCompMeth,
                                               p_pRbMapElm->bf_weight.maxExtBufSize,
                                               p_pRbMapElm->bf_weight.bfw);
    if(n_max_set_bfw > 0) {
        p_pRbMapElm->bf_weight.ext_section_sz = n_max_set_bfw;
        p_pRbMapElm->bf_weight.p_ext_start = (int8_t *)extbuf;
        rte_panic("Fail to prepare BFWs for extension 11!\n");
/* Initialize a PRB map with a single full-band element for static C-plane.
 *
 * Fills the map header (port 0, given CC/antenna/TTI) and element 0 as one
 * uncompressed allocation of nRBs PRBs spanning all 14 symbols, with the
 * section descriptor for sym_id set to offset 0 / length nRBs*4 bytes
 * (4 bytes per PRB unit here).
 *
 * NOTE(review): the 'dir' parameter is not referenced in the visible lines —
 * its use may be on lines elided from this excerpt. */
app_io_xran_iq_content_init_cp_rb_map(struct xran_prb_map* pRbMap,
    enum xran_pkt_dir dir, int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t tti, uint16_t nRBs)
    pRbMap->xran_port = 0;
    pRbMap->cc_id = cc_id;
    pRbMap->ru_port_id = ant_id;
    pRbMap->tti_id = tti;
    pRbMap->start_sym_id = 0;
    /* Single element covering the whole bandwidth and slot. */
    pRbMap->prbMap[0].nRBStart = 0;
    pRbMap->prbMap[0].nRBSize = nRBs;
    pRbMap->prbMap[0].nStartSymb = 0;
    pRbMap->prbMap[0].numSymb = 14;
    pRbMap->prbMap[0].p_sec_desc[sym_id][0]->iq_buffer_offset = 0;
    pRbMap->prbMap[0].p_sec_desc[sym_id][0]->iq_buffer_len = nRBs *4L;
    pRbMap->prbMap[0].nBeamIndex = 0;
    pRbMap->prbMap[0].compMethod = XRAN_COMPMETHOD_NONE;
853 app_io_xran_iq_content_init_cp_tx(uint8_t appMode, struct xran_fh_config *pXranConf,
854 struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
855 int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t tti, int32_t flowId)
858 struct xran_prb_map* pRbMap = NULL;
860 if(p_iq->p_tx_play_buffer[flowId]) {
861 pRbMap = (struct xran_prb_map *) psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers->pData;
863 if (pXranConf->DynamicSectionEna == 0) {
864 app_io_xran_iq_content_init_cp_rb_map(pRbMap, XRAN_DIR_DL, cc_id, ant_id, sym_id, tti, pXranConf->nDLRBs);
865 } else if(pXranConf->ru_conf.xranCat == XRAN_CATEGORY_B
866 && appMode == APP_O_DU
867 && sym_id == 0) { /* BFWs are per slot */
870 char* dl_bfw_pos = ((char*)p_iq->p_tx_dl_bfw_buffer[flowId]) + p_iq->tx_dl_bfw_buffer_position[flowId];
871 struct xran_prb_elm* p_pRbMapElm = NULL;
873 for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) {
874 p_pRbMapElm = &pRbMap->prbMap[idxElm];
875 p_pRbMapElm->bf_weight.nAntElmTRx = pXranConf->nAntElmTRx;
877 if(p_pRbMapElm->BeamFormingType == XRAN_BEAM_WEIGHT && p_pRbMapElm->bf_weight_update) {
878 if(p_pRbMapElm->bf_weight.numBundPrb == 0) {
879 /* No bundled PRB, using extension 1 */
880 int16_t ext_len = 9600;
881 int16_t ext_sec_total = 0;
882 int8_t * ext_buf =(int8_t*) xran_malloc(ext_len);
883 int8_t * ext_buf_start = ext_buf;
885 ext_buf += (RTE_PKTMBUF_HEADROOM +
886 sizeof(struct xran_ecpri_hdr) +
887 sizeof(struct xran_cp_radioapp_section1_header) +
888 sizeof(struct xran_cp_radioapp_section1));
890 ext_len -= (RTE_PKTMBUF_HEADROOM +
891 sizeof(struct xran_ecpri_hdr) +
892 sizeof(struct xran_cp_radioapp_section1_header) +
893 sizeof(struct xran_cp_radioapp_section1));
895 ext_sec_total = xran_cp_populate_section_ext_1((int8_t *)ext_buf,
897 (int16_t *) (dl_bfw_pos + (p_pRbMapElm->nRBStart*p_pRbMapElm->bf_weight.nAntElmTRx)*4),
898 p_pRbMapElm->nRBSize,
899 p_pRbMapElm->bf_weight.nAntElmTRx,
900 p_pRbMapElm->iqWidth, p_pRbMapElm->compMethod);
901 if(ext_sec_total > 0) {
902 p_pRbMapElm->bf_weight.p_ext_start = ext_buf_start;
903 p_pRbMapElm->bf_weight.p_ext_section = ext_buf;
904 p_pRbMapElm->bf_weight.ext_section_sz = ext_sec_total;
906 rte_panic("xran_cp_populate_section_ext_1 return error [%d]\n", ext_sec_total);
908 rte_panic("xran_malloc return NULL\n");
911 app_io_xran_ext_type11_populate(p_pRbMapElm, p_iq->p_tx_dl_bfw_buffer[flowId], app_io_xran_fh_init.mtu);
917 printf("DL pRbMap ==NULL\n");
921 if(pXranConf->ru_conf.xranCat == XRAN_CATEGORY_B && appMode == APP_O_DU && sym_id == 0) {
922 p_iq->tx_dl_bfw_buffer_position[flowId] += (pXranConf->nDLRBs*pXranConf->nAntElmTRx)*4;
923 if(p_iq->tx_dl_bfw_buffer_position[flowId] >= p_iq->tx_dl_bfw_buffer_size[flowId])
924 p_iq->tx_dl_bfw_buffer_position[flowId] = 0;
927 //printf("flowId %d\n", flowId);
935 app_io_xran_iq_content_init_cp_rx(uint8_t appMode, struct xran_fh_config *pXranConf,
936 struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
937 int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t tti, int32_t flowId)
940 struct xran_prb_map* pRbMap = NULL;
944 pRbMap = (struct xran_prb_map *) psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers->pData;
946 if (pXranConf->DynamicSectionEna == 0) {
947 app_io_xran_iq_content_init_cp_rb_map(pRbMap, XRAN_DIR_UL, cc_id, ant_id, sym_id, tti, pXranConf->nULRBs);
948 } else if(pXranConf->ru_conf.xranCat == XRAN_CATEGORY_B
949 && appMode == APP_O_DU
952 char * ul_bfw_pos = ((char*)p_iq->p_tx_ul_bfw_buffer[flowId]) + p_iq->tx_ul_bfw_buffer_position[flowId];
953 struct xran_prb_elm* p_pRbMapElm = NULL;
955 for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) {
956 p_pRbMapElm = &pRbMap->prbMap[idxElm];
957 p_pRbMapElm->bf_weight.nAntElmTRx = pXranConf->nAntElmTRx;
959 if(p_pRbMapElm->BeamFormingType == XRAN_BEAM_WEIGHT && p_pRbMapElm->bf_weight_update) {
960 if(p_pRbMapElm->bf_weight.numBundPrb == 0) {
961 /* No bundled PRB, using extension 1 */
963 int16_t ext_len = 9600;
964 int16_t ext_sec_total = 0;
965 int8_t * ext_buf =(int8_t*) xran_malloc(ext_len);
966 int8_t * ext_buf_start = ext_buf;
971 ext_buf += (RTE_PKTMBUF_HEADROOM +
972 sizeof(struct xran_ecpri_hdr) +
973 sizeof(struct xran_cp_radioapp_section1_header) +
974 sizeof(struct xran_cp_radioapp_section1));
976 ext_len -= (RTE_PKTMBUF_HEADROOM +
977 sizeof(struct xran_ecpri_hdr) +
978 sizeof(struct xran_cp_radioapp_section1_header) +
979 sizeof(struct xran_cp_radioapp_section1));
981 ptr = (int16_t*)(ul_bfw_pos +(p_pRbMapElm->nRBStart*p_pRbMapElm->bf_weight.nAntElmTRx)*4);
982 ext_sec_total = xran_cp_populate_section_ext_1((int8_t *)ext_buf,
984 (int16_t *) (ul_bfw_pos + (p_pRbMapElm->nRBStart*p_pRbMapElm->bf_weight.nAntElmTRx)*4),
985 p_pRbMapElm->nRBSize,
986 p_pRbMapElm->bf_weight.nAntElmTRx,
987 p_pRbMapElm->iqWidth, p_pRbMapElm->compMethod);
988 if(ext_sec_total > 0) {
989 p_pRbMapElm->bf_weight.p_ext_start = ext_buf_start;
990 p_pRbMapElm->bf_weight.p_ext_section = ext_buf;
991 p_pRbMapElm->bf_weight.ext_section_sz = ext_sec_total;
993 rte_panic("xran_cp_populate_section_ext_1 return error [%d]\n", ext_sec_total);
996 rte_panic("xran_malloc return NULL\n");
999 app_io_xran_ext_type11_populate(p_pRbMapElm, p_iq->p_tx_ul_bfw_buffer[flowId], app_io_xran_fh_init.mtu);
1004 p_iq->tx_ul_bfw_buffer_position[flowId] += (pXranConf->nULRBs*pXranConf->nAntElmTRx)*4;
1005 if(p_iq->tx_ul_bfw_buffer_position[flowId] >= p_iq->tx_ul_bfw_buffer_size[flowId])
1006 p_iq->tx_ul_bfw_buffer_position[flowId] = 0;
1008 rte_panic("DL pRbMap ==NULL\n");
1015 app_io_xran_iq_content_init_up_tx(uint8_t appMode, struct xran_fh_config *pXranConf,
1016 struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
1017 int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t tti, int32_t flowId)
1021 uint8_t* u8dptr = NULL;
1022 struct xran_prb_map* pRbMap = NULL;
1023 enum xran_comp_hdr_type staticEn = XRAN_COMP_HDR_TYPE_DYNAMIC;
1025 if (pXranConf != NULL)
1027 staticEn = pXranConf->ru_conf.xranCompHdrType;
1030 pRbMap = (struct xran_prb_map *) psIoCtrl->sFrontHaulTxPrbMapBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers->pData;
1031 pos = ((char*)p_iq->p_tx_play_buffer[flowId]) + p_iq->tx_play_buffer_position[flowId];
1032 ptr = psIoCtrl->sFrontHaulTxBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1036 u8dptr = (uint8_t*)ptr;
1037 int16_t payload_len = 0;
1039 uint8_t *dst = (uint8_t *)u8dptr;
1040 uint8_t *src = (uint8_t *)pos;
1041 struct xran_prb_elm* p_prbMapElm = &pRbMap->prbMap[idxElm];
1042 dst = xran_add_hdr_offset(dst, ((staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC) ? p_prbMapElm->compMethod : XRAN_COMPMETHOD_NONE));
1044 for (idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) {
1045 struct xran_section_desc *p_sec_desc = NULL;
1046 p_prbMapElm = &pRbMap->prbMap[idxElm];
1047 p_sec_desc = p_prbMapElm->p_sec_desc[sym_id][0];
1049 if(p_sec_desc == NULL) {
1050 rte_panic ("p_sec_desc == NULL\n");
1053 /* skip, if not scheduled */
1054 if(sym_id < p_prbMapElm->nStartSymb || sym_id >= p_prbMapElm->nStartSymb + p_prbMapElm->numSymb){
1055 p_sec_desc->iq_buffer_offset = 0;
1056 p_sec_desc->iq_buffer_len = 0;
1060 src = (uint8_t *)(pos + p_prbMapElm->nRBStart*N_SC_PER_PRB*4L);
1062 if(p_prbMapElm->compMethod == XRAN_COMPMETHOD_NONE) {
1063 payload_len = p_prbMapElm->nRBSize*N_SC_PER_PRB*4L;
1064 memcpy(dst, src, payload_len);
1066 } else if ((p_prbMapElm->compMethod == XRAN_COMPMETHOD_BLKFLOAT) || (p_prbMapElm->compMethod == XRAN_COMPMETHOD_MODULATION)) {
1067 struct xranlib_compress_request bfp_com_req;
1068 struct xranlib_compress_response bfp_com_rsp;
1070 memset(&bfp_com_req, 0, sizeof(struct xranlib_compress_request));
1071 memset(&bfp_com_rsp, 0, sizeof(struct xranlib_compress_response));
1073 bfp_com_req.data_in = (int16_t*)src;
1074 bfp_com_req.numRBs = p_prbMapElm->nRBSize;
1075 bfp_com_req.len = p_prbMapElm->nRBSize*N_SC_PER_PRB*4L;
1076 bfp_com_req.compMethod = p_prbMapElm->compMethod;
1077 bfp_com_req.iqWidth = p_prbMapElm->iqWidth;
1078 bfp_com_req.ScaleFactor= p_prbMapElm->ScaleFactor;
1079 bfp_com_req.reMask = p_prbMapElm->reMask;
1081 bfp_com_rsp.data_out = (int8_t*)dst;
1082 bfp_com_rsp.len = 0;
1084 xranlib_compress(&bfp_com_req, &bfp_com_rsp);
1085 payload_len = bfp_com_rsp.len;
1088 printf ("p_prbMapElm->compMethod == %d is not supported\n",
1089 p_prbMapElm->compMethod);
1093 /* update RB map for given element */
1094 p_sec_desc->iq_buffer_offset = RTE_PTR_DIFF(dst, u8dptr);
1095 p_sec_desc->iq_buffer_len = payload_len;
1097 /* add headroom for ORAN headers between IQs for chunk of RBs*/
1099 dst = xran_add_hdr_offset(dst, ((staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC) ? p_prbMapElm->compMethod : XRAN_COMPMETHOD_NONE));
1102 p_iq->tx_play_buffer_position[flowId] += pXranConf->nDLRBs*N_SC_PER_PRB*4;
1103 if(p_iq->tx_play_buffer_position[flowId] >= p_iq->tx_play_buffer_size[flowId])
1104 p_iq->tx_play_buffer_position[flowId] = 0;
1106 rte_panic("ptr ==NULL\n");
1114 app_io_xran_iq_content_init_up_prach(uint8_t appMode, struct xran_fh_config *pXranConf,
1115 struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
1116 int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t tti, int32_t flowId)
1120 uint32_t* u32dptr = NULL;
1122 if(p_iq->p_tx_prach_play_buffer[flowId]) {
1123 pos = ((char*)p_iq->p_tx_prach_play_buffer[flowId]);
1124 ptr = psIoCtrl->sFHPrachRxBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1127 int32_t compMethod = pXranConf->ru_conf.compMeth;
1129 if(compMethod == XRAN_COMPMETHOD_NONE) {
1130 u32dptr = (uint32_t*)(ptr);
1131 memcpy(u32dptr, pos, RTE_MIN(PRACH_PLAYBACK_BUFFER_BYTES, p_iq->tx_prach_play_buffer_size[flowId]));
1132 } else if((compMethod == XRAN_COMPMETHOD_BLKFLOAT)
1133 || (compMethod == XRAN_COMPMETHOD_MODULATION)) {
1134 struct xranlib_compress_request comp_req;
1135 struct xranlib_compress_response comp_rsp;
1137 memset(&comp_req, 0, sizeof(struct xranlib_compress_request));
1138 memset(&comp_rsp, 0, sizeof(struct xranlib_compress_response));
1140 /* compress whole playback data */
1141 comp_req.data_in = (int16_t *)pos;
1142 comp_req.len = RTE_MIN(PRACH_PLAYBACK_BUFFER_BYTES, p_iq->tx_prach_play_buffer_size[flowId]);
1143 comp_req.numRBs = comp_req.len / 12 / 4; /* 12RE, 4bytes */
1144 comp_req.compMethod = compMethod;
1145 comp_req.iqWidth = pXranConf->ru_conf.iqWidth;
1146 comp_req.ScaleFactor = 0; /* TODO */
1147 comp_req.reMask = 0xfff; /* TODO */
1149 comp_rsp.data_out = (int8_t *)ptr;
1152 xranlib_compress(&comp_req, &comp_rsp);
1154 printf ("p_prbMapElm->compMethod == %d is not supported\n", compMethod);
1157 } else { /* if(ptr && pos) */
1158 printf("prach ptr ==NULL\n");
1161 } /* if(p_iq->p_tx_prach_play_buffer[flowId]) */
1167 app_io_xran_iq_content_init_up_srs(uint8_t appMode, struct xran_fh_config *pXranConf,
1168 struct bbu_xran_io_if *psBbuIo, struct xran_io_shared_ctrl *psIoCtrl, struct o_xu_buffers * p_iq,
1169 int32_t cc_id, int32_t ant_id, int32_t sym_id, int32_t tti, int32_t flowId)
1171 struct xran_prb_map * pRbMap = NULL;
1174 uint8_t* u8dptr = NULL;
1175 enum xran_comp_hdr_type staticEn = XRAN_COMP_HDR_TYPE_DYNAMIC;
1177 if (pXranConf != NULL)
1179 staticEn = pXranConf->ru_conf.xranCompHdrType;
1182 if(p_iq->p_tx_srs_play_buffer[flowId]) {
1183 pos = ((char*)p_iq->p_tx_srs_play_buffer[flowId]) + p_iq->tx_srs_play_buffer_position[flowId];;
1184 ptr = psIoCtrl->sFHSrsRxBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1185 pRbMap = (struct xran_prb_map *) psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers->pData;
1187 if(ptr && pos && pRbMap) {
1189 u8dptr = (uint8_t*)ptr;
1190 int16_t payload_len = 0;
1192 uint8_t *dst = (uint8_t *)u8dptr;
1193 uint8_t *src = (uint8_t *)pos;
1194 struct xran_prb_elm* p_prbMapElm = &pRbMap->prbMap[idxElm];
1195 dst = xran_add_hdr_offset(dst, (staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC) ? p_prbMapElm->compMethod : XRAN_COMPMETHOD_NONE);
1196 for (idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++) {
1197 struct xran_section_desc *p_sec_desc = NULL;
1198 p_prbMapElm = &pRbMap->prbMap[idxElm];
1199 p_sec_desc = p_prbMapElm->p_sec_desc[sym_id][0];
1201 if(p_sec_desc == NULL){
1202 rte_panic ("p_sec_desc == NULL\n");
1205 /* skip, if not scheduled */
1206 if(sym_id < p_prbMapElm->nStartSymb || sym_id >= p_prbMapElm->nStartSymb + p_prbMapElm->numSymb) {
1207 p_sec_desc->iq_buffer_offset = 0;
1208 p_sec_desc->iq_buffer_len = 0;
1212 src = (uint8_t *)(pos + p_prbMapElm->nRBStart*N_SC_PER_PRB*4L);
1214 if(p_prbMapElm->compMethod == XRAN_COMPMETHOD_NONE) {
1215 payload_len = p_prbMapElm->nRBSize*N_SC_PER_PRB*4L;
1216 memcpy(dst, src, payload_len);
1218 } else if (p_prbMapElm->compMethod == XRAN_COMPMETHOD_BLKFLOAT
1219 || (p_prbMapElm->compMethod == XRAN_COMPMETHOD_MODULATION)) {
1220 struct xranlib_compress_request bfp_com_req;
1221 struct xranlib_compress_response bfp_com_rsp;
1223 memset(&bfp_com_req, 0, sizeof(struct xranlib_compress_request));
1224 memset(&bfp_com_rsp, 0, sizeof(struct xranlib_compress_response));
1226 bfp_com_req.data_in = (int16_t*)src;
1227 bfp_com_req.numRBs = p_prbMapElm->nRBSize;
1228 bfp_com_req.len = p_prbMapElm->nRBSize*N_SC_PER_PRB*4L;
1229 bfp_com_req.compMethod = p_prbMapElm->compMethod;
1230 bfp_com_req.iqWidth = p_prbMapElm->iqWidth;
1231 bfp_com_req.ScaleFactor= p_prbMapElm->ScaleFactor;
1232 bfp_com_req.reMask = p_prbMapElm->reMask;
1234 bfp_com_rsp.data_out = (int8_t*)dst;
1235 bfp_com_rsp.len = 0;
1237 xranlib_compress(&bfp_com_req, &bfp_com_rsp);
1238 payload_len = bfp_com_rsp.len;
1240 rte_panic ("p_prbMapElm->compMethod == %d is not supported\n", p_prbMapElm->compMethod);
1243 /* update RB map for given element */
1244 p_sec_desc->iq_buffer_offset = RTE_PTR_DIFF(dst, u8dptr);
1245 p_sec_desc->iq_buffer_len = payload_len;
1247 /* add headroom for ORAN headers between IQs for chunk of RBs*/
1249 dst = xran_add_hdr_offset(dst, (staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC) ? p_prbMapElm->compMethod : XRAN_COMPMETHOD_NONE);
1252 rte_panic("[%d %d %d] %p %p %p ==NULL\n",tti, ant_id, sym_id, ptr, pos, pRbMap);
1255 p_iq->tx_srs_play_buffer_position[flowId] += pXranConf->nULRBs*N_SC_PER_PRB*4;
1256 if(p_iq->tx_srs_play_buffer_position[flowId] >= p_iq->tx_srs_play_buffer_size[flowId])
1257 p_iq->tx_srs_play_buffer_position[flowId] = 0;
1265 app_io_xran_iq_content_init(uint32_t o_xu_id, RuntimeConfig *p_o_xu_cfg)
1267 xran_status_t status;
1269 struct bbu_xran_io_if *psBbuIo = app_io_xran_if_get();
1270 struct xran_io_shared_ctrl *psIoCtrl = app_io_xran_if_ctrl_get(o_xu_id);
1271 int32_t nSectorIndex[XRAN_MAX_SECTOR_NR];
1273 int32_t cc_id, ant_id, sym_id, tti;
1276 uint8_t frame_id = 0;
1277 uint8_t subframe_id = 0;
1278 uint8_t slot_id = 0;
1286 struct xran_fh_config *pXranConf = &app_io_xran_fh_config[o_xu_id];
1287 struct xran_fh_init *pXranInit = &app_io_xran_fh_init;
1288 struct o_xu_buffers * p_iq = NULL;
1290 uint32_t xran_max_antenna_nr = RTE_MAX(p_o_xu_cfg->numAxc, p_o_xu_cfg->numUlAxc);
1291 uint32_t xran_max_ant_array_elm_nr = RTE_MAX(p_o_xu_cfg->antElmTRx, xran_max_antenna_nr);
1294 struct xran_prb_map *pRbMap = NULL;
1296 if(psBbuIo == NULL){
1297 rte_panic("psBbuIo == NULL\n");
1300 if(psIoCtrl == NULL){
1301 rte_panic("psIoCtrl == NULL\n");
1304 for (nSectorNum = 0; nSectorNum < XRAN_MAX_SECTOR_NR; nSectorNum++) {
1305 nSectorIndex[nSectorNum] = nSectorNum;
1307 nSectorNum = p_o_xu_cfg->numCC;
1308 printf ("app_io_xran_iq_content_init\n");
1310 if(p_o_xu_cfg->p_buff) {
1311 p_iq = p_o_xu_cfg->p_buff;
1313 rte_panic("Error p_o_xu_cfg->p_buff\n");
1317 for(cc_id = 0; cc_id < nSectorNum; cc_id++) {
1318 for(tti = 0; tti < XRAN_N_FE_BUF_LEN; tti ++) {
1319 for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++){
1320 for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
1321 if(p_o_xu_cfg->appMode == APP_O_DU) {
1322 flowId = p_o_xu_cfg->numAxc * cc_id + ant_id;
1324 flowId = p_o_xu_cfg->numUlAxc * cc_id + ant_id;
1327 if ((status = app_io_xran_iq_content_init_cp_tx(p_o_xu_cfg->appMode, pXranConf,
1328 psBbuIo, psIoCtrl, p_iq,
1329 cc_id, ant_id, sym_id, tti, flowId)) != 0) {
1330 rte_panic("app_io_xran_iq_content_init_cp_tx");
1332 if ((status = app_io_xran_iq_content_init_up_tx(p_o_xu_cfg->appMode, pXranConf,
1333 psBbuIo, psIoCtrl, p_iq,
1334 cc_id, ant_id, sym_id, tti, flowId)) != 0) {
1335 rte_panic("app_io_xran_iq_content_init_up_tx");
1337 if ((status = app_io_xran_iq_content_init_cp_rx(p_o_xu_cfg->appMode, pXranConf,
1338 psBbuIo, psIoCtrl, p_iq,
1339 cc_id, ant_id, sym_id, tti, flowId)) != 0) {
1340 rte_panic("app_io_xran_iq_content_init_cp_rx");
1346 /* prach TX for RU only */
1347 if(p_o_xu_cfg->appMode == APP_O_RU && p_o_xu_cfg->enablePrach) {
1348 for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++) {
1349 for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
1350 flowId = p_o_xu_cfg->numAxc*cc_id + ant_id;
1351 if ((status = app_io_xran_iq_content_init_up_prach(p_o_xu_cfg->appMode, pXranConf,
1352 psBbuIo, psIoCtrl, p_iq,
1353 cc_id, ant_id, sym_id, tti, flowId)) != 0) {
1354 rte_panic("app_io_xran_iq_content_init_cp_tx");
1359 for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
1361 snprintf(fname, sizeof(fname), "./logs/aftercomp-%d.bin", sym_id);
1362 sys_save_buf_to_file(fname,
1363 "Compressed PRACH IQ Samples in binary format",
1364 psIoCtrl->sFHPrachRxBbuIoBufCtrl[0][0][0].sBufferList.pBuffers[sym_id].pData,
1365 RTE_MIN(PRACH_PLAYBACK_BUFFER_BYTES, p_iq->tx_prach_play_buffer_size[0]),
1367 snprintf(fname, sizeof(fname), "./logs/aftercomp-%d.txt", sym_id);
1368 sys_save_buf_to_file_txt(fname,
1369 "Compressed PRACH IQ Samples in human readable format",
1370 psIoCtrl->sFHPrachRxBbuIoBufCtrl[0][0][0].sBufferList.pBuffers[sym_id].pData,
1371 RTE_MIN(PRACH_PLAYBACK_BUFFER_BYTES, p_iq->tx_prach_play_buffer_size[0]),
1376 /* SRS TX for RU only */
1377 if(p_o_xu_cfg->appMode == APP_O_RU && p_o_xu_cfg->enableSrs) {
1378 for(ant_id = 0; ant_id < xran_max_ant_array_elm_nr; ant_id++) {
1379 for(sym_id = 0; sym_id < XRAN_MAX_NUM_OF_SRS_SYMBOL_PER_SLOT; sym_id++) {
1380 flowId = p_o_xu_cfg->antElmTRx*cc_id + ant_id;
1381 if ((status = app_io_xran_iq_content_init_up_srs(p_o_xu_cfg->appMode, pXranConf,
1382 psBbuIo, psIoCtrl, p_iq,
1383 cc_id, ant_id, sym_id, tti, flowId)) != 0){
1384 rte_panic("app_io_xran_iq_content_init_cp_tx");
1395 void app_io_xran_if_stop(void)
1397 xran_status_t status = 0;
1398 SWXRANInterfaceTypeEnum eInterfaceType;
1400 status += xran_mm_destroy(app_io_xran_handle)*2;
1402 if(XRAN_STATUS_SUCCESS != status) {
1403 printf("Failed at xran_mm_destroy, status %d\n",status);
1404 iAssert(status == XRAN_STATUS_SUCCESS);
1409 app_io_xran_iq_content_get(uint32_t o_xu_id, RuntimeConfig *p_o_xu_cfg)
1411 struct bbu_xran_io_if *psBbuIo = app_io_xran_if_get();
1412 struct xran_io_shared_ctrl *psIoCtrl = app_io_xran_if_ctrl_get(o_xu_id);
1413 xran_status_t status;
1414 int32_t nSectorIndex[XRAN_MAX_SECTOR_NR];
1416 int32_t cc_id, ant_id, sym_id, tti;
1419 uint8_t frame_id = 0;
1420 uint8_t subframe_id = 0;
1421 uint8_t slot_id = 0;
1423 uint16_t idxDesc = 0;
1430 struct xran_fh_config *pXranConf = &app_io_xran_fh_config[o_xu_id];
1432 uint32_t xran_max_antenna_nr = RTE_MAX(p_o_xu_cfg->numAxc, p_o_xu_cfg->numUlAxc);
1433 uint32_t xran_max_ant_array_elm_nr = RTE_MAX(p_o_xu_cfg->antElmTRx, xran_max_antenna_nr);
1436 struct o_xu_buffers *p_iq = NULL;
1439 rte_panic("psBbuIo == NULL\n");
1441 if(psIoCtrl == NULL)
1442 rte_panic("psIoCtrl == NULL\n");
1444 for (nSectorNum = 0; nSectorNum < XRAN_MAX_SECTOR_NR; nSectorNum++) {
1445 nSectorIndex[nSectorNum] = nSectorNum;
1448 nSectorNum = p_o_xu_cfg->numCC;
1449 printf ("app_io_xran_iq_content_get\n");
1451 if(p_o_xu_cfg->p_buff) {
1452 p_iq = p_o_xu_cfg->p_buff;
1454 printf("Error p_o_xu_cfg->p_buff\n");
1458 for(cc_id = 0; cc_id <nSectorNum; cc_id++) {
1459 for(tti = 0; tti < XRAN_N_FE_BUF_LEN; tti++) {
1460 for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++) {
1462 struct xran_prb_map *pRbMap = NULL;
1463 struct xran_prb_elm *pRbElm = NULL;
1464 struct xran_section_desc *p_sec_desc = NULL;
1465 int32_t prach_len = 0;
1467 pRbMap = (struct xran_prb_map *) psIoCtrl->sFrontHaulRxPrbMapBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers->pData;
1469 printf("pRbMap == NULL\n");
1472 if(p_o_xu_cfg->appMode == APP_O_RU)
1473 flowId = p_o_xu_cfg->numAxc * cc_id + ant_id;
1475 flowId = p_o_xu_cfg->numUlAxc * cc_id + ant_id;
1477 for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
1478 pRbElm = &pRbMap->prbMap[0];
1479 if(pRbMap->nPrbElm == 1){
1480 if(p_iq->p_rx_log_buffer[flowId]) {
1481 pos = ((char*)p_iq->p_rx_log_buffer[flowId]) + p_iq->rx_log_buffer_position[flowId];
1482 uint32_t one_rb_size = (((pRbElm->iqWidth == 0) || (pRbElm->iqWidth == 16)) ? (N_SC_PER_PRB*2*2) : (3 * pRbElm->iqWidth + 1));
1483 if (app_io_xran_fh_init.mtu < pRbElm->nRBSize * one_rb_size)
1485 ptr = psIoCtrl->sFrontHaulRxBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1487 int32_t payload_len = 0;
1488 u32dptr = (uint32_t*)(ptr);
1489 if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE){
1490 struct xranlib_decompress_request bfp_decom_req;
1491 struct xranlib_decompress_response bfp_decom_rsp;
1494 memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
1495 memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));
1497 switch(pRbElm->compMethod) {
1498 case XRAN_COMPMETHOD_BLKFLOAT:
1501 case XRAN_COMPMETHOD_MODULATION:
1508 bfp_decom_req.data_in = (int8_t *)u32dptr;
1509 bfp_decom_req.numRBs = pRbElm->nRBSize;
1510 bfp_decom_req.len = (3 * pRbElm->iqWidth + parm_size) * pRbElm->nRBSize;
1511 bfp_decom_req.compMethod = pRbElm->compMethod;
1512 bfp_decom_req.iqWidth = pRbElm->iqWidth;
1513 bfp_decom_req.reMask = pRbElm->reMask;
1514 bfp_decom_req.ScaleFactor= pRbElm->ScaleFactor;
1516 bfp_decom_rsp.data_out = (int16_t *)(pos + pRbElm->nRBStart*N_SC_PER_PRB*4);
1517 bfp_decom_rsp.len = 0;
1519 xranlib_decompress(&bfp_decom_req, &bfp_decom_rsp);
1520 payload_len = bfp_decom_rsp.len;
1523 u32dptr = (uint32_t*)(ptr);
1524 memcpy(pos + pRbElm->nRBStart*N_SC_PER_PRB*4L , u32dptr, pRbElm->nRBSize*N_SC_PER_PRB*4L);
1527 printf("%s:%d [%d][%d][%d][%d]ptr ==NULL\n", __FUNCTION__, __LINE__, tti,cc_id,ant_id, sym_id);
1532 p_sec_desc = pRbElm->p_sec_desc[sym_id][0];
1533 if(p_iq->p_rx_log_buffer[flowId] && p_sec_desc){
1534 if(sym_id >= pRbElm->nStartSymb && sym_id < pRbElm->nStartSymb + pRbElm->numSymb){
1535 pos = ((char*)p_iq->p_rx_log_buffer[flowId]) + p_iq->rx_log_buffer_position[flowId];
1536 ptr = p_sec_desc->pData;
1538 int32_t payload_len = 0;
1539 u32dptr = (uint32_t*)(ptr);
1540 if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE){
1541 struct xranlib_decompress_request bfp_decom_req;
1542 struct xranlib_decompress_response bfp_decom_rsp;
1545 memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
1546 memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));
1547 switch(pRbElm->compMethod) {
1548 case XRAN_COMPMETHOD_BLKFLOAT:
1551 case XRAN_COMPMETHOD_MODULATION:
1558 bfp_decom_req.data_in = (int8_t *)u32dptr;
1559 bfp_decom_req.numRBs = pRbElm->nRBSize;
1560 bfp_decom_req.len = (3 * pRbElm->iqWidth + parm_size)*pRbElm->nRBSize;
1561 bfp_decom_req.compMethod = pRbElm->compMethod;
1562 bfp_decom_req.iqWidth = pRbElm->iqWidth;
1563 bfp_decom_req.reMask = pRbElm->reMask;
1564 bfp_decom_req.ScaleFactor= pRbElm->ScaleFactor;
1566 bfp_decom_rsp.data_out = (int16_t *)(pos + pRbElm->nRBStart*N_SC_PER_PRB*4);
1567 bfp_decom_rsp.len = 0;
1569 xranlib_decompress(&bfp_decom_req, &bfp_decom_rsp);
1570 payload_len = bfp_decom_rsp.len;
1574 memcpy(pos + pRbElm->nRBStart*N_SC_PER_PRB*4 , u32dptr, pRbElm->nRBSize*N_SC_PER_PRB*4);
1578 printf("%s:%d [%d][%d][%d][%d]ptr ==NULL\n", __FUNCTION__, __LINE__, tti,cc_id,ant_id, sym_id);
1583 printf("(%d : %d : %d) flowid %d, p_sec_desc is empty\n", tti, sym_id, ant_id,flowId);
1587 for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++ ) {
1588 pRbElm = &pRbMap->prbMap[idxElm];
1589 p_sec_desc = pRbElm->p_sec_desc[sym_id][0];
1590 if(p_iq->p_rx_log_buffer[flowId] && p_sec_desc){
1591 if(sym_id >= pRbElm->nStartSymb && sym_id < pRbElm->nStartSymb + pRbElm->numSymb){
1592 pos = ((char*)p_iq->p_rx_log_buffer[flowId]) + p_iq->rx_log_buffer_position[flowId];
1593 ptr = p_sec_desc->pData;
1595 int32_t payload_len = 0;
1596 u32dptr = (uint32_t*)(ptr);
1597 if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE){
1598 struct xranlib_decompress_request bfp_decom_req;
1599 struct xranlib_decompress_response bfp_decom_rsp;
1602 memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
1603 memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));
1604 switch(pRbElm->compMethod) {
1605 case XRAN_COMPMETHOD_BLKFLOAT:
1608 case XRAN_COMPMETHOD_MODULATION:
1615 bfp_decom_req.data_in = (int8_t *)u32dptr;
1616 bfp_decom_req.numRBs = pRbElm->nRBSize;
1617 bfp_decom_req.len = (3 * pRbElm->iqWidth + parm_size)*pRbElm->nRBSize;
1618 bfp_decom_req.compMethod = pRbElm->compMethod;
1619 bfp_decom_req.iqWidth = pRbElm->iqWidth;
1620 bfp_decom_req.reMask = pRbElm->reMask;
1621 bfp_decom_req.ScaleFactor= pRbElm->ScaleFactor;
1623 bfp_decom_rsp.data_out = (int16_t *)(pos + pRbElm->nRBStart*N_SC_PER_PRB*4);
1624 bfp_decom_rsp.len = 0;
1626 xranlib_decompress(&bfp_decom_req, &bfp_decom_rsp);
1627 payload_len = bfp_decom_rsp.len;
1630 memcpy(pos + pRbElm->nRBStart*N_SC_PER_PRB*4 , u32dptr, pRbElm->nRBSize*N_SC_PER_PRB*4);
1634 // printf("%s:%d [%d][%d][%d][%d]ptr ==NULL\n", __FUNCTION__, __LINE__, tti,cc_id,ant_id, sym_id);
1639 printf("(%d : %d : %d) flowid %d, p_sec_desc is empty\n", tti, sym_id, ant_id,flowId);
1642 p_iq->rx_log_buffer_position[flowId] += pXranConf->nULRBs*N_SC_PER_PRB*4;
1644 if(p_iq->rx_log_buffer_position[flowId] >= p_iq->rx_log_buffer_size[flowId])
1645 p_iq->rx_log_buffer_position[flowId] = 0;
1649 flowId = p_o_xu_cfg->numAxc * cc_id + ant_id;
1650 prach_len = (3 * pXranConf->ru_conf.iqWidth_PRACH) * pXranConf->prach_conf.numPrbc; /* 12RE*2pairs/8bits (12*2/8=3)*/
1651 for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) {
1653 if(p_iq->p_prach_log_buffer[flowId]) {
1654 pos = ((char*)p_iq->p_prach_log_buffer[flowId]) + p_iq->prach_log_buffer_position[flowId];
1655 ptr = psIoCtrl->sFHPrachRxBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1657 int32_t compMethod = pXranConf->ru_conf.compMeth_PRACH;
1659 if(compMethod == XRAN_COMPMETHOD_NONE) {
1660 memcpy(pos, (uint32_t *)(ptr), prach_len);
1663 struct xranlib_decompress_request decomp_req;
1664 struct xranlib_decompress_response decomp_rsp;
1667 memset(&decomp_req, 0, sizeof(struct xranlib_decompress_request));
1668 memset(&decomp_rsp, 0, sizeof(struct xranlib_decompress_response));
1670 switch(compMethod) {
1671 case XRAN_COMPMETHOD_BLKFLOAT: parm_size = 1; break;
1672 case XRAN_COMPMETHOD_MODULATION: parm_size = 0; break;
1677 decomp_req.data_in = (int8_t *)ptr;
1678 decomp_req.numRBs = pXranConf->prach_conf.numPrbc;
1679 decomp_req.len = (3 * pXranConf->ru_conf.iqWidth_PRACH + parm_size) * pXranConf->prach_conf.numPrbc; /* 12RE*2pairs/8bits (12*2/8=3)*/
1680 decomp_req.compMethod = compMethod;
1681 decomp_req.iqWidth = pXranConf->ru_conf.iqWidth_PRACH;
1682 decomp_req.ScaleFactor = 0; /* TODO */
1683 decomp_req.reMask = 0xfff; /* TODO */
1685 decomp_rsp.data_out = (int16_t *)pos;
1688 xranlib_decompress(&decomp_req, &decomp_rsp);
1692 p_iq->prach_log_buffer_position[flowId] += prach_len;
1694 if(p_iq->prach_log_buffer_position[flowId] >= p_iq->prach_log_buffer_size[flowId])
1695 p_iq->prach_log_buffer_position[flowId] = 0;
1696 } /* if(p_iq->p_prach_log_buffer[flowId]) */
1697 } /* for(sym_id = 0; sym_id < XRAN_NUM_OF_SYMBOL_PER_SLOT; sym_id++) */
1698 } /* for(ant_id = 0; ant_id < xran_max_antenna_nr; ant_id++) */
1700 /* SRS RX for O-DU only */
1701 if(p_o_xu_cfg->appMode == APP_O_DU && p_o_xu_cfg->enableSrs) {
1702 for(ant_id = 0; ant_id < xran_max_ant_array_elm_nr; ant_id++) {
1704 struct xran_prb_map *pRbMap = NULL;
1705 struct xran_prb_elm *pRbElm = NULL;
1706 struct xran_section_desc *p_sec_desc = NULL;
1707 pRbMap = (struct xran_prb_map *) psIoCtrl->sFHSrsRxPrbMapBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers->pData;
1708 if(pRbMap == NULL) {
1709 printf("pRbMap == NULL\n");
1712 flowId = p_o_xu_cfg->antElmTRx*cc_id + ant_id;
1713 if(p_iq->p_srs_log_buffer[flowId]) {
1714 for(sym_id = 0; sym_id < XRAN_MAX_NUM_OF_SRS_SYMBOL_PER_SLOT; sym_id++) {
1715 pRbElm = &pRbMap->prbMap[0];
1716 /*if(pRbMap->nPrbElm == 1) {
1717 if(sym_id >= pRbElm->nStartSymb && sym_id < pRbElm->nStartSymb + pRbElm->numSymb) {
1718 pos = ((char*)p_iq->p_srs_log_buffer[flowId]) + p_iq->srs_log_buffer_position[flowId];
1719 ptr = psIoCtrl->sFHSrsRxBbuIoBufCtrl[tti][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
1721 int32_t payload_len = 0;
1722 u32dptr = (uint32_t*)(ptr);
1723 if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE){
1724 struct xranlib_decompress_request bfp_decom_req;
1725 struct xranlib_decompress_response bfp_decom_rsp;
1728 memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
1729 memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));
1730 switch(pRbElm->compMethod) {
1731 case XRAN_COMPMETHOD_BLKFLOAT:
1734 case XRAN_COMPMETHOD_MODULATION:
1741 bfp_decom_req.data_in = (int8_t *)u32dptr;
1742 bfp_decom_req.numRBs = pRbElm->nRBSize;
1743 bfp_decom_req.len = (3* pRbElm->iqWidth + parm_size)*pRbElm->nRBSize;
1744 bfp_decom_req.compMethod = pRbElm->compMethod;
1745 bfp_decom_req.iqWidth = pRbElm->iqWidth;
1747 bfp_decom_rsp.data_out = (int16_t *)(pos + pRbElm->nRBStart*N_SC_PER_PRB*4);
1748 bfp_decom_rsp.len = 0;
1750 xranlib_decompress(&bfp_decom_req, &bfp_decom_rsp);
1751 payload_len = bfp_decom_rsp.len;
1754 u32dptr = (uint32_t*)(ptr);
1755 memcpy(pos + pRbElm->nRBStart*N_SC_PER_PRB*4L , u32dptr, pRbElm->nRBSize*N_SC_PER_PRB*4L);
1758 printf("[%d][%d][%d][%d]ptr ==NULL\n",tti,cc_id,ant_id, sym_id);
1762 for(idxElm = 0; idxElm < pRbMap->nPrbElm; idxElm++ ) {
1763 pRbElm = &pRbMap->prbMap[idxElm];
1764 p_sec_desc = pRbElm->p_sec_desc[sym_id][0];
1765 if(p_iq->p_srs_log_buffer[flowId] && p_sec_desc) {
1766 if(sym_id >= pRbElm->nStartSymb && sym_id < pRbElm->nStartSymb + pRbElm->numSymb) {
1767 pos = ((char*)p_iq->p_srs_log_buffer[flowId]) + p_iq->srs_log_buffer_position[flowId];
1768 ptr = p_sec_desc->pData;
1770 int32_t payload_len = 0;
1771 u32dptr = (uint32_t*)(ptr);
1772 if (pRbElm->compMethod != XRAN_COMPMETHOD_NONE) {
1773 struct xranlib_decompress_request bfp_decom_req;
1774 struct xranlib_decompress_response bfp_decom_rsp;
1777 memset(&bfp_decom_req, 0, sizeof(struct xranlib_decompress_request));
1778 memset(&bfp_decom_rsp, 0, sizeof(struct xranlib_decompress_response));
1779 switch(pRbElm->compMethod) {
1780 case XRAN_COMPMETHOD_BLKFLOAT:
1783 case XRAN_COMPMETHOD_MODULATION:
1790 bfp_decom_req.data_in = (int8_t *)u32dptr;
1791 bfp_decom_req.numRBs = pRbElm->nRBSize;
1792 bfp_decom_req.len = (3 * pRbElm->iqWidth + parm_size)*pRbElm->nRBSize;
1793 bfp_decom_req.compMethod = pRbElm->compMethod;
1794 bfp_decom_req.iqWidth = pRbElm->iqWidth;
1796 bfp_decom_rsp.data_out = (int16_t *)(pos + pRbElm->nRBStart*N_SC_PER_PRB*4);
1797 bfp_decom_rsp.len = 0;
1799 xranlib_decompress(&bfp_decom_req, &bfp_decom_rsp);
1800 payload_len = bfp_decom_rsp.len;
1803 memcpy(pos + pRbElm->nRBStart*N_SC_PER_PRB*4 , u32dptr, pRbElm->nRBSize*N_SC_PER_PRB*4);
1808 printf("(%d : %d : %d) flowid %d, p_sec_desc is empty\n", tti, sym_id, ant_id,flowId);
1812 p_iq->srs_log_buffer_position[flowId] += pXranConf->nULRBs*N_SC_PER_PRB*4;
1814 if(p_iq->srs_log_buffer_position[flowId] >= p_iq->srs_log_buffer_size[flowId])
1815 p_iq->srs_log_buffer_position[flowId] = 0;
/*
 * Derive the eAxC ID sub-field layout (bit positions and masks for the
 * RU Port ID, CC ID, Band Sector ID and DU/CU Port ID fields) used by the
 * xRAN library to build and parse 16-bit eAxC identifiers.
 *
 * p_eAxC_cfg - output: per-sub-field shift (bit_*) and bit mask (mask_*)
 * p_s_cfg    - input : runtime config carrying the configured sub-field
 *              bit widths and the xRAN category; may be updated (numUlAxc)
 *
 * NOTE(review): several statements (loop bodies accumulating `mask`,
 * closing braces, `break`s) are elided in this excerpt; comments below
 * describe the visible structure only.
 */
1827 app_io_xran_eAxCid_conf_set(struct xran_eaxcid_config *p_eAxC_cfg, RuntimeConfig * p_s_cfg)
/* Use the explicitly configured widths only when all four sub-fields are
 * non-zero and together fill the 16-bit eAxC ID exactly; otherwise fall
 * back to the category-specific default layout in the else branch. */
1832 if(p_s_cfg->DU_Port_ID_bitwidth && p_s_cfg->BandSector_ID_bitwidth && p_s_cfg->CC_ID_bitwidth
1833 && p_s_cfg->RU_Port_ID_bitwidth &&
1834 (p_s_cfg->DU_Port_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth + p_s_cfg->CC_ID_bitwidth
1835 + p_s_cfg->RU_Port_ID_bitwidth) == 16 /* eAxC ID subfields are 16 bits */
1836 ){ /* bit mask provided */
/* RU Port ID occupies the least-significant bits; each loop below
 * presumably ORs `1 << shift` into `mask` (body elided) — TODO confirm. */
1839 p_eAxC_cfg->bit_ruPortId = 0;
1840 for (shift = 0; shift < p_s_cfg->RU_Port_ID_bitwidth; shift++){
1843 p_eAxC_cfg->mask_ruPortId = mask;
/* CC ID sits immediately above the RU Port ID field. */
1845 p_eAxC_cfg->bit_ccId = p_s_cfg->RU_Port_ID_bitwidth;
1847 for (shift = p_s_cfg->RU_Port_ID_bitwidth; shift < p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth; shift++){
1850 p_eAxC_cfg->mask_ccId = mask;
/* Band Sector ID follows the CC ID field. */
1853 p_eAxC_cfg->bit_bandSectorId = p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth;
1855 for (shift = p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth; shift < p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth; shift++){
1858 p_eAxC_cfg->mask_bandSectorId = mask;
/* DU/CU Port ID occupies the most-significant bits. */
1860 p_eAxC_cfg->bit_cuPortId = p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth;
1862 for (shift = p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth;
1863 shift < p_s_cfg->RU_Port_ID_bitwidth + p_s_cfg->CC_ID_bitwidth + p_s_cfg->BandSector_ID_bitwidth + p_s_cfg->DU_Port_ID_bitwidth; shift++){
1866 p_eAxC_cfg->mask_cuPortId = mask;
1869 } else { /* bit mask config is not provided */
/* Fixed default layouts keyed off the O-RAN category. */
1870 switch (p_s_cfg->xranCat){
1871 case XRAN_CATEGORY_A: {
/* CAT A: 4 bits each for DU Port / Band Sector / CC / RU Port. */
1872 p_eAxC_cfg->mask_cuPortId = 0xf000;
1873 p_eAxC_cfg->mask_bandSectorId = 0x0f00;
1874 p_eAxC_cfg->mask_ccId = 0x00f0;
1875 p_eAxC_cfg->mask_ruPortId = 0x000f;
1876 p_eAxC_cfg->bit_cuPortId = 12;
1877 p_eAxC_cfg->bit_bandSectorId = 8;
1878 p_eAxC_cfg->bit_ccId = 4;
1879 p_eAxC_cfg->bit_ruPortId = 0;
1882 case XRAN_CATEGORY_B: {
/* CAT B: wider 8-bit RU Port field to address more eAxC flows. */
1883 p_eAxC_cfg->mask_cuPortId = 0xf000;
1884 p_eAxC_cfg->mask_bandSectorId = 0x0c00;
1885 p_eAxC_cfg->mask_ccId = 0x0300;
1886 p_eAxC_cfg->mask_ruPortId = 0x00ff; /* more than [0-127] eAxC */
1887 p_eAxC_cfg->bit_cuPortId = 12;
1888 p_eAxC_cfg->bit_bandSectorId = 10;
1889 p_eAxC_cfg->bit_ccId = 8;
1890 p_eAxC_cfg->bit_ruPortId = 0;
1894 rte_panic("Incorrect Category\n");
/* CAT A carries UL on the same eAxCs as DL: mirror the DL count. */
1898 if(p_s_cfg->xranCat == XRAN_CATEGORY_A)
1899 p_s_cfg->numUlAxc = p_s_cfg->numAxc;
/* Log the resulting layout for debugging. */
1901 printf("bit_cuPortId %2d mask 0x%04x\n",p_eAxC_cfg->bit_cuPortId, p_eAxC_cfg->mask_cuPortId);
1902 printf("bit_bandSectorId %2d mask 0x%04x\n",p_eAxC_cfg->bit_bandSectorId, p_eAxC_cfg->mask_bandSectorId);
1903 printf("bit_ccId %2d mask 0x%04x\n",p_eAxC_cfg->bit_ccId, p_eAxC_cfg->mask_ccId);
1904 printf("ruPortId %2d mask 0x%04x\n",p_eAxC_cfg->bit_ruPortId, p_eAxC_cfg->mask_ruPortId);
/*
 * Populate a per-O-xU xran_fh_config from the parsed usecase/runtime
 * configuration: PRB maps, frame/PRACH/SRS parameters, RU settings,
 * center-frequency ARFCNs and all O-RAN fronthaul timing windows.
 *
 * p_use_cfg     - usecase-level configuration (unused in visible lines)
 * p_o_xu_cfg    - per-O-xU runtime configuration (input)
 * p_xran_fh_init- library init struct (unused in visible lines)
 * p_xran_fh_cfg - output: zeroed then filled fronthaul config
 */
1910 app_io_xran_fh_config_init(UsecaseConfig* p_use_cfg, RuntimeConfig* p_o_xu_cfg, struct xran_fh_init* p_xran_fh_init, struct xran_fh_config* p_xran_fh_cfg)
1914 int32_t o_xu_id = 0;
1915 uint32_t nCenterFreq = 0;
1916 struct xran_prb_map* pRbMap = NULL;
/* Start from a clean slate so unset fields are deterministically zero. */
1918 memset(p_xran_fh_cfg, 0, sizeof(struct xran_fh_config));
1920 o_xu_id = p_o_xu_cfg->o_xu_id;
/* Derive DL/UL PRB counts from tech, numerology, bandwidth and Point A. */
1922 p_xran_fh_cfg->nDLRBs = app_xran_get_num_rbs(p_o_xu_cfg->xranTech, p_o_xu_cfg->mu_number, p_o_xu_cfg->nDLBandwidth, p_o_xu_cfg->nDLAbsFrePointA);
1923 p_xran_fh_cfg->nULRBs = app_xran_get_num_rbs(p_o_xu_cfg->xranTech, p_o_xu_cfg->mu_number, p_o_xu_cfg->nULBandwidth, p_o_xu_cfg->nULAbsFrePointA);
/* Static sections: one full-bandwidth, full-slot, uncompressed PRB
 * element per direction. */
1925 if(p_o_xu_cfg->DynamicSectionEna == 0){
1926 pRbMap = p_o_xu_cfg->p_PrbMapDl;
1928 pRbMap->dir = XRAN_DIR_DL;
1929 pRbMap->xran_port = 0;
1930 pRbMap->band_id = 0;
1932 pRbMap->ru_port_id = 0;
1934 pRbMap->start_sym_id = 0;
1935 pRbMap->nPrbElm = 1;
1936 pRbMap->prbMap[0].nStartSymb = 0;
1937 pRbMap->prbMap[0].numSymb = 14;
1938 pRbMap->prbMap[0].nRBStart = 0;
1939 pRbMap->prbMap[0].nRBSize = p_xran_fh_cfg->nDLRBs;
1940 pRbMap->prbMap[0].nBeamIndex = 0;
1941 pRbMap->prbMap[0].compMethod = XRAN_COMPMETHOD_NONE;
1942 pRbMap->prbMap[0].iqWidth = 16;
/* Same single-element map for UL, sized by the UL PRB count. */
1944 pRbMap = p_o_xu_cfg->p_PrbMapUl;
1945 pRbMap->dir = XRAN_DIR_UL;
1946 pRbMap->xran_port = 0;
1947 pRbMap->band_id = 0;
1949 pRbMap->ru_port_id = 0;
1951 pRbMap->start_sym_id = 0;
1952 pRbMap->nPrbElm = 1;
1953 pRbMap->prbMap[0].nStartSymb = 0;
1954 pRbMap->prbMap[0].numSymb = 14;
1955 pRbMap->prbMap[0].nRBStart = 0;
1956 pRbMap->prbMap[0].nRBSize = p_xran_fh_cfg->nULRBs;
1957 pRbMap->prbMap[0].nBeamIndex = 0;
1958 pRbMap->prbMap[0].compMethod = XRAN_COMPMETHOD_NONE;
1959 pRbMap->prbMap[0].iqWidth = 16;
/* Dynamic sections: presumably the else branch — only the map headers
 * are set here; the per-element layout comes from the config files.
 * TODO confirm (branching brace elided in this excerpt). */
1961 pRbMap = p_o_xu_cfg->p_PrbMapDl;
1963 pRbMap->dir = XRAN_DIR_DL;
1964 pRbMap->xran_port = 0;
1965 pRbMap->band_id = 0;
1967 pRbMap->ru_port_id = 0;
1969 pRbMap->start_sym_id = 0;
1971 pRbMap = p_o_xu_cfg->p_PrbMapUl;
1972 pRbMap->dir = XRAN_DIR_UL;
1973 pRbMap->xran_port = 0;
1974 pRbMap->band_id = 0;
1976 pRbMap->ru_port_id = 0;
1978 pRbMap->start_sym_id = 0;
/* SRS PRB map header (UL direction). */
1980 pRbMap = p_o_xu_cfg->p_PrbMapSrs;
1981 pRbMap->dir = XRAN_DIR_UL;
1982 pRbMap->xran_port = 0;
1983 pRbMap->band_id = 0;
1985 pRbMap->ru_port_id = 0;
1987 pRbMap->start_sym_id = 0;
/* Basic port/antenna topology. */
1990 p_xran_fh_cfg->sector_id = 0;
1991 p_xran_fh_cfg->dpdk_port = o_xu_id;
1992 p_xran_fh_cfg->nCC = p_o_xu_cfg->numCC;
1993 p_xran_fh_cfg->neAxc = p_o_xu_cfg->numAxc;
1994 p_xran_fh_cfg->neAxcUl = p_o_xu_cfg->numUlAxc;
1995 p_xran_fh_cfg->nAntElmTRx = p_o_xu_cfg->antElmTRx;
/* Frame structure: duplexing, numerology, TDD pattern. */
1997 p_xran_fh_cfg->frame_conf.nFrameDuplexType = p_o_xu_cfg->nFrameDuplexType;
1998 p_xran_fh_cfg->frame_conf.nNumerology = p_o_xu_cfg->mu_number;
1999 p_xran_fh_cfg->frame_conf.nTddPeriod = p_o_xu_cfg->nTddPeriod;
2001 for (i = 0; i < p_o_xu_cfg->nTddPeriod; i++){
2002 p_xran_fh_cfg->frame_conf.sSlotConfig[i] = p_o_xu_cfg->sSlotConfig[i];
/* PRACH configuration; nPrachFreqOffset of -792 is the hard-coded
 * frequency offset used by this sample app. */
2005 p_xran_fh_cfg->prach_conf.nPrachSubcSpacing = p_o_xu_cfg->mu_number;
2006 p_xran_fh_cfg->prach_conf.nPrachFreqStart = 0;
2007 p_xran_fh_cfg->prach_conf.nPrachFilterIdx = XRAN_FILTERINDEX_PRACH_ABC;
2008 p_xran_fh_cfg->prach_conf.nPrachConfIdx = p_o_xu_cfg->prachConfigIndex;
2009 p_xran_fh_cfg->prach_conf.nPrachFreqOffset = -792;
/* SRS flows are stacked after PUSCH and PRACH eAxC ranges. */
2011 p_xran_fh_cfg->srs_conf.symbMask = p_o_xu_cfg->srsSymMask;
2012 p_xran_fh_cfg->srs_conf.eAxC_offset = 2 * p_o_xu_cfg->numAxc; /* PUSCH, PRACH, SRS */
/* RU-side capabilities and IQ formatting. */
2014 p_xran_fh_cfg->ru_conf.xranTech = p_o_xu_cfg->xranTech;
2015 p_xran_fh_cfg->ru_conf.xranCompHdrType = p_o_xu_cfg->CompHdrType;
2016 p_xran_fh_cfg->ru_conf.xranCat = p_o_xu_cfg->xranCat;
2017 p_xran_fh_cfg->ru_conf.iqWidth = p_o_xu_cfg->p_PrbMapDl->prbMap[0].iqWidth;
2019 if (p_o_xu_cfg->compression == 0)
2020 p_xran_fh_cfg->ru_conf.compMeth = XRAN_COMPMETHOD_NONE;
2022 p_xran_fh_cfg->ru_conf.compMeth = XRAN_COMPMETHOD_BLKFLOAT;
/* PRACH compression; uncompressed PRACH forces 16-bit IQ samples. */
2024 p_xran_fh_cfg->ru_conf.compMeth_PRACH = p_o_xu_cfg->prachCompMethod;
2025 if (p_o_xu_cfg->prachCompMethod == 0)
2026 p_o_xu_cfg->prachiqWidth = 16;
2027 p_xran_fh_cfg->ru_conf.iqWidth_PRACH = p_o_xu_cfg->prachiqWidth;
/* fftSize = log2(nULFftSize), computed by counting right shifts.
 * NOTE(review): this destructively shifts p_o_xu_cfg->nULFftSize to 0. */
2030 p_xran_fh_cfg->ru_conf.fftSize = 0;
2031 while (p_o_xu_cfg->nULFftSize >>= 1)
2032 ++p_xran_fh_cfg->ru_conf.fftSize;
2034 p_xran_fh_cfg->ru_conf.byteOrder = (p_o_xu_cfg->nebyteorderswap == 1) ? XRAN_NE_BE_BYTE_ORDER : XRAN_CPU_LE_BYTE_ORDER ;
2035 p_xran_fh_cfg->ru_conf.iqOrder = (p_o_xu_cfg->iqswap == 1) ? XRAN_Q_I_ORDER : XRAN_I_Q_ORDER;
2037 printf("FFT Order %d\n", p_xran_fh_cfg->ru_conf.fftSize);
/* Center frequency = Point A + half the carrier bandwidth (in subcarriers
 * times SCS), then converted to an NR-ARFCN. */
2039 nCenterFreq = p_o_xu_cfg->nDLAbsFrePointA + (((p_xran_fh_cfg->nDLRBs * N_SC_PER_PRB) / 2) * app_xran_get_scs(p_o_xu_cfg->mu_number));
2040 p_xran_fh_cfg->nDLCenterFreqARFCN = app_xran_cal_nrarfcn(nCenterFreq);
2041 printf("DL center freq %d DL NR-ARFCN %d\n", nCenterFreq, p_xran_fh_cfg->nDLCenterFreqARFCN);
2043 nCenterFreq = p_o_xu_cfg->nULAbsFrePointA + (((p_xran_fh_cfg->nULRBs * N_SC_PER_PRB) / 2) * app_xran_get_scs(p_o_xu_cfg->mu_number));
2044 p_xran_fh_cfg->nULCenterFreqARFCN = app_xran_cal_nrarfcn(nCenterFreq);
2045 printf("UL center freq %d UL NR-ARFCN %d\n", nCenterFreq, p_xran_fh_cfg->nULCenterFreqARFCN);
/* No bbdev (FEC accelerator) hooks in this sample configuration. */
2047 p_xran_fh_cfg->bbdev_dec = NULL;
2048 p_xran_fh_cfg->bbdev_enc = NULL;
2050 p_xran_fh_cfg->log_level = 1;
/* Clamp section counts to the library minimum. */
2052 p_xran_fh_cfg->max_sections_per_slot = RTE_MAX(p_o_xu_cfg->max_sections_per_slot, XRAN_MIN_SECTIONS_PER_SLOT);
2053 p_xran_fh_cfg->max_sections_per_symbol = RTE_MAX(p_o_xu_cfg->max_sections_per_symbol, XRAN_MIN_SECTIONS_PER_SLOT);
2055 printf("Max Sections: %d per symb %d per slot\n", p_xran_fh_cfg->max_sections_per_slot, p_xran_fh_cfg->max_sections_per_symbol);
2056 if(p_o_xu_cfg->maxFrameId)
2057 p_xran_fh_cfg->ru_conf.xran_max_frame = p_o_xu_cfg->maxFrameId;
/* O-RAN fronthaul timing windows (T1a/T2a/Ta3/Ta4, CP/UP, DL/UL). */
2059 p_xran_fh_cfg->Tadv_cp_dl = p_o_xu_cfg->Tadv_cp_dl;
2060 p_xran_fh_cfg->T2a_min_cp_dl = p_o_xu_cfg->T2a_min_cp_dl;
2061 p_xran_fh_cfg->T2a_max_cp_dl = p_o_xu_cfg->T2a_max_cp_dl;
2062 p_xran_fh_cfg->T2a_min_cp_ul = p_o_xu_cfg->T2a_min_cp_ul;
2063 p_xran_fh_cfg->T2a_max_cp_ul = p_o_xu_cfg->T2a_max_cp_ul;
2064 p_xran_fh_cfg->T2a_min_up = p_o_xu_cfg->T2a_min_up;
2065 p_xran_fh_cfg->T2a_max_up = p_o_xu_cfg->T2a_max_up;
2066 p_xran_fh_cfg->Ta3_min = p_o_xu_cfg->Ta3_min;
2067 p_xran_fh_cfg->Ta3_max = p_o_xu_cfg->Ta3_max;
2068 p_xran_fh_cfg->T1a_min_cp_dl = p_o_xu_cfg->T1a_min_cp_dl;
2069 p_xran_fh_cfg->T1a_max_cp_dl = p_o_xu_cfg->T1a_max_cp_dl;
2070 p_xran_fh_cfg->T1a_min_cp_ul = p_o_xu_cfg->T1a_min_cp_ul;
2071 p_xran_fh_cfg->T1a_max_cp_ul = p_o_xu_cfg->T1a_max_cp_ul;
2072 p_xran_fh_cfg->T1a_min_up = p_o_xu_cfg->T1a_min_up;
2073 p_xran_fh_cfg->T1a_max_up = p_o_xu_cfg->T1a_max_up;
2074 p_xran_fh_cfg->Ta4_min = p_o_xu_cfg->Ta4_min;
2075 p_xran_fh_cfg->Ta4_max = p_o_xu_cfg->Ta4_max;
/* Feature enables and debug controls. */
2077 p_xran_fh_cfg->enableCP = p_o_xu_cfg->enableCP;
2078 p_xran_fh_cfg->prachEnable = p_o_xu_cfg->enablePrach;
2079 p_xran_fh_cfg->srsEnable = p_o_xu_cfg->enableSrs;
2080 p_xran_fh_cfg->puschMaskEnable = p_o_xu_cfg->puschMaskEnable;
2081 p_xran_fh_cfg->puschMaskSlot = p_o_xu_cfg->puschMaskSlot;
2082 p_xran_fh_cfg->debugStop = p_o_xu_cfg->debugStop;
2083 p_xran_fh_cfg->debugStopCount = p_o_xu_cfg->debugStopCount;
2084 p_xran_fh_cfg->DynamicSectionEna = p_o_xu_cfg->DynamicSectionEna;
2085 p_xran_fh_cfg->GPS_Alpha = p_o_xu_cfg->GPS_Alpha;
2086 p_xran_fh_cfg->GPS_Beta = p_o_xu_cfg->GPS_Beta;
/* Separate VLAN tags for C-plane and U-plane traffic. */
2088 p_xran_fh_cfg->cp_vlan_tag = p_o_xu_cfg->cp_vlan_tag;
2089 p_xran_fh_cfg->up_vlan_tag = p_o_xu_cfg->up_vlan_tag;
/*
 * Populate the global xran_fh_init structure: DPDK I/O core layout,
 * one-way-delay-measurement (eOWD) settings, eAxC ID layout, the
 * VF/PCIe-device mapping for C-/U-plane traffic, O-DU/O-RU MAC
 * addresses, the WLS shared-memory prefix and per-O-xU buffer contexts.
 *
 * NOTE(review): loop/brace lines are elided in this excerpt; the index
 * `i` used in the VF mapping is presumably incremented per PF link —
 * TODO confirm against the full source.
 */
2096 app_io_xran_fh_init_init(UsecaseConfig* p_use_cfg, RuntimeConfig* p_o_xu_cfg, struct xran_fh_init* p_xran_fh_init)
2100 int32_t o_xu_id = 0;
2101 int32_t pf_link_id = 0;
/* Default: two VFs per port pair — one for C-plane, one for U-plane. */
2102 int32_t num_vfs_cu_p = 2;
2105 memset(p_xran_fh_init, 0, sizeof(struct xran_fh_init));
/* O-DU side: id 0; cores and eOWD parameters from the usecase config. */
2107 if(p_o_xu_cfg->appMode == APP_O_DU) {
2108 printf("set O-DU\n");
2109 p_xran_fh_init->io_cfg.id = 0;/* O-DU */
2110 p_xran_fh_init->io_cfg.core = p_use_cfg->io_core;
2111 p_xran_fh_init->io_cfg.system_core = p_use_cfg->system_core;
2112 p_xran_fh_init->io_cfg.pkt_proc_core = p_use_cfg->io_worker; /* do not start */
2113 p_xran_fh_init->io_cfg.pkt_proc_core_64_127 = p_use_cfg->io_worker_64_127;
2114 p_xran_fh_init->io_cfg.pkt_aux_core = 0; /* do not start*/
2115 p_xran_fh_init->io_cfg.timing_core = p_use_cfg->io_core;
2116 p_xran_fh_init->io_cfg.dpdkIoVaMode = p_use_cfg->iova_mode;
/* eCPRI one-way delay measurement (eOWD) settings for the O-DU role. */
2117 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].initiator_en = p_use_cfg->owdmInitEn;
2118 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].measMethod = p_use_cfg->owdmMeasMeth;
2119 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].numberOfSamples = p_use_cfg->owdmNumSamps;
2120 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].filterType = p_use_cfg->owdmFltType;
2121 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].responseTo = p_use_cfg->owdmRspTo;
2122 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].measState = p_use_cfg->owdmMeasState;
2123 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].measId = p_use_cfg->owdmMeasId;
2124 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].owdm_enable = p_use_cfg->owdmEnable;
2125 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_DU].owdm_PlLength = p_use_cfg->owdmPlLength;
/* O-RU side: id 1; mirrors the O-DU setup minus the 64-127 worker set. */
2128 printf("set O-RU\n");
2129 p_xran_fh_init->io_cfg.id = 1; /* O-RU*/
2130 p_xran_fh_init->io_cfg.core = p_use_cfg->io_core;
2131 p_xran_fh_init->io_cfg.system_core = p_use_cfg->system_core;
2132 p_xran_fh_init->io_cfg.pkt_proc_core = p_use_cfg->io_worker; /* do not start */
2133 p_xran_fh_init->io_cfg.pkt_aux_core = 0; /* do not start */
2134 p_xran_fh_init->io_cfg.timing_core = p_use_cfg->io_core;
2135 p_xran_fh_init->io_cfg.dpdkIoVaMode = p_use_cfg->iova_mode;
2136 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].initiator_en = p_use_cfg->owdmInitEn;
2137 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].measMethod = p_use_cfg->owdmMeasMeth;
2138 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].numberOfSamples = p_use_cfg->owdmNumSamps;
2139 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].filterType = p_use_cfg->owdmFltType;
2140 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].responseTo = p_use_cfg->owdmRspTo;
2141 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].measState = p_use_cfg->owdmMeasState;
2142 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].measId = p_use_cfg->owdmMeasId;
2143 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].owdm_enable = p_use_cfg->owdmEnable;
2144 p_xran_fh_init->io_cfg.eowd_cmn[APP_O_RU].owdm_PlLength = p_use_cfg->owdmPlLength;
/* Role-independent I/O settings. */
2147 p_xran_fh_init->io_cfg.io_sleep = p_use_cfg->io_sleep;
2148 p_xran_fh_init->io_cfg.dpdkMemorySize = p_use_cfg->dpdk_mem_sz;
2149 p_xran_fh_init->io_cfg.bbdev_mode = XRAN_BBDEV_NOT_USED;
2151 p_xran_fh_init->xran_ports = p_use_cfg->oXuNum;
2152 p_xran_fh_init->io_cfg.nEthLinePerPort = p_use_cfg->EthLinesNumber;
2153 p_xran_fh_init->io_cfg.nEthLineSpeed = p_use_cfg->EthLinkSpeed;
/* Compute the eAxC ID bit layout before wiring up the VFs. */
2155 app_io_xran_eAxCid_conf_set(&p_xran_fh_init->eAxCId_conf, p_o_xu_cfg);
/* When C- and U-plane share one VF, presumably num_vfs_cu_p drops to 1
 * (elided here) — TODO confirm. */
2158 if(p_use_cfg->one_vf_cu_plane == 1){
/* Map each O-xU's PF links to DPDK VF device slots: even slot carries
 * U-plane, odd slot carries C-plane when planes are split. */
2162 for(o_xu_id = 0; o_xu_id < p_use_cfg->oXuNum; o_xu_id++ ) { /* all O-XU */
2163 for(pf_link_id = 0; pf_link_id < p_use_cfg->EthLinesNumber && pf_link_id < XRAN_ETH_PF_LINKS_NUM; pf_link_id++ ) { /* all PF ports for each O-XU */
2164 if(num_vfs_cu_p*i < (XRAN_VF_MAX - 1)) {
2165 p_xran_fh_init->io_cfg.dpdk_dev[num_vfs_cu_p*i] = &p_use_cfg->o_xu_pcie_bus_addr[o_xu_id][num_vfs_cu_p*pf_link_id][0]; /* U-Plane */
2166 rte_ether_addr_copy(&p_use_cfg->remote_o_xu_addr[o_xu_id][num_vfs_cu_p*pf_link_id], &p_use_cfg->remote_o_xu_addr_copy[num_vfs_cu_p*i]);
2167 printf("VF[%d] %s\n",num_vfs_cu_p*i, p_xran_fh_init->io_cfg.dpdk_dev[num_vfs_cu_p*i]);
2168 if(p_use_cfg->one_vf_cu_plane == 0){
2169 p_xran_fh_init->io_cfg.dpdk_dev[num_vfs_cu_p*i+1] = &p_use_cfg->o_xu_pcie_bus_addr[o_xu_id][num_vfs_cu_p*pf_link_id+1][0]; /* C-Plane */
2170 rte_ether_addr_copy(&p_use_cfg->remote_o_xu_addr[o_xu_id][num_vfs_cu_p*pf_link_id+1], &p_use_cfg->remote_o_xu_addr_copy[num_vfs_cu_p*i+1]);
2171 printf("VF[%d] %s\n",num_vfs_cu_p*i+1, p_xran_fh_init->io_cfg.dpdk_dev[num_vfs_cu_p*i+1]);
/* Total VF count: one per link when planes share a VF, else two. */
2180 p_xran_fh_init->io_cfg.one_vf_cu_plane = p_use_cfg->one_vf_cu_plane;
2182 if(p_xran_fh_init->io_cfg.one_vf_cu_plane) {
2183 p_use_cfg->num_vfs = i;
2185 p_use_cfg->num_vfs = 2*i;
2187 printf("p_use_cfg->num_vfs %d\n", p_use_cfg->num_vfs);
2188 printf("p_use_cfg->num_rxq %d\n", p_use_cfg->num_rxq);
2190 p_xran_fh_init->io_cfg.num_vfs = p_use_cfg->num_vfs;
2191 p_xran_fh_init->io_cfg.num_rxq = p_use_cfg->num_rxq;
2192 p_xran_fh_init->mtu = p_o_xu_cfg->mtu;
/* Local/remote MAC assignment depends on which end we are. */
2193 if(p_use_cfg->appMode == APP_O_DU){
2194 p_xran_fh_init->p_o_du_addr = (int8_t *)p_o_xu_cfg->o_du_addr;
2195 p_xran_fh_init->p_o_ru_addr = (int8_t *)p_use_cfg->remote_o_xu_addr_copy;
2197 p_xran_fh_init->p_o_du_addr = (int8_t *)p_use_cfg->remote_o_xu_addr_copy;
2198 p_xran_fh_init->p_o_ru_addr = (int8_t *)p_o_xu_cfg->o_ru_addr;
/* Unique WLS shared-memory prefix per app instance. */
2201 snprintf(p_use_cfg->prefix_name, sizeof(p_use_cfg->prefix_name), "wls_%d",p_use_cfg->instance_id);
2202 p_xran_fh_init->filePrefix = p_use_cfg->prefix_name;
2203 p_xran_fh_init->totalBfWeights = p_o_xu_cfg->totalBfWeights;
/* Lazily allocate one cache-aligned buffer context per O-xU port. */
2206 for(o_xu_id = 0; o_xu_id < p_use_cfg->oXuNum && o_xu_id < XRAN_PORTS_NUM; o_xu_id++ ) { /* all O-XU */
2207 if(p_o_xu_buff[o_xu_id] == NULL) {
2208 ptr = _mm_malloc(sizeof(struct o_xu_buffers), 256);
2210 rte_panic("_mm_malloc: Can't allocate %lu bytes\n", sizeof(struct o_xu_buffers));
2212 p_o_xu_buff[o_xu_id] = (struct o_xu_buffers*)ptr;
2215 p_o_xu_cfg->p_buff = p_o_xu_buff[o_xu_id];
/*
 * Size the global FH RX/TX and PRACH RX buffer lengths
 * (nFpgaToSW_FTH_RxBufferLen / nFpgaToSW_PRACH_RxBufferLen /
 * nSW_ToFpga_FTH_TxBufferLen) based on numerology and MTU.
 *
 * With jumbo frames (mtu > XRAN_MTU_DEFAULT) the RX buffers hold a full
 * symbol worth of IQ data; otherwise they are capped at the default MTU.
 * The TX buffer additionally reserves per-section Ethernet + eCPRI +
 * radio-app + data-section header overhead for up to
 * max_sections_per_slot sections.
 */
2223 app_io_xran_buffers_max_sz_set (RuntimeConfig* p_o_xu_cfg)
/* Same clamp the library applies, so buffer sizing matches fh_config. */
2225 uint32_t xran_max_sections_per_slot = RTE_MAX(p_o_xu_cfg->max_sections_per_slot, XRAN_MIN_SECTIONS_PER_SLOT);
/* mu 0/1 (15/30 kHz SCS): up to 273 PRBs -> 13168 = 273*12*4 + 64. */
2227 if (p_o_xu_cfg->mu_number <= 1){
2228 if (p_o_xu_cfg->mtu > XRAN_MTU_DEFAULT) {
2229 nFpgaToSW_FTH_RxBufferLen = 13168; /* 273*12*4 + 64*/
2230 nFpgaToSW_PRACH_RxBufferLen = 8192;
2231 nSW_ToFpga_FTH_TxBufferLen = 13168 + /* 273*12*4 + 64* + ETH AND ORAN HDRs */
2232 xran_max_sections_per_slot* (RTE_PKTMBUF_HEADROOM + sizeof(struct rte_ether_hdr) +
2233 sizeof(struct xran_ecpri_hdr) +
2234 sizeof(struct radio_app_common_hdr) +
2235 sizeof(struct data_section_hdr));
/* Standard MTU: RX is MTU-bound; TX still sized for a full symbol. */
2237 nFpgaToSW_FTH_RxBufferLen = XRAN_MTU_DEFAULT; /* 273*12*4 + 64*/
2238 nFpgaToSW_PRACH_RxBufferLen = XRAN_MTU_DEFAULT;
2239 nSW_ToFpga_FTH_TxBufferLen = 13168 + /* 273*12*4 + 64* + ETH AND ORAN HDRs */
2240 xran_max_sections_per_slot* (RTE_PKTMBUF_HEADROOM + sizeof(struct rte_ether_hdr) +
2241 sizeof(struct xran_ecpri_hdr) +
2242 sizeof(struct radio_app_common_hdr) +
2243 sizeof(struct data_section_hdr));
/* mu 3 (120 kHz SCS, mmWave): smaller per-symbol payload (3328 bytes). */
2245 } else if (p_o_xu_cfg->mu_number == 3) {
2246 if (p_o_xu_cfg->mtu > XRAN_MTU_DEFAULT) {
2247 nFpgaToSW_FTH_RxBufferLen = 3328;
2248 nFpgaToSW_PRACH_RxBufferLen = 8192;
2249 nSW_ToFpga_FTH_TxBufferLen = 3328 +
2250 xran_max_sections_per_slot * (RTE_PKTMBUF_HEADROOM + sizeof(struct rte_ether_hdr) +
2251 sizeof(struct xran_ecpri_hdr) +
2252 sizeof(struct radio_app_common_hdr) +
2253 sizeof(struct data_section_hdr));
2255 nFpgaToSW_FTH_RxBufferLen = XRAN_MTU_DEFAULT;
2256 nFpgaToSW_PRACH_RxBufferLen = XRAN_MTU_DEFAULT;
2257 nSW_ToFpga_FTH_TxBufferLen = 3328 +
2258 xran_max_sections_per_slot * (RTE_PKTMBUF_HEADROOM + sizeof(struct rte_ether_hdr) +
2259 sizeof(struct xran_ecpri_hdr) +
2260 sizeof(struct radio_app_common_hdr) +
2261 sizeof(struct data_section_hdr));
/* mu 2 and anything else is unsupported by this sample app. */
2264 printf("given numerology is not supported %d\n", p_o_xu_cfg->mu_number);
2267 printf("nSW_ToFpga_FTH_TxBufferLen %d\n", nSW_ToFpga_FTH_TxBufferLen);