/* INTC Contribution to the O-RAN F Release for O-DU Low
 * Source file: fhi_lib/lib/src/xran_tx_proc.c (o-du/phy.git) */
1 /******************************************************************************
2 *
3 *   Copyright (c) 2020 Intel.
4 *
5 *   Licensed under the Apache License, Version 2.0 (the "License");
6 *   you may not use this file except in compliance with the License.
7 *   You may obtain a copy of the License at
8 *
9 *       http://www.apache.org/licenses/LICENSE-2.0
10 *
11 *   Unless required by applicable law or agreed to in writing, software
12 *   distributed under the License is distributed on an "AS IS" BASIS,
13 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 *   See the License for the specific language governing permissions and
15 *   limitations under the License.
16 *
17 *******************************************************************************/
18
19 /**
20  * @brief XRAN TX functionality
21  * @file xran_tx.c
22  * @ingroup group_source_xran
23  * @author Intel Corporation
24  **/
25
26 #define _GNU_SOURCE
27 #include <sched.h>
28 #include <assert.h>
29 #include <err.h>
30 #include <libgen.h>
31 #include <sys/time.h>
32 #include <sys/queue.h>
33 #include <time.h>
34 #include <unistd.h>
35 #include <stdio.h>
36 #include <pthread.h>
37 #include <malloc.h>
38 #include <immintrin.h>
39
40 #include <rte_common.h>
41 #include <rte_eal.h>
42 #include <rte_errno.h>
43 #include <rte_lcore.h>
44 #include <rte_cycles.h>
45 #include <rte_memory.h>
46 #include <rte_memzone.h>
47 #include <rte_mbuf.h>
48 #include <rte_ring.h>
49 #include <rte_ethdev.h>
50
51 #include "xran_fh_o_du.h"
52
53 #include "ethdi.h"
54 #include "xran_pkt.h"
55 #include "xran_up_api.h"
56 #include "xran_cp_api.h"
57 #include "xran_sync_api.h"
58 #include "xran_lib_mlog_tasks_id.h"
59 #include "xran_timer.h"
60 #include "xran_main.h"
61 #include "xran_common.h"
62 #include "xran_dev.h"
63 #include "xran_frame_struct.h"
64 #include "xran_printf.h"
65 #include "xran_tx_proc.h"
66 #include "xran_cp_proc.h"
67
68 #include "xran_mlog_lnx.h"
69
/* Period classification for an offset-adjusted symbol index relative to the
 * over-the-air (OTA) symbol: after applying an offset the result may wrap
 * into the previous period, stay in the current one, or spill into the next.
 * See XranOffsetSym() below. */
enum xran_in_period
{
     XRAN_IN_PREV_PERIOD  = 0,
     XRAN_IN_CURR_PERIOD,
     XRAN_IN_NEXT_PERIOD
};
76
/* Set elsewhere once the first timing tick has been processed — defined in
 * another translation unit (presumably xran_main.c; TODO confirm). */
extern int32_t first_call;

/* Forward declaration — attaches an application-owned IQ region to a freshly
 * allocated mbuf as an external buffer; definition appears later in this file. */
struct rte_mbuf *
xran_attach_up_ext_buf(uint16_t vf_id, int8_t* p_ext_buff_start, int8_t* p_ext_buff, uint16_t ext_buff_len,
                struct rte_mbuf_ext_shared_info * p_share_data,
                enum xran_compression_method compMeth, enum xran_comp_hdr_type staticEn);
83
84
85 static void
86 extbuf_free_callback(void *addr __rte_unused, void *opaque __rte_unused)
87 {
88     /*long t1 = MLogTick();
89     MLogTask(77777, t1, t1+100);*/
90 }
91
92 static inline int32_t XranOffsetSym(int32_t offSym, int32_t otaSym, int32_t numSymTotal, enum xran_in_period* pInPeriod)
93 {
94     int32_t sym;
95
96     // Suppose the offset is usually small
97     if (unlikely(offSym > otaSym))
98     {
99         sym = numSymTotal - offSym + otaSym;
100         *pInPeriod = XRAN_IN_PREV_PERIOD;
101     }
102     else
103     {
104         sym = otaSym - offSym;
105
106         if (unlikely(sym >= numSymTotal))
107         {
108             sym -= numSymTotal;
109             *pInPeriod = XRAN_IN_NEXT_PERIOD;
110         }
111         else
112         {
113             *pInPeriod = XRAN_IN_CURR_PERIOD;
114         }
115     }
116
117     return sym;
118 }
119
120 // Return SFN at current second start, 10 bits, [0, 1023]
121 uint16_t xran_getSfnSecStart(void)
122 {
123     return xran_SFN_at_Sec_Start;
124 }
125
126 /* Send burst of packets on an output interface */
127 static inline int
128 xran_send_burst(struct xran_device_ctx *dev, struct mbuf_table* p_m_table, uint16_t port)
129 {
130     struct xran_common_counters *  pCnt  = NULL;
131     struct rte_mbuf **m_table;
132     int32_t i   = 0;
133     int32_t n   = 0;
134     int32_t ret = 0;
135
136     if(dev)
137         pCnt = &dev->fh_counters;
138     else
139         rte_panic("incorrect dev\n");
140
141     m_table = p_m_table->m_table;
142     n       = p_m_table->len;
143
144     for(i = 0; i < n; i++) {
145         /*rte_mbuf_sanity_check(m_table[i], 0);*/
146         /*rte_pktmbuf_dump(stdout, m_table[i], 256);*/
147         pCnt->tx_counter++;
148         pCnt->tx_bytes_counter += rte_pktmbuf_pkt_len(m_table[i]);
149         ret += dev->send_upmbuf2ring(m_table[i], ETHER_TYPE_ECPRI, port);
150     }
151
152     if (unlikely(ret < n)) {
153         print_err("core %d [p: %d-> vf %d] ret [%d] < n[%d] enq %ld\n",
154              rte_lcore_id(), dev->xran_port_id, port, ret, n, pCnt->tx_counter);
155     }
156
157     return 0;
158 }
159
160 /* Send a single 5G symbol over multiple packets */
161 static inline int32_t prepare_symbol_opt(enum xran_pkt_dir direction,
162                 uint16_t section_id,
163                 struct rte_mbuf *mb,
164                 struct rb_map *data,
165                 uint8_t compMeth,
166                 uint8_t iqWidth,
167                 const enum xran_input_byte_order iq_buf_byte_order,
168                 int prb_start,
169                 int prb_num,
170                 uint8_t CC_ID,
171                 uint8_t RU_Port_ID,
172                 uint8_t seq_id,
173                 uint32_t do_copy,
174                 struct xran_up_pkt_gen_params *xp,
175                 enum xran_comp_hdr_type staticEn)
176 {
177     int parm_size;
178     int32_t n_bytes;
179     int32_t prep_bytes;
180     int16_t nPktSize;
181
182     iqWidth = (iqWidth==0) ? 16 : iqWidth;
183     switch(compMeth) {
184         case XRAN_COMPMETHOD_BLKFLOAT:      parm_size = 1; break;
185         case XRAN_COMPMETHOD_MODULATION:    parm_size = 0; break;
186         default:
187             parm_size = 0;
188         }
189     n_bytes = (3 * iqWidth + parm_size) * prb_num;
190     n_bytes = RTE_MIN(n_bytes, XRAN_MAX_MBUF_LEN);
191
192     nPktSize = sizeof(struct rte_ether_hdr)
193                 + sizeof(struct xran_ecpri_hdr)
194                 + sizeof(struct radio_app_common_hdr)
195                 + sizeof(struct data_section_hdr)
196                 + n_bytes;
197     if ((compMeth != XRAN_COMPMETHOD_NONE)&&(staticEn ==XRAN_COMP_HDR_TYPE_DYNAMIC))
198         nPktSize += sizeof(struct data_section_compression_hdr);
199
200
201 #if 0
202     /* radio app header */
203     xp->app_params.data_direction = direction;
204     xp->app_params.payl_ver       = 1;
205     xp->app_params.filter_id      = 0;
206     xp->app_params.frame_id       = frame_id;
207     xp->app_params.sf_slot_sym.subframe_id    = subframe_id;
208     xp->app_params.sf_slot_sym.slot_id        = xran_slotid_convert(slot_id, 0);
209     xp->app_params.sf_slot_sym.symb_id        = symbol_no;
210
211     /* convert to network byte order */
212     xp->app_params.sf_slot_sym.value = rte_cpu_to_be_16(xp->app_params.sf_slot_sym.value);
213 #endif
214
215     xp->sec_hdr.fields.sect_id    = section_id;
216     xp->sec_hdr.fields.num_prbu   = (uint8_t)XRAN_CONVERT_NUMPRBC(prb_num);
217     xp->sec_hdr.fields.start_prbu = (uint8_t)prb_start;
218     xp->sec_hdr.fields.sym_inc    = 0;
219     xp->sec_hdr.fields.rb         = 0;
220
221
222     /* compression */
223     xp->compr_hdr_param.ud_comp_hdr.ud_comp_meth = compMeth;
224     xp->compr_hdr_param.ud_comp_hdr.ud_iq_width  = XRAN_CONVERT_IQWIDTH(iqWidth);
225     xp->compr_hdr_param.rsrvd                    = 0;
226
227     /* network byte order */
228     xp->sec_hdr.fields.all_bits  = rte_cpu_to_be_32(xp->sec_hdr.fields.all_bits);
229
230     if (mb == NULL){
231         MLogPrint(NULL);
232         errx(1, "out of mbufs after %d packets", 1);
233     }
234
235     prep_bytes = xran_prepare_iq_symbol_portion(mb,
236                                                 data,
237                                                 iq_buf_byte_order,
238                                                 n_bytes,
239                                                 xp,
240                                                 CC_ID,
241                                                 RU_Port_ID,
242                                                 seq_id,
243                                                 staticEn,
244                                                 do_copy,
245                                                 1,
246                                                 section_id,
247                                                 0);
248     if (prep_bytes <= 0)
249         errx(1, "failed preparing symbol");
250
251     rte_pktmbuf_pkt_len(mb)  = nPktSize;
252     rte_pktmbuf_data_len(mb) = nPktSize;
253
254 #ifdef DEBUG
255     printf("Symbol %2d prep_bytes (%d packets, %d bytes)\n", symbol_no, i, n_bytes);
256 #endif
257
258     return prep_bytes;
259 }
260
/**
 * Transmit U-plane IQ data for one symbol when C-Plane is disabled ("CP off").
 *
 * For every PRB-map element active on this symbol, the pre-packed IQ region in
 * the application's TX buffer is attached zero-copy to a freshly allocated
 * mbuf as an external buffer, an Ethernet header is prepended, the ORAN
 * headers are filled by prepare_symbol_ex(), and the packet is enqueued on
 * the per-VF TX ring.
 *
 * @param pHandle  device context (struct xran_device_ctx *)
 * @param ctx_id   context index — not used in this path
 * @param tti      TTI counter; buffers are indexed by tti % XRAN_N_FE_BUF_LEN
 * @param do_srs   not used in this path
 * @return 0 always (including when the slot/symbol type does not match)
 */
int32_t xran_process_tx_sym_cp_off(void *pHandle, uint8_t ctx_id, uint32_t tti, int32_t cc_id, int32_t ant_id, uint32_t frame_id, uint32_t subframe_id, uint32_t slot_id, uint32_t sym_id,
    int32_t do_srs)
{
    int32_t     retval = 0;
    char        *pos = NULL;
    char        *p_sec_iq = NULL;
    void        *mb  = NULL;
    void        *send_mb  = NULL;
    // int         prb_num = 0;
    uint16_t    vf_id = 0 , num_sections = 0, curr_sect_id = 0 ;

    struct xran_prb_map *prb_map = NULL;
    //uint8_t  num_ant_elm  = 0;

    struct xran_device_ctx * p_xran_dev_ctx = (struct xran_device_ctx *)pHandle;
    if (p_xran_dev_ctx == NULL)
        return retval;
    struct xran_common_counters * pCnt = &p_xran_dev_ctx->fh_counters;
    //struct xran_prach_cp_config *pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
    //struct xran_srs_config *p_srs_cfg = &(p_xran_dev_ctx->srs_cfg);

    //num_ant_elm = xran_get_num_ant_elm(pHandle);
    enum xran_pkt_dir direction;
    enum xran_comp_hdr_type staticEn = XRAN_COMP_HDR_TYPE_DYNAMIC;

    struct rte_mbuf *eth_oran_hdr = NULL;
    char        *ext_buff = NULL;
    uint16_t    ext_buff_len = 0;
    struct rte_mbuf *tmp = NULL;
    rte_iova_t ext_buff_iova = 0;
    uint8_t PortId = p_xran_dev_ctx->xran_port_id;

    /* static vs dynamic compression headers, per RU configuration */
    staticEn = p_xran_dev_ctx->fh_cfg.ru_conf.xranCompHdrType;


    if(PortId >= XRAN_PORTS_NUM)
        rte_panic("incorrect PORT ID\n");

    struct rte_mbuf_ext_shared_info * p_share_data = NULL;
    /* O-DU transmits DL, O-RU transmits UL */
    if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
        direction = XRAN_DIR_DL; /* O-DU */
        //prb_num = p_xran_dev_ctx->fh_cfg.nDLRBs;
    } else {
        direction = XRAN_DIR_UL; /* RU */
        //prb_num = p_xran_dev_ctx->fh_cfg.nULRBs;
    }

    /* only proceed if this slot carries data in our TX direction (or SP/FDD) */
    if(xran_fs_get_slot_type(PortId, cc_id, tti, ((p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)? XRAN_SLOT_TYPE_DL : XRAN_SLOT_TYPE_UL)) ==  1
            || xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_SP) ==  1
            || xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_FDD) ==  1){

        /* ... and the individual symbol matches as well */
        if(xran_fs_get_symbol_type(PortId, cc_id, tti, sym_id) == ((p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)? XRAN_SYMBOL_TYPE_DL : XRAN_SYMBOL_TYPE_UL)
           || xran_fs_get_symbol_type(PortId, cc_id, tti, sym_id) == XRAN_SYMBOL_TYPE_FDD){

            vf_id = xran_map_ecpriPcid_to_vf(p_xran_dev_ctx, direction, cc_id, ant_id);
            /* pos/mb: IQ data and its控制 control mbuf for this (tti, cc, ant, sym) */
            pos = (char*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
            mb  = (void*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;
            prb_map  = (struct xran_prb_map *) p_xran_dev_ctx->sFrontHaulTxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers->pData;


            if(prb_map){
                int32_t elmIdx = 0;
                for (elmIdx = 0; elmIdx < prb_map->nPrbElm && elmIdx < XRAN_MAX_SECTIONS_PER_SLOT; elmIdx++){
                    //print_err("tti is %d, cc_id is %d, ant_id is %d, prb_map->nPrbElm id - %d", tti % XRAN_N_FE_BUF_LEN, cc_id, ant_id, prb_map->nPrbElm);
                    struct xran_prb_elm * prb_map_elm = &prb_map->prbMap[elmIdx];
                    struct xran_section_desc * p_sec_desc = NULL;
                    uint16_t sec_id  = prb_map_elm->nSectId;
                    p_share_data = &p_xran_dev_ctx->share_data.sh_data[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id][sec_id];

                    /* skip elements whose symbol range does not cover sym_id */
                    if(unlikely(sym_id < prb_map_elm->nStartSymb || sym_id >= (prb_map_elm->nStartSymb + prb_map_elm->numSymb)))
                          continue;

                    /* NOTE(review): dead check — prb_map_elm points into prbMap[]
                     * and was already dereferenced above, so it can never be NULL here */
                    if(prb_map_elm == NULL){
                        rte_panic("p_sec_desc == NULL\n");
                    }

                    p_sec_desc = &prb_map_elm->sec_desc[sym_id][0];

                    p_sec_iq     = ((char*)pos + p_sec_desc->iq_buffer_offset);

                    /* calculate offset for external buffer: back up far enough
                     * to leave room for mbuf headroom + eCPRI + radio-app +
                     * section headers in front of the IQ payload */
                    ext_buff_len = p_sec_desc->iq_buffer_len;
                    ext_buff = p_sec_iq - (RTE_PKTMBUF_HEADROOM +
                                    sizeof (struct xran_ecpri_hdr) +
                                    sizeof (struct radio_app_common_hdr) +
                                    sizeof(struct data_section_hdr));

                    /* +18: extra slack, presumably Ethernet header size — TODO confirm */
                    ext_buff_len += RTE_PKTMBUF_HEADROOM +
                                    sizeof (struct xran_ecpri_hdr) +
                                    sizeof (struct radio_app_common_hdr) +
                                    sizeof(struct data_section_hdr) + 18;

                        if ((prb_map_elm->compMethod != XRAN_COMPMETHOD_NONE)&&(staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC)){
                        ext_buff     -= sizeof (struct data_section_compression_hdr);
                        ext_buff_len += sizeof (struct data_section_compression_hdr);
                    }

                    /* small mbuf used only as header carrier; IQ stays external */
                    eth_oran_hdr = rte_pktmbuf_alloc(_eth_mbuf_pool_vf_small[vf_id]);
                    if (unlikely (( eth_oran_hdr) == NULL)) {
                        rte_panic("Failed rte_pktmbuf_alloc\n");
                    }

                    p_share_data->free_cb = extbuf_free_callback;
                    p_share_data->fcb_opaque = NULL;
                    rte_mbuf_ext_refcnt_set(p_share_data, 1);

                    /* mb is the pCtrl object of the application buffer — its
                     * mempool IOVA anchors the external region (TODO confirm) */
                    ext_buff_iova = rte_mempool_virt2iova(mb);
                    if (unlikely (( ext_buff_iova) == 0)) {
                        rte_panic("Failed rte_mem_virt2iova \n");
                    }

                    if (unlikely (( (rte_iova_t)ext_buff_iova) == RTE_BAD_IOVA)) {
                        rte_panic("Failed rte_mem_virt2iova RTE_BAD_IOVA \n");
                    }

                    rte_pktmbuf_attach_extbuf(eth_oran_hdr,
                                              ext_buff,
                                              ext_buff_iova + RTE_PTR_DIFF(ext_buff , mb),
                                              ext_buff_len,
                                              p_share_data);

                    rte_pktmbuf_reset_headroom(eth_oran_hdr);

                    /* make room for the Ethernet header in front of the ORAN headers */
                    tmp = (struct rte_mbuf *)rte_pktmbuf_prepend(eth_oran_hdr, sizeof(struct rte_ether_hdr));
                    if (unlikely (( tmp) == NULL)) {
                        rte_panic("Failed rte_pktmbuf_prepend \n");
                    }
                    send_mb = eth_oran_hdr;


                    uint8_t seq_id = (p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) ?
                                          xran_get_updl_seqid(pHandle, cc_id, ant_id) :
                                          xran_get_upul_seqid(pHandle, cc_id, ant_id);


                    /* CAT-B O-RU with extType 1 BFWs may emit multiple sections per element */
                    if(p_xran_dev_ctx->fh_init.io_cfg.id == O_RU
                                        && xran_get_ru_category(pHandle) == XRAN_CATEGORY_B)
                    {
                        num_sections = (prb_map_elm->bf_weight.extType == 1) ? prb_map_elm->bf_weight.numSetBFWs : 1 ;
                        if (prb_map_elm->bf_weight.extType != 1) 
                            curr_sect_id = sec_id;
                    }
                    else
                        num_sections = 1;

                    /* first all PRBs */
                    prepare_symbol_ex(direction, curr_sect_id,
                                      send_mb,
                                      (uint8_t *)p_sec_iq,
                                      prb_map_elm->compMethod,
                                      prb_map_elm->iqWidth,
                                      p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
                                      frame_id, subframe_id, slot_id, sym_id,
                                      prb_map_elm->UP_nRBStart, prb_map_elm->UP_nRBSize,
                                      cc_id, ant_id,
                                      seq_id,
                                      0,
                                      staticEn,
                                      num_sections,
                                      p_sec_desc->iq_buffer_offset);

                    curr_sect_id += num_sections;

                    rte_mbuf_sanity_check((struct rte_mbuf *)send_mb, 0);
                    pCnt->tx_counter++;
                    pCnt->tx_bytes_counter += rte_pktmbuf_pkt_len((struct rte_mbuf *)send_mb);
                    p_xran_dev_ctx->send_upmbuf2ring((struct rte_mbuf *)send_mb, ETHER_TYPE_ECPRI, vf_id);
                } /* for (elmIdx = 0; elmIdx < prb_map->nPrbElm && elmIdx < XRAN_MAX_SECTIONS_PER_SLOT; elmIdx++) */
            } else {
                printf("(%d %d %d %d) prb_map == NULL\n", tti % XRAN_N_FE_BUF_LEN, cc_id, ant_id, sym_id);
            }

        } /* RU mode or C-Plane is not used */
    }
    return retval;
}
/**
 * Transmit PRACH U-plane IQ (O-RU side only) when C-Plane is disabled.
 *
 * Selects the PRACH configuration (NR vs LTE when DSS is enabled), checks
 * that this (frame, slot, symbol) falls inside the configured PRACH occasion,
 * and sends the PRACH IQ either as multiple sections (small MTU + long
 * formats) or as a single symbol packet.
 *
 * @return 1 when PRACH data was sent for this symbol, 0 otherwise
 */
int32_t xran_process_tx_prach_cp_off(void *pHandle, uint8_t ctx_id, uint32_t tti, int32_t cc_id, int32_t ant_id, uint32_t frame_id, uint32_t subframe_id, uint32_t slot_id, uint32_t sym_id)
{
    int32_t     retval = 0;
    char        *pos = NULL;
    void        *mb  = NULL;

    struct xran_device_ctx * p_xran_dev_ctx = (struct xran_device_ctx *)pHandle;
    if (p_xran_dev_ctx == NULL)
        return retval;

    /* DSS: alternate between NR and LTE PRACH configs per TTI within the period */
    struct xran_prach_cp_config *pPrachCPConfig;
    if(p_xran_dev_ctx->dssEnable){
        int i = tti % p_xran_dev_ctx->dssPeriod;
        if(p_xran_dev_ctx->technology[i]==1) {
            pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
        }
        else{
            pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfigLTE);
        }
    }
    else{
        pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
    }

    enum xran_pkt_dir direction = XRAN_DIR_UL;
    uint8_t PortId = p_xran_dev_ctx->xran_port_id;


    if(PortId >= XRAN_PORTS_NUM)
        rte_panic("incorrect PORT ID\n");


    /* only the O-RU sends PRACH I/Q, and only for supported antenna indices */
    if(p_xran_dev_ctx->enablePrach
          && (p_xran_dev_ctx->fh_init.io_cfg.id == O_RU) && (ant_id < XRAN_MAX_PRACH_ANT_NUM)){

        if(xran_fs_get_symbol_type(PortId, cc_id, tti, sym_id) == XRAN_SYMBOL_TYPE_UL
          || xran_fs_get_symbol_type(PortId, cc_id, tti, sym_id) == XRAN_SYMBOL_TYPE_FDD) {   /* Only RU needs to send PRACH I/Q */

          uint32_t is_prach_slot = xran_is_prach_slot(PortId, subframe_id, slot_id);

            /* frame/slot/symbol must all fall inside the configured PRACH occasion */
            if(((frame_id % pPrachCPConfig->x) == pPrachCPConfig->y[0])
                    && (is_prach_slot == 1)
                    && (sym_id >= p_xran_dev_ctx->prach_start_symbol[cc_id])
                    && (sym_id <= p_xran_dev_ctx->prach_last_symbol[cc_id])) {
                int prach_port_id = ant_id + pPrachCPConfig->eAxC_offset;
                int compMethod;
                //int parm_size;
                uint8_t symb_id_offset = sym_id - p_xran_dev_ctx->prach_start_symbol[cc_id];

                compMethod = p_xran_dev_ctx->fh_cfg.ru_conf.compMeth_PRACH;
#if 0
                switch(compMethod) {
                    case XRAN_COMPMETHOD_BLKFLOAT:      parm_size = 1; break;
                    case XRAN_COMPMETHOD_MODULATION:    parm_size = 0; break;
                    default:
                        parm_size = 0;
                    }
#endif
                pos = (char*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[symb_id_offset].pData;
                //pos += (sym_id - p_xran_dev_ctx->prach_start_symbol[cc_id]) * pPrachCPConfig->numPrbc * N_SC_PER_PRB * 4;
                /*pos += (sym_id - p_xran_dev_ctx->prach_start_symbol[cc_id])
                        * (3*p_xran_dev_ctx->fh_cfg.ru_conf.iqWidth + parm_size)
                        * pPrachCPConfig->numPrbc;*/
                mb  = NULL;//(void*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[0].pCtrl;

                /* NOTE(review): this inner declaration shadows the identically
                 * selected pPrachCPConfig from the top of the function — the
                 * repeated DSS selection below is redundant */
                struct xran_prach_cp_config *pPrachCPConfig;
                if(p_xran_dev_ctx->dssEnable){
                    int i = tti % p_xran_dev_ctx->dssPeriod;
                    if(p_xran_dev_ctx->technology[i]==1) {
                        pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
                    }
                    else{
                        pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfigLTE);
                    }
                }
                else{
                    pPrachCPConfig = &(p_xran_dev_ctx->PrachCPConfig);
                }
                
                
                /* long PRACH formats do not fit a 1500-byte MTU in one packet */
                if (1500 == p_xran_dev_ctx->fh_init.mtu && pPrachCPConfig->filterIdx == XRAN_FILTERINDEX_PRACH_012)
                {
                    pos = (char*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[0].pData;
                    mb  = (void*) p_xran_dev_ctx->sFHPrachRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[0].pCtrl;
                    /*one prach for more then one pkg*/
                    send_symbol_mult_section_ex(pHandle,
                        direction,
                        xran_alloc_sectionid(pHandle, direction, cc_id, prach_port_id, subframe_id, slot_id),
                        (struct rte_mbuf *)mb,
                        (uint8_t *)pos,
                        compMethod,
                        p_xran_dev_ctx->fh_cfg.ru_conf.iqWidth,
                        p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
                        frame_id, subframe_id, slot_id, sym_id,
                        pPrachCPConfig->startPrbc, pPrachCPConfig->numPrbc,
                        cc_id, prach_port_id,
                        0);
                }
                else{
                    send_symbol_ex(pHandle,
                        direction,
                        xran_alloc_sectionid(pHandle, direction, cc_id, prach_port_id, subframe_id, slot_id),
                        (struct rte_mbuf *)mb,
                        (uint8_t *)pos,
                        compMethod,
                        p_xran_dev_ctx->fh_cfg.ru_conf.iqWidth_PRACH,
                        p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
                        frame_id, subframe_id, slot_id, sym_id,
                        pPrachCPConfig->startPrbc, pPrachCPConfig->numPrbc,
                        cc_id, prach_port_id,
                        xran_get_upul_seqid(pHandle, cc_id, prach_port_id));
                }
                retval = 1;
            }
        } /* if(p_xran_dev_ctx->enablePrach ..... */
    }
  return retval;
}
/**
 * Transmit SRS U-plane IQ (O-RU side only) when C-Plane is disabled.
 *
 * Iterates the SRS PRB map for this (tti, cc, ant-element), attaches each
 * element's IQ region zero-copy to an indirect mbuf, prepends the Ethernet
 * header, prepares the ORAN headers via prepare_symbol_ex(), and enqueues the
 * packet on the mapped VF TX ring.
 *
 * @return 0 always
 */
int32_t
xran_process_tx_srs_cp_off(void *pHandle, uint8_t ctx_id, uint32_t tti, int32_t cc_id, int32_t ant_id,
            uint32_t frame_id, uint32_t subframe_id, uint32_t slot_id)
{
    int32_t     retval = 0;
    char        *pos = NULL;
    char        *p_sec_iq = NULL;
    void        *mb  = NULL;
    char        *ext_buff = NULL;
    uint16_t    ext_buff_len = 0 , num_sections=0 , section_id=0;
    int32_t     antElm_eAxC_id;
    uint32_t    vf_id = 0;
    int32_t     elmIdx;
    uint32_t    sym_id;
    enum xran_pkt_dir direction;
    enum xran_comp_hdr_type staticEn;

    rte_iova_t ext_buff_iova = 0;
    struct rte_mbuf *tmp = NULL;
    struct xran_prb_map *prb_map = NULL;
    struct xran_device_ctx * p_xran_dev_ctx;
    struct xran_common_counters *pCnt;
    //struct xran_prach_cp_config *pPrachCPConfig;
    struct xran_srs_config *p_srs_cfg;
    struct rte_mbuf *eth_oran_hdr = NULL;
    struct rte_mbuf_ext_shared_info *p_share_data = NULL;


    p_xran_dev_ctx  = (struct xran_device_ctx *)pHandle;
    if(p_xran_dev_ctx == NULL)
    {
        print_err("dev_ctx is NULL. ctx_id=%d, tti=%d, cc_id=%d, ant_id=%d, frame_id=%d, subframe_id=%d, slot_id=%d\n",
                    ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id);
        return 0;
    }

    if(p_xran_dev_ctx->xran_port_id >= XRAN_PORTS_NUM)
        rte_panic("incorrect PORT ID\n");

    pCnt            = &p_xran_dev_ctx->fh_counters;
    //pPrachCPConfig  = &(p_xran_dev_ctx->PrachCPConfig);
    p_srs_cfg       = &(p_xran_dev_ctx->srs_cfg);

    /* Only O-RU sends SRS U-Plane */
    direction   = XRAN_DIR_UL;
    staticEn    = p_xran_dev_ctx->fh_cfg.ru_conf.xranCompHdrType;
    /* SRS streams occupy an eAxC range starting at the configured offset */
    antElm_eAxC_id  = ant_id + p_srs_cfg->eAxC_offset;

    prb_map = (struct xran_prb_map *)p_xran_dev_ctx->sFHSrsRxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers->pData;
    if(prb_map)
    {
        for(elmIdx = 0; elmIdx < prb_map->nPrbElm && elmIdx < XRAN_MAX_SECTIONS_PER_SLOT; elmIdx++)
        {
            struct xran_prb_elm *prb_map_elm = &prb_map->prbMap[elmIdx];
            struct xran_section_desc * p_sec_desc = NULL;

            /* NOTE(review): dead check — &prbMap[elmIdx] can never be NULL */
            if(prb_map_elm == NULL)
                rte_panic("p_sec_desc == NULL\n");

            /* each SRS element is sent on its own start symbol */
            sym_id  = prb_map->prbMap[elmIdx].nStartSymb;
            pos     = (char*)p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
            mb      = (void*)p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;


            p_share_data    = &p_xran_dev_ctx->srs_share_data.sh_data[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id];
            p_sec_desc      = &prb_map_elm->sec_desc[sym_id][0];
            p_sec_iq        = ((char*)pos + p_sec_desc->iq_buffer_offset);

            /* calculate offset for external buffer: reserve room in front of
             * the IQ data for mbuf headroom + eCPRI + radio-app + section headers */
            ext_buff_len = p_sec_desc->iq_buffer_len;

            ext_buff = p_sec_iq - (RTE_PKTMBUF_HEADROOM +
                            sizeof (struct xran_ecpri_hdr) +
                            sizeof (struct radio_app_common_hdr) +
                            sizeof(struct data_section_hdr));

            /* +18: extra slack, presumably Ethernet header size — TODO confirm */
            ext_buff_len += RTE_PKTMBUF_HEADROOM +
                            sizeof (struct xran_ecpri_hdr) +
                            sizeof (struct radio_app_common_hdr) +
                            sizeof(struct data_section_hdr) + 18;

            if((prb_map_elm->compMethod != XRAN_COMPMETHOD_NONE)
                && (staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC))
            {
                ext_buff     -= sizeof (struct data_section_compression_hdr);
                ext_buff_len += sizeof (struct data_section_compression_hdr);
            }

            eth_oran_hdr = xran_ethdi_mbuf_indir_alloc();
            if(unlikely(eth_oran_hdr == NULL))
                rte_panic("Failed rte_pktmbuf_alloc\n");

            p_share_data->free_cb = extbuf_free_callback;
            p_share_data->fcb_opaque = NULL;
            rte_mbuf_ext_refcnt_set(p_share_data, 1);

            /* mb is the pCtrl object anchoring the IOVA of the IQ region — TODO confirm */
            ext_buff_iova = rte_mempool_virt2iova(mb);
            if(unlikely(ext_buff_iova == 0 || ext_buff_iova == RTE_BAD_IOVA))
                rte_panic("Failed rte_mem_virt2iova : %lu\n", ext_buff_iova);

            rte_pktmbuf_attach_extbuf(eth_oran_hdr,
                                      ext_buff,
                                      ext_buff_iova + RTE_PTR_DIFF(ext_buff , mb),
                                      ext_buff_len,
                                      p_share_data);

            rte_pktmbuf_reset_headroom(eth_oran_hdr);

            tmp = (struct rte_mbuf *)rte_pktmbuf_prepend(eth_oran_hdr, sizeof(struct rte_ether_hdr));
            if(unlikely(tmp == NULL))
                rte_panic("Failed rte_pktmbuf_prepend \n");

            uint8_t seq_id = xran_get_upul_seqid(pHandle, cc_id, antElm_eAxC_id);

            /* extType 1 beamforming weights may require multiple sections */
            num_sections = (prb_map_elm->bf_weight.extType == 1) ? prb_map_elm->bf_weight.numSetBFWs : 1 ;

            prepare_symbol_ex(direction, prb_map_elm->nSectId,
                              (void *)eth_oran_hdr, (uint8_t *)p_sec_iq,
                              prb_map_elm->compMethod, prb_map_elm->iqWidth,
                              p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
                              frame_id, subframe_id, slot_id, sym_id,
                              prb_map_elm->UP_nRBStart, prb_map_elm->UP_nRBSize,
                              cc_id, antElm_eAxC_id,
                              seq_id,
                              0,
                              staticEn,
                              num_sections,
                              0);

            section_id += num_sections;

            rte_mbuf_sanity_check(eth_oran_hdr, 0);

            vf_id = xran_map_ecpriPcid_to_vf(p_xran_dev_ctx, direction, cc_id, antElm_eAxC_id);
            pCnt->tx_counter++;
            pCnt->tx_bytes_counter += rte_pktmbuf_pkt_len(eth_oran_hdr);
            p_xran_dev_ctx->send_upmbuf2ring(eth_oran_hdr, ETHER_TYPE_ECPRI, vf_id);
        } /* for(elmIdx = 0; elmIdx < prb_map->nPrbElm && elmIdx < XRAN_MAX_SECTIONS_PER_SLOT; elmIdx++) */
    } /* if(prb_map) */
    else
    {
        printf("(%d %d %d) prb_map == NULL\n", tti % XRAN_N_FE_BUF_LEN, cc_id, antElm_eAxC_id);
    }

    return retval;
}
701
/**
 * Attach an external (zero-copy) IQ buffer to a freshly allocated small mbuf
 * so that the eCPRI/ORAN headers can be written directly in front of the IQ
 * payload without copying it.
 *
 * The returned mbuf's data area is backed by @p p_ext_buff's memory, rewound
 * far enough before the IQ samples to leave room for headroom + eCPRI header
 * + radio-app header + section header (and optionally the compression
 * header), plus a prepended Ethernet header.
 *
 * @param vf_id           VF index used to pick the small-mbuf pool
 * @param p_ext_buff_start start of the underlying mempool object (used for
 *                         virt-to-IOVA translation of the whole region)
 * @param p_ext_buff      pointer to the IQ payload inside that object
 * @param ext_buff_len    IQ payload length in bytes
 * @param p_share_data    shared-info block required by rte_pktmbuf_attach_extbuf()
 * @param compMeth        compression method of the section
 * @param staticEn        compression header type (static or dynamic)
 * @return the attached mbuf; panics (no error return) on any failure
 */
struct rte_mbuf *
xran_attach_up_ext_buf(uint16_t vf_id, int8_t* p_ext_buff_start, int8_t* p_ext_buff, uint16_t ext_buff_len,
                struct rte_mbuf_ext_shared_info * p_share_data,
                enum xran_compression_method compMeth, enum xran_comp_hdr_type staticEn)
{
    struct rte_mbuf *mb_oran_hdr_ext = NULL;
    struct rte_mbuf *tmp             = NULL;
    int8_t          *ext_buff        = NULL;
    rte_iova_t ext_buff_iova         = 0;
    /* Rewind from the IQ payload so the transport headers land directly
     * in front of it inside the same external buffer. */
    ext_buff =      p_ext_buff - (RTE_PKTMBUF_HEADROOM +
                    sizeof(struct xran_ecpri_hdr) +
                    sizeof(struct radio_app_common_hdr) +
                    sizeof(struct data_section_hdr));

    /* NOTE(review): the extra 18 bytes presumably cover the Ethernet header
     * (14B) plus padding/alignment slack — TODO confirm against the packet
     * layout before changing. */
    ext_buff_len += RTE_PKTMBUF_HEADROOM +
                    sizeof(struct xran_ecpri_hdr) +
                    sizeof(struct radio_app_common_hdr) +
                    sizeof(struct data_section_hdr) + 18;
    /* Dynamic compression adds a per-section compression header in-band. */
    if ((compMeth != XRAN_COMPMETHOD_NONE)&&(staticEn == XRAN_COMP_HDR_TYPE_DYNAMIC)) {
        ext_buff     -= sizeof (struct data_section_compression_hdr);
        ext_buff_len += sizeof (struct data_section_compression_hdr);
    }
    /* Small pool: the mbuf only carries metadata; the data is external. */
    mb_oran_hdr_ext =  rte_pktmbuf_alloc(_eth_mbuf_pool_vf_small[vf_id]);

    if (unlikely (( mb_oran_hdr_ext) == NULL)) {
        rte_panic("[core %d]Failed rte_pktmbuf_alloc on vf %d\n", rte_lcore_id(), vf_id);
    }

    /* Free callback is a no-op hook; buffer lifetime is managed by the app. */
    p_share_data->free_cb = extbuf_free_callback;
    p_share_data->fcb_opaque = NULL;
    rte_mbuf_ext_refcnt_set(p_share_data, 1);

    /* Translate the mempool object base, then offset to our header start. */
    ext_buff_iova = rte_mempool_virt2iova(p_ext_buff_start);
    if (unlikely (( ext_buff_iova) == 0)) {
        rte_panic("Failed rte_mem_virt2iova \n");
    }

    if (unlikely (( (rte_iova_t)ext_buff_iova) == RTE_BAD_IOVA)) {
        rte_panic("Failed rte_mem_virt2iova RTE_BAD_IOVA \n");
    }

    rte_pktmbuf_attach_extbuf(mb_oran_hdr_ext,
                              ext_buff,
                              ext_buff_iova + RTE_PTR_DIFF(ext_buff , p_ext_buff_start),
                              ext_buff_len,
                              p_share_data);

    rte_pktmbuf_reset_headroom(mb_oran_hdr_ext);

    /* Reserve room for the Ethernet header in front of the ORAN headers. */
    tmp = (struct rte_mbuf *)rte_pktmbuf_prepend(mb_oran_hdr_ext, sizeof(struct rte_ether_hdr));
    if (unlikely (( tmp) == NULL)) {
        rte_panic("Failed rte_pktmbuf_prepend \n");
    }

    return mb_oran_hdr_ext;
}
758
759 int32_t xran_process_tx_sym_cp_on_dispatch_opt(void* pHandle, uint8_t ctx_id, uint32_t tti, int32_t start_cc, int32_t num_cc, int32_t start_ant, int32_t num_ant, uint32_t frame_id,
760     uint32_t subframe_id, uint32_t slot_id, uint32_t sym_id, enum xran_comp_hdr_type compType, enum xran_pkt_dir direction,
761     uint16_t xran_port_id, PSECTION_DB_TYPE p_sec_db)
762 {
763     int32_t     retval = 0;
764     struct cp_up_tx_desc*   p_desc = NULL;
765     struct xran_ethdi_ctx*  eth_ctx = xran_ethdi_get_ctx();
766     struct xran_device_ctx* p_xran_dev_ctx = (struct xran_device_ctx*)pHandle;
767
768     p_desc = xran_pkt_gen_desc_alloc();
769     if(p_desc) {
770         p_desc->pHandle     = pHandle;
771         p_desc->ctx_id      = ctx_id;
772         p_desc->tti         = tti;
773         p_desc->start_cc    = start_cc;
774         p_desc->cc_num      = num_cc;
775         p_desc->start_ant   = start_ant;
776         p_desc->ant_num     = num_ant;
777         p_desc->frame_id    = frame_id;
778         p_desc->subframe_id = subframe_id;
779         p_desc->slot_id     = slot_id;
780         p_desc->sym_id      = sym_id;
781         p_desc->compType    = (uint32_t)compType;
782         p_desc->direction   = (uint32_t)direction;
783         p_desc->xran_port_id    = xran_port_id;
784         p_desc->p_sec_db = (void*)p_sec_db;
785
786         if(likely(p_xran_dev_ctx->xran_port_id < XRAN_PORTS_NUM)) {
787             if (rte_ring_enqueue(eth_ctx->up_dl_pkt_gen_ring[p_xran_dev_ctx->xran_port_id], p_desc->mb) == 0)
788                 return 1;   /* success */
789             else
790                 xran_pkt_gen_desc_free(p_desc);
791         } else {
792             rte_panic("incorrect port %d", p_xran_dev_ctx->xran_port_id);
793         }
794     } else {
795         print_dbg("xran_pkt_gen_desc_alloc failure %d", p_xran_dev_ctx->xran_port_id);
796     }
797
798     return retval;
799 }
800
801 int32_t
802 xran_process_tx_sym_cp_on_dispatch(void *pHandle, uint8_t ctx_id, uint32_t tti, int32_t start_cc, int32_t num_cc, int32_t start_ant, int32_t num_ant, uint32_t frame_id, uint32_t subframe_id,
803                                    uint32_t slot_id, uint32_t sym_id)
804 {
805     int32_t     retval = 0;
806     struct cp_up_tx_desc*   p_desc = NULL;
807     struct xran_ethdi_ctx*  eth_ctx = xran_ethdi_get_ctx();
808     struct xran_device_ctx* p_xran_dev_ctx = (struct xran_device_ctx*)pHandle;
809
810     p_desc = xran_pkt_gen_desc_alloc();
811     if(p_desc) {
812         p_desc->pHandle     = pHandle;
813         p_desc->ctx_id      = ctx_id;
814         p_desc->tti         = tti;
815         p_desc->start_cc    = start_cc;
816         p_desc->cc_num       = num_cc;
817         p_desc->start_ant    = start_ant;
818         p_desc->ant_num      = num_ant;
819         p_desc->frame_id    = frame_id;
820         p_desc->subframe_id = subframe_id;
821         p_desc->slot_id     = slot_id;
822         p_desc->sym_id      = sym_id;
823
824         if(likely(p_xran_dev_ctx->xran_port_id < XRAN_PORTS_NUM)) {
825             if (rte_ring_enqueue(eth_ctx->up_dl_pkt_gen_ring[p_xran_dev_ctx->xran_port_id], p_desc->mb) == 0)
826                 return 1;   /* success */
827             else
828                 xran_pkt_gen_desc_free(p_desc);
829         } else {
830             rte_panic("incorrect port %d", p_xran_dev_ctx->xran_port_id);
831         }
832     } else {
833         print_dbg("xran_pkt_gen_desc_alloc failure %d", p_xran_dev_ctx->xran_port_id);
834     }
835
836     return retval;
837 }
838
/**
 * Generate and transmit U-Plane packets for one symbol when C-Plane is
 * enabled (legacy per-antenna path): iterates the section-info DB entries
 * produced during C-Plane processing and emits one zero-copy packet per
 * scheduled type-1 section, then bursts them out on the mapped VF.
 *
 * @param pHandle  device context (struct xran_device_ctx *)
 * @param ctx_id   section DB context index (tti % XRAN_MAX_SECTIONDB_CTX)
 * @param cc_id    component carrier to process (start_cc is unused here)
 * @param ant_id   eAxC antenna port to process (start_ant is unused here)
 * @return 1 when the device context is valid (packets sent), 0 otherwise
 */
int32_t
xran_process_tx_sym_cp_on(void *pHandle, uint8_t ctx_id, uint32_t tti, int32_t start_cc, int32_t cc_id, int32_t start_ant, int32_t ant_id, uint32_t frame_id, uint32_t subframe_id,
    uint32_t slot_id, uint32_t sym_id)
{
    int32_t     retval = 0;
    uint16_t    ext_buff_len = 0;
    char        *pos      = NULL;
    char        *p_sec_iq = NULL;
    void        *mb  = NULL;
    struct rte_mbuf *to_free_mbuf =  NULL;
    //int         prb_num = 0;
    uint16_t    iq_sample_size_bits = 16;    /* uncompressed default: 16-bit I/Q */
    uint32_t    next = 0;
    int32_t     num_sections = 0;
    uint16_t    len  = 0;
    int16_t     len2 = 0;
    uint16_t    i    = 0;
    struct mbuf_table  loc_tx_mbufs;
    struct xran_up_pkt_gen_params loc_xp;

    struct xran_section_info *sectinfo = NULL;
    struct xran_device_ctx   *p_xran_dev_ctx = (struct xran_device_ctx*)pHandle;
    enum xran_pkt_dir direction;
    uint16_t vf_id = 0;
    enum xran_comp_hdr_type compType = XRAN_COMP_HDR_TYPE_DYNAMIC;

    struct rte_mbuf_ext_shared_info * p_share_data = NULL;

    if (p_xran_dev_ctx != NULL)
    {
        compType = p_xran_dev_ctx->fh_cfg.ru_conf.xranCompHdrType;

        if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU) {
            direction = XRAN_DIR_DL; /* O-DU */
            //prb_num = p_xran_dev_ctx->fh_cfg.nDLRBs;
        } else {
            direction = XRAN_DIR_UL; /* RU */
            //prb_num = p_xran_dev_ctx->fh_cfg.nULRBs;
        }

        vf_id = xran_map_ecpriPcid_to_vf(p_xran_dev_ctx, direction, cc_id, ant_id);
        next = 0;
        num_sections = xran_cp_getsize_section_info(pHandle, direction, cc_id, ant_id, ctx_id);
        /* iterate C-Plane configuration to generate corresponding U-Plane */
        if(num_sections)
            prepare_sf_slot_sym(direction, frame_id, subframe_id, slot_id, sym_id, &loc_xp);

        loc_tx_mbufs.len = 0;
        while(next < num_sections) {
            sectinfo = xran_cp_iterate_section_info(pHandle, direction, cc_id, ant_id, ctx_id, &next);

            if(sectinfo == NULL)
                break;

            if(sectinfo->type != XRAN_CP_SECTIONTYPE_1) {   /* only supports type 1 */
                print_err("Invalid section type in section DB - %d", sectinfo->type);
                continue;
            }

            /* skip, if not scheduled */
            if(sym_id < sectinfo->startSymId || sym_id >= sectinfo->startSymId + sectinfo->numSymbol)
                continue;

            /* compressed sections carry iqWidth-bit samples instead of 16-bit */
            if(sectinfo->compMeth)
                iq_sample_size_bits = sectinfo->iqWidth;

            print_dbg(">>> sym %2d [%d] type%d id %d startPrbc=%d numPrbc=%d startSymId=%d numSymbol=%d\n", sym_id, next,
                        sectinfo->type, sectinfo->id, sectinfo->startPrbc,
                        sectinfo->numPrbc,sectinfo->startSymId, sectinfo->numSymbol);

            /* per-section shared-info block used for the extbuf attach below */
            p_share_data = &p_xran_dev_ctx->share_data.sh_data[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id][sectinfo->id];

            len  = loc_tx_mbufs.len;
            len2 = 0;
            i    = 0;

            //Added for Klocworks
            if (len >= MBUF_TABLE_SIZE) {
                len = MBUF_TABLE_SIZE - 1;
                rte_panic("len >= MBUF_TABLE_SIZE\n");
            }

            /* mbuf emitted for this section last time around; released after
             * the replacement is in place */
            to_free_mbuf  = p_xran_dev_ctx->to_free_mbuf[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id][sym_id][sectinfo->id];
            pos = (char*) p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData;
            mb  = p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;

            if(mb == NULL) {
                rte_panic("mb == NULL\n");
            }

            /* locate this section's IQ slice inside the symbol buffer */
            p_sec_iq     = ((char*)pos + sectinfo->sec_desc[sym_id].iq_buffer_offset);
            ext_buff_len = sectinfo->sec_desc[sym_id].iq_buffer_len;

            /* zero-copy: wrap the IQ slice in a freshly attached mbuf */
            mb = xran_attach_up_ext_buf(vf_id, (int8_t *)mb, (int8_t *) p_sec_iq,
                                (uint16_t) ext_buff_len,
                                    p_share_data, (enum xran_compression_method) sectinfo->compMeth, compType);
            p_xran_dev_ctx->to_free_mbuf[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id][sym_id][sectinfo->id] =  mb;
            rte_pktmbuf_refcnt_update(mb, 1); /* make sure eth won't free our mbuf */

            if(to_free_mbuf) {
                rte_pktmbuf_free(to_free_mbuf);
            }

            /* first all PRBs */
            prepare_symbol_opt(direction, sectinfo->id,
                            mb,
                            (struct rb_map *)p_sec_iq,
                            sectinfo->compMeth,
                            sectinfo->iqWidth,
                            p_xran_dev_ctx->fh_cfg.ru_conf.byteOrder,
                                sectinfo->startPrbc,
                                sectinfo->numPrbc,
                                cc_id,
                                ant_id,
                            xran_get_updl_seqid(pHandle, cc_id, ant_id),
                            0,
                                &loc_xp,
                                compType);

            /* if we don't need to do any fragmentation */
            if (likely (p_xran_dev_ctx->fh_init.mtu >=
                            sectinfo->numPrbc * (3*iq_sample_size_bits + 1))) {
                /* no fragmentation */
                loc_tx_mbufs.m_table[len] = mb;
                len2 = 1;
            } else {
                /* current code should not go to fragmentation as it should be taken care of by section allocation already */
                print_err("should not go to fragmentation mtu %d packet size %d\n", p_xran_dev_ctx->fh_init.mtu, sectinfo->numPrbc * (3*iq_sample_size_bits + 1));
                return 0;
            }
            /* NOTE(review): len2 can only be 0 or 1 above (the >1 case
             * returns early), so this fragment fix-up loop is effectively
             * dead code — confirm before removing. */
            if(len2 > 1){
                for (i = len; i < len + len2; i ++) {
                    struct rte_mbuf *m;
                    m = loc_tx_mbufs.m_table[i];
                    struct rte_ether_hdr *eth_hdr = (struct rte_ether_hdr *)
                        rte_pktmbuf_prepend(m, (uint16_t)sizeof(struct rte_ether_hdr));
                    if (eth_hdr == NULL) {
                        rte_panic("No headroom in mbuf.\n");
                    }
                }
            }

            len += len2;
            if (unlikely(len > XRAN_MAX_PKT_BURST_PER_SYM)) {
                rte_panic("XRAN_MAX_PKT_BURST_PER_SYM\n");
            }
            loc_tx_mbufs.len = len;
        } /* while(section) */

        /* Transmit packets */
        xran_send_burst(p_xran_dev_ctx, &loc_tx_mbufs, vf_id);
        loc_tx_mbufs.len = 0;
        retval = 1;
    }

    return retval;
}
997
998 int32_t
999 xran_prepare_up_dl_sym(uint16_t xran_port_id, uint32_t nSlotIdx,  uint32_t nCcStart, uint32_t nCcNum, uint32_t nSymMask, uint32_t nAntStart,
1000                             uint32_t nAntNum, uint32_t nSymStart, uint32_t nSymNum)
1001 {
1002     int32_t     retval = 0;
1003     uint32_t    tti=0;
1004     uint32_t    numSlotMu1 = 5;
1005 #if XRAN_MLOG_VAR
1006     uint32_t    mlogVar[15];
1007     uint32_t    mlogVarCnt = 0;
1008 #endif
1009     unsigned long t1 = MLogXRANTick();
1010
1011     void        *pHandle = NULL;
1012     int32_t     ant_id   = 0;
1013     int32_t     cc_id    = 0;
1014     uint8_t     num_eAxc = 0;
1015     uint8_t     num_eAxc_prach = 0;
1016     uint8_t     num_eAxAntElm = 0;
1017     uint8_t     num_CCPorts = 0;
1018     uint32_t    frame_id    = 0;
1019     uint32_t    subframe_id = 0;
1020     uint32_t    slot_id     = 0;
1021     uint32_t    sym_id      = 0;
1022     uint32_t    sym_idx_to_send  = 0;
1023     uint32_t    idxSym;
1024     uint8_t     ctx_id;
1025     enum xran_in_period inPeriod;
1026     uint32_t interval;
1027     uint8_t PortId;
1028     struct xran_device_ctx * p_xran_dev_ctx = NULL;
1029
1030     p_xran_dev_ctx = xran_dev_get_ctx_by_id(xran_port_id);
1031
1032     if(p_xran_dev_ctx == NULL)
1033         return 0;
1034
1035     if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
1036         return 0;
1037
1038     interval = p_xran_dev_ctx->interval_us_local;
1039     PortId = p_xran_dev_ctx->xran_port_id;
1040
1041     pHandle =  p_xran_dev_ctx;
1042
1043     for (idxSym = nSymStart; idxSym < (nSymStart + nSymNum) && idxSym < XRAN_NUM_OF_SYMBOL_PER_SLOT; idxSym++) {
1044         t1 = MLogXRANTick();
1045         if(((1 << idxSym) & nSymMask) ) {
1046             sym_idx_to_send = nSlotIdx*XRAN_NUM_OF_SYMBOL_PER_SLOT + idxSym;
1047             XranOffsetSym(p_xran_dev_ctx->sym_up, sym_idx_to_send, XRAN_NUM_OF_SYMBOL_PER_SLOT*SLOTNUM_PER_SUBFRAME(interval)*1000, &inPeriod);
1048             tti         = XranGetTtiNum(sym_idx_to_send, XRAN_NUM_OF_SYMBOL_PER_SLOT);
1049             slot_id     = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME(interval));
1050             subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME(interval),  SUBFRAMES_PER_SYSTEMFRAME);
1051
1052             uint16_t sfnSecStart = xran_getSfnSecStart();
1053             if(unlikely(inPeriod == XRAN_IN_NEXT_PERIOD))
1054             {
1055                 // For DU
1056                 sfnSecStart = (sfnSecStart + NUM_OF_FRAMES_PER_SECOND) & 0x3ff;
1057             }
1058             else if(unlikely(inPeriod == XRAN_IN_PREV_PERIOD))
1059             {
1060                 // For RU
1061                 if (sfnSecStart >= NUM_OF_FRAMES_PER_SECOND)
1062                 {
1063                     sfnSecStart -= NUM_OF_FRAMES_PER_SECOND;
1064                 }
1065                 else
1066                 {
1067                     sfnSecStart += NUM_OF_FRAMES_PER_SFN_PERIOD - NUM_OF_FRAMES_PER_SECOND;
1068                 }
1069             }
1070             frame_id    = XranGetFrameNum(tti,sfnSecStart,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME(interval));
1071             // ORAN frameId, 8 bits, [0, 255]
1072             frame_id = (frame_id & 0xff);
1073
1074             sym_id      = XranGetSymNum(sym_idx_to_send, XRAN_NUM_OF_SYMBOL_PER_SLOT);
1075             ctx_id      = tti % XRAN_MAX_SECTIONDB_CTX;
1076
1077             print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);
1078
1079 #if XRAN_MLOG_VAR
1080             mlogVarCnt = 0;
1081             mlogVar[mlogVarCnt++] = 0xAAAAAAAA;
1082             mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx[PortId];
1083             mlogVar[mlogVarCnt++] = idxSym;
1084             mlogVar[mlogVarCnt++] = abs(p_xran_dev_ctx->sym_up);
1085             mlogVar[mlogVarCnt++] = tti;
1086             mlogVar[mlogVarCnt++] = frame_id;
1087             mlogVar[mlogVarCnt++] = subframe_id;
1088             mlogVar[mlogVarCnt++] = slot_id;
1089             mlogVar[mlogVarCnt++] = sym_id;
1090             mlogVar[mlogVarCnt++] = PortId;
1091             MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
1092 #endif
1093             if(p_xran_dev_ctx->fh_init.io_cfg.id == O_RU
1094                     && xran_get_ru_category(pHandle) == XRAN_CATEGORY_B)
1095             {
1096                     num_eAxc    = xran_get_num_eAxcUl(pHandle);
1097             }
1098             else
1099             {
1100                     num_eAxc    = xran_get_num_eAxc(pHandle);
1101             }
1102
1103             num_eAxc_prach = ((num_eAxc > XRAN_MAX_PRACH_ANT_NUM)? XRAN_MAX_PRACH_ANT_NUM : num_eAxc);
1104             num_CCPorts = xran_get_num_cc(pHandle);
1105
1106             /* U-Plane */
1107             if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU && p_xran_dev_ctx->enableCP) {
1108                 enum xran_comp_hdr_type compType;
1109                 enum xran_pkt_dir direction;
1110                 //uint32_t prb_num;
1111                 uint32_t loc_ret = 1;
1112                 uint16_t xran_port_id;
1113                 PSECTION_DB_TYPE p_sec_db = NULL;
1114
1115                 compType = p_xran_dev_ctx->fh_cfg.ru_conf.xranCompHdrType;
1116
1117                 if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)
1118                 {
1119                     direction = XRAN_DIR_DL; /* O-DU */
1120                     //prb_num = p_xran_dev_ctx->fh_cfg.nDLRBs;
1121                 }
1122                 else
1123                 {
1124                     direction = XRAN_DIR_UL; /* RU */
1125                     //prb_num = p_xran_dev_ctx->fh_cfg.nULRBs;
1126                 }
1127
1128                 if(unlikely(p_xran_dev_ctx->xran_port_id > XRAN_PORTS_NUM))
1129                 {
1130                     print_err("Invalid Port id - %d", p_xran_dev_ctx->xran_port_id);
1131                     loc_ret = 0;
1132                 }
1133
1134                 if(unlikely(ctx_id > XRAN_MAX_SECTIONDB_CTX))
1135                 {
1136                     print_err("Invalid Context id - %d", ctx_id);
1137                     loc_ret = 0;
1138                 }
1139
1140                 if(unlikely(direction > XRAN_DIR_MAX))
1141                 {
1142                     print_err("Invalid direction - %d", direction);
1143                     loc_ret = 0;
1144                 }
1145
1146                 if(unlikely(num_CCPorts > XRAN_COMPONENT_CARRIERS_MAX))
1147                 {
1148                     print_err("Invalid CC id - %d", num_CCPorts);
1149                     loc_ret = 0;
1150                 }
1151
1152                 if(unlikely(num_eAxc > (XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR)))
1153                 {
1154                     print_err("Invalid eAxC id - %d", num_eAxc);
1155                     loc_ret = 0;
1156                 }
1157
1158                 xran_port_id = p_xran_dev_ctx->xran_port_id;
1159                 p_sec_db = p_sectiondb[p_xran_dev_ctx->xran_port_id];
1160                 if(unlikely(p_sec_db == NULL))
1161                 {
1162                     print_err("p_sec_db == NULL\n");
1163                     loc_ret = 0;
1164                 }
1165
1166                 if (loc_ret) {
1167                     retval = xran_process_tx_sym_cp_on_opt(pHandle, ctx_id, tti,
1168                                         nCcStart, nCcNum, nAntStart, nAntNum, frame_id, subframe_id, slot_id, idxSym,
1169                                         compType, direction, xran_port_id, p_sec_db);
1170                 } else {
1171                     print_err("loc_ret %d\n", loc_ret);
1172                     retval = 0;
1173                 }
1174             } else {
1175                 for (ant_id = 0; ant_id < num_eAxc; ant_id++) {
1176                     for (cc_id = 0; cc_id < num_CCPorts; cc_id++) {
1177                         //struct xran_srs_config *p_srs_cfg = &(p_xran_dev_ctx->srs_cfg);
1178                         if(p_xran_dev_ctx->puschMaskEnable)
1179                         {
1180                             if((tti % numSlotMu1) != p_xran_dev_ctx->puschMaskSlot)
1181                                 retval = xran_process_tx_sym_cp_off(pHandle, ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id, 0);
1182                         }
1183                         else
1184                             retval = xran_process_tx_sym_cp_off(pHandle, ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id, 0);
1185
1186                         if(p_xran_dev_ctx->enablePrach && (ant_id < num_eAxc_prach) )
1187                         {
1188                             retval = xran_process_tx_prach_cp_off(pHandle, ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id);
1189                         }
1190                     }
1191                 }
1192             }
1193
1194             /* SRS U-Plane, only for O-RU emulation with Cat B */
1195             if(p_xran_dev_ctx->fh_init.io_cfg.id == O_RU
1196                     && xran_get_ru_category(pHandle) == XRAN_CATEGORY_B
1197                     && p_xran_dev_ctx->enableSrs
1198                     && ((p_xran_dev_ctx->srs_cfg.symbMask >> idxSym)&1))
1199             {
1200                 struct xran_srs_config *pSrsCfg = &(p_xran_dev_ctx->srs_cfg);
1201
1202                 for(cc_id = 0; cc_id < num_CCPorts; cc_id++)
1203                 {
1204                     /* check special frame */
1205                     if((xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_SP) ==  1)
1206                         || (xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_UL) ==  1))
1207                     {
1208                         if(((tti % p_xran_dev_ctx->fh_cfg.frame_conf.nTddPeriod) == pSrsCfg->slot)
1209                             && (p_xran_dev_ctx->ndm_srs_scheduled == 0))
1210                         {
1211                             int elmIdx;
1212                             struct xran_prb_map *prb_map;
1213                             prb_map = (struct xran_prb_map *)p_xran_dev_ctx->sFHSrsRxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][0].sBufferList.pBuffers->pData;
1214
1215                             /* if PRB map is present in first antenna, assume SRS might be scheduled. */
1216                             if(prb_map && prb_map->nPrbElm)
1217                             {
1218                                 /* NDM U-Plane is not enabled */
1219                                 if(pSrsCfg->ndm_offset == 0)
1220                                 {
1221
1222                                     if (prb_map->nPrbElm > 0)
1223                                     {
1224                                         /* Check symbol range in PRB Map */
1225                                         if(sym_id >= prb_map->prbMap[0].nStartSymb
1226                                             && sym_id < (prb_map->prbMap[0].nStartSymb + prb_map->prbMap[0].numSymb))
1227                                             for(ant_id=0; ant_id < xran_get_num_ant_elm(pHandle); ant_id++)
1228                                                 xran_process_tx_srs_cp_off(pHandle, ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id);
1229                                     }
1230
1231                                 }
1232                                 /* NDM U-Plane is enabled, SRS U-Planes will be transmitted after ndm_offset (in slots) */
1233                                 else
1234                                 {
1235                                     p_xran_dev_ctx->ndm_srs_scheduled   = 1;
1236                                     p_xran_dev_ctx->ndm_srs_tti         = tti;
1237                                     p_xran_dev_ctx->ndm_srs_txtti       = (tti + pSrsCfg->ndm_offset)%2000;
1238                                     p_xran_dev_ctx->ndm_srs_schedperiod = pSrsCfg->slot;
1239                                 }
1240                             }
1241                         }
1242                     }
1243                     /* check SRS NDM UP has been scheduled in non special slots */
1244                     else if(p_xran_dev_ctx->ndm_srs_scheduled
1245                             && p_xran_dev_ctx->ndm_srs_txtti == tti)
1246                     {
1247                         int ndm_step;
1248                         uint32_t srs_tti, srsFrame, srsSubframe, srsSlot;
1249                         uint8_t  srsCtx;
1250
1251                         srs_tti     = p_xran_dev_ctx->ndm_srs_tti;
1252                         num_eAxAntElm = xran_get_num_ant_elm(pHandle);
1253                         ndm_step    = num_eAxAntElm / pSrsCfg->ndm_txduration;
1254
1255                         srsSlot     = XranGetSlotNum(srs_tti, SLOTNUM_PER_SUBFRAME(interval));
1256                         srsSubframe = XranGetSubFrameNum(srs_tti,SLOTNUM_PER_SUBFRAME(interval),  SUBFRAMES_PER_SYSTEMFRAME);
1257                         srsFrame    = XranGetFrameNum(srs_tti,sfnSecStart,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME(interval));
1258                         srsFrame    = (srsFrame & 0xff);
1259                         srsCtx      =  srs_tti % XRAN_MAX_SECTIONDB_CTX;
1260
1261                         if(sym_id < pSrsCfg->ndm_txduration)
1262                         {
1263                             for(ant_id=sym_id*ndm_step; ant_id < (sym_id+1)*ndm_step; ant_id++)
1264                                 xran_process_tx_srs_cp_off(pHandle, srsCtx, srs_tti, cc_id, ant_id, srsFrame, srsSubframe, srsSlot);
1265                         }
1266                         else
1267                         {
1268                             p_xran_dev_ctx->ndm_srs_scheduled   = 0;
1269                             p_xran_dev_ctx->ndm_srs_tti         = 0;
1270                             p_xran_dev_ctx->ndm_srs_txtti       = 0;
1271                             p_xran_dev_ctx->ndm_srs_schedperiod = 0;
1272                         }
1273                     }
1274                 }
1275             }
1276         }
1277         MLogXRANTask(PID_DISPATCH_TX_SYM, t1, MLogXRANTick());
1278     }
1279
1280     return retval;
1281 }
1282
1283
1284 static inline uint16_t
1285 xran_tx_sym_from_ring(struct xran_device_ctx* p_xran_dev_ctx, struct rte_ring *r, uint16_t vf_id)
1286 {
1287     struct rte_mbuf *mbufs[XRAN_MAX_MEM_IF_RING_SIZE];
1288     uint16_t dequeued, sent = 0;
1289     uint32_t remaining;
1290     //long t1 = MLogXRANTick();
1291
1292     dequeued = rte_ring_dequeue_burst(r, (void **)mbufs, XRAN_MAX_MEM_IF_RING_SIZE,
1293             &remaining);
1294     if (!dequeued)
1295         return 0;   /* Nothing to send. */
1296
1297     while (1) {
1298         //sent += p_xran_dev_ctx->send_upmbuf2ring(mbufs[sent], ETHER_TYPE_ECPRI, vf_id);
1299         sent += rte_eth_tx_burst(vf_id, 0, &mbufs[sent], dequeued - sent);
1300         if (sent == dequeued){
1301             // MLogXRANTask(PID_REQUEUE_TX_SYM, t1, MLogXRANTick());
1302             return remaining;
1303         }
1304     }
1305 }
1306
1307 int32_t
1308 xran_process_tx_sym_cp_on_ring(void* pHandle, uint8_t ctx_id, uint32_t tti, int32_t start_cc, int32_t num_cc, int32_t start_ant,  int32_t num_ant, uint32_t frame_id,
1309     uint32_t subframe_id, uint32_t slot_id, uint32_t sym_id, enum xran_comp_hdr_type compType, enum xran_pkt_dir direction,
1310     uint16_t xran_port_id, PSECTION_DB_TYPE p_sec_db)
1311 {
1312     struct rte_ring *ring = NULL;
1313     struct xran_device_ctx* p_xran_dev_ctx = (struct xran_device_ctx*)pHandle;
1314     int32_t cc_id  = 0;
1315     int32_t ant_id = 0;
1316     uint16_t vf_id = 0;
1317
1318     for (cc_id = start_cc; cc_id < (start_cc + num_cc); cc_id++) {
1319         for (ant_id = start_ant; ant_id < (start_ant + num_ant); ant_id++) {
1320             vf_id = p_xran_dev_ctx->map2vf[direction][cc_id][ant_id][XRAN_UP_VF];
1321             ring    = p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pRing;
1322             xran_tx_sym_from_ring(p_xran_dev_ctx, ring, vf_id);
1323         }
1324     }
1325     return 0;
1326 }
1327
1328 //#define TRANSMIT_BURST
1329 //#define ENABLE_DEBUG_COREDUMP
1330
1331 #define ETHER_TYPE_ECPRI_BE (0xFEAE)
1332
1333 int32_t
1334 xran_process_tx_sym_cp_on_opt(void* pHandle, uint8_t ctx_id, uint32_t tti, int32_t start_cc, int32_t num_cc, int32_t start_ant,  int32_t num_ant, uint32_t frame_id,
1335     uint32_t subframe_id, uint32_t slot_id, uint32_t sym_id, enum xran_comp_hdr_type compType, enum xran_pkt_dir direction,
1336     uint16_t xran_port_id, PSECTION_DB_TYPE p_sec_db)
1337 {
1338     struct xran_up_pkt_gen_params *pxp;
1339     struct data_section_hdr *pDataSec;
1340     char* ext_buff;
1341     void  *mb_base;
1342     struct rte_ring *ring;
1343     char* pStart;
1344     struct xran_ethdi_ctx* eth_ctx = xran_ethdi_get_ctx();
1345     struct xran_section_info* sectinfo;
1346     struct xran_device_ctx* p_xran_dev_ctx = (struct xran_device_ctx*)pHandle;
1347     struct rte_mbuf_ext_shared_info* p_share_data;
1348     struct xran_sectioninfo_db* ptr_sect_elm = NULL;
1349     struct rte_mbuf* mb_oran_hdr_ext = NULL;
1350     struct xran_ecpri_hdr* ecpri_hdr = NULL;
1351     //uint16_t* __restrict pSrc = NULL;
1352     uint16_t* __restrict pDst = NULL;
1353
1354     uint16_t next;
1355     uint16_t ext_buff_len = 0;
1356     uint16_t iq_sample_size_bytes=0;
1357     uint16_t num_sections = 0, total_sections = 0;
1358     uint16_t n_bytes;
1359     uint16_t elm_bytes = 0;
1360     uint16_t section_id;
1361     uint16_t nPktSize=0;
1362     uint16_t cid;
1363     uint16_t vf_id;
1364     const int16_t rte_mempool_objhdr_size = sizeof(struct rte_mempool_objhdr);
1365     uint8_t seq_id = 0;
1366     uint8_t cc_id, ant_id;
1367
1368 #ifdef TRANSMIT_BURST
1369     uint16_t len = 0;
1370 #endif
1371     //uint16_t len2 = 0, len_frag = 0;
1372     uint8_t compMeth;
1373     uint8_t iqWidth;
1374     uint8_t parm_size;
1375 #ifdef TRANSMIT_BURST
1376     struct mbuf_table  loc_tx_mbufs;
1377     struct mbuf_table  loc_tx_mbufs_fragmented = {0};
1378 #endif
1379     uint8_t fragNeeded=0;
1380
1381     const uint8_t rte_ether_hdr_size = sizeof(struct rte_ether_hdr);
1382     uint8_t comp_head_upd = 0;
1383
1384     const uint8_t total_header_size = (RTE_PKTMBUF_HEADROOM +
1385         sizeof(struct xran_ecpri_hdr) +
1386         sizeof(struct radio_app_common_hdr) +
1387         sizeof(struct data_section_hdr));
1388
1389
1390     for (cc_id = start_cc; cc_id < (start_cc + num_cc); cc_id++)
1391     {
1392         for (ant_id = start_ant; ant_id < (start_ant + num_ant); ant_id++)
1393         {
1394             ptr_sect_elm = p_sec_db->p_sectiondb_elm[ctx_id][direction][cc_id][ant_id];
1395             if (unlikely(ptr_sect_elm == NULL)){
1396                 rte_panic("ptr_sect_elm == NULL\n");
1397                 return (0);
1398             }
1399
1400             if(0!=ptr_sect_elm->cur_index)
1401             {
1402                 num_sections = ptr_sect_elm->cur_index;
1403                 /* iterate C-Plane configuration to generate corresponding U-Plane */
1404                 vf_id = p_xran_dev_ctx->map2vf[direction][cc_id][ant_id][XRAN_UP_VF];
1405                 mb_base = p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;
1406                 ring    = p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pRing;
1407                 if (unlikely(mb_base == NULL))
1408                 {
1409                     rte_panic("mb == NULL\n");
1410                 }
1411                 cid = ((cc_id << p_xran_dev_ctx->eAxc_id_cfg.bit_ccId) & p_xran_dev_ctx->eAxc_id_cfg.mask_ccId) | ((ant_id << p_xran_dev_ctx->eAxc_id_cfg.bit_ruPortId) & p_xran_dev_ctx->eAxc_id_cfg.mask_ruPortId);
1412                 cid = rte_cpu_to_be_16(cid);
1413
1414 #ifdef TRANSMIT_BURST
1415                 loc_tx_mbufs.len = 0;
1416 #endif
1417                 //len_frag = 0;
1418 #pragma loop_count min=1, max=16
1419                 for (next=0; next< num_sections; next++)
1420                 {
1421                     sectinfo = &ptr_sect_elm->list[next];
1422
1423                     if (unlikely(sectinfo == NULL)) {
1424                         print_err("sectinfo == NULL\n");
1425                         break;
1426                     }
1427                     if (unlikely(sectinfo->type != XRAN_CP_SECTIONTYPE_1))
1428                     {   /* only supports type 1 */
1429                         print_err("Invalid section type in section DB - %d", sectinfo->type);
1430                         continue;
1431                     }
1432                     /* skip, if not scheduled */
1433                     if (unlikely(sym_id < sectinfo->startSymId || sym_id >= sectinfo->startSymId + sectinfo->numSymbol))
1434                         continue;
1435
1436                     compMeth = sectinfo->compMeth;
1437                     iqWidth = sectinfo->iqWidth;
1438                     section_id = sectinfo->id;
1439
1440                     comp_head_upd = ((compMeth != XRAN_COMPMETHOD_NONE) && (compType == XRAN_COMP_HDR_TYPE_DYNAMIC));
1441
1442                     if(sectinfo->prbElemBegin || p_xran_dev_ctx->RunSlotPrbMapBySymbolEnable)
1443                     {
1444                         if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU)
1445                             seq_id = xran_updl_seq_id_num[xran_port_id][cc_id][ant_id]++;
1446                         else
1447                             seq_id = xran_upul_seq_id_num[xran_port_id][cc_id][ant_id]++;
1448                         iq_sample_size_bytes = 18 +   sizeof(struct xran_ecpri_hdr) +
1449                                 sizeof(struct radio_app_common_hdr);
1450                     }
1451
1452
1453                     if (compMeth)
1454                     {
1455                         iq_sample_size_bytes += sizeof(struct data_section_hdr) ;
1456
1457                         if (comp_head_upd)
1458                         {
1459                             iq_sample_size_bytes += sizeof(struct data_section_compression_hdr);
1460                         }
1461
1462                         iq_sample_size_bytes += sectinfo->numPrbc*(iqWidth*3 + 1);
1463                     }
1464
1465                     print_dbg(">>> sym %2d [%d] type%d id %d startPrbc=%d numPrbc=%d startSymId=%d numSymbol=%d\n", sym_id, next,
1466                             sectinfo->type, sectinfo->id, sectinfo->startPrbc,
1467                             sectinfo->numPrbc, sectinfo->startSymId, sectinfo->numSymbol);
1468
1469
1470 #ifdef TRANSMIT_BURST
1471                     len = loc_tx_mbufs.len;
1472                     //Added for Klocworks
1473                     if (unlikely(len >= MBUF_TABLE_SIZE))
1474                     {
1475                         len = MBUF_TABLE_SIZE - 1;
1476                         rte_panic("len >= MBUF_TABLE_SIZE\n");
1477                     }
1478 #endif
1479                     if(sectinfo->prbElemBegin || p_xran_dev_ctx->RunSlotPrbMapBySymbolEnable)
1480                     {
1481                         p_share_data = &p_xran_dev_ctx->share_data.sh_data[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id][section_id];
1482                         p_share_data->free_cb = extbuf_free_callback;
1483                         p_share_data->fcb_opaque = NULL;
1484                         rte_mbuf_ext_refcnt_set(p_share_data, 1);
1485
1486                         /* Create ethernet + eCPRI + radio app header */
1487                         ext_buff_len = sectinfo->sec_desc[sym_id].iq_buffer_len;
1488
1489                         ext_buff = ((char*)p_xran_dev_ctx->sFrontHaulTxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData + sectinfo->sec_desc[sym_id].iq_buffer_offset) - total_header_size;
1490                         ext_buff_len += (total_header_size + 18);
1491
1492                         if (comp_head_upd)
1493                         {
1494                             ext_buff -= sizeof(struct data_section_compression_hdr);
1495                             ext_buff_len += sizeof(struct data_section_compression_hdr);
1496                         }
1497
1498                         mb_oran_hdr_ext = rte_pktmbuf_alloc(_eth_mbuf_pool_vf_small[vf_id]);
1499                         if (unlikely((mb_oran_hdr_ext) == NULL))
1500                         {
1501                             rte_panic("[core %d]Failed rte_pktmbuf_alloc on vf %d\n", rte_lcore_id(), vf_id);
1502                         }
1503
1504 #ifdef ENABLE_DEBUG_COREDUMP
1505                         if (unlikely((struct rte_mempool_objhdr*)RTE_PTR_SUB(mb_base, rte_mempool_objhdr_size)->iova == 0))
1506                         {
1507                             rte_panic("Failed rte_mem_virt2iova\n");
1508                         }
1509                         if (unlikely(((rte_iova_t)(struct rte_mempool_objhdr*)RTE_PTR_SUB(mb_base, rte_mempool_objhdr_size)->iova) == RTE_BAD_IOVA))
1510                         {
1511                             rte_panic("Failed rte_mem_virt2iova RTE_BAD_IOVA \n");
1512                         }
1513 #endif
1514                         mb_oran_hdr_ext->buf_addr = ext_buff;
1515                         mb_oran_hdr_ext->buf_iova = ((struct rte_mempool_objhdr*)RTE_PTR_SUB(mb_base, rte_mempool_objhdr_size))->iova + RTE_PTR_DIFF(ext_buff, mb_base);
1516                         mb_oran_hdr_ext->buf_len = ext_buff_len;
1517                         mb_oran_hdr_ext->ol_flags |= EXT_ATTACHED_MBUF;
1518                         mb_oran_hdr_ext->shinfo = p_share_data;
1519                         mb_oran_hdr_ext->data_off = (uint16_t)RTE_MIN((uint16_t)RTE_PKTMBUF_HEADROOM, (uint16_t)mb_oran_hdr_ext->buf_len) - rte_ether_hdr_size;
1520                         mb_oran_hdr_ext->data_len = (uint16_t)(mb_oran_hdr_ext->data_len + rte_ether_hdr_size);
1521                         mb_oran_hdr_ext->pkt_len = mb_oran_hdr_ext->pkt_len + rte_ether_hdr_size;
1522                         mb_oran_hdr_ext->port = eth_ctx->io_cfg.port[vf_id];
1523
1524                         p_xran_dev_ctx->to_free_mbuf[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id][sym_id][section_id] = (void*)mb_oran_hdr_ext;
1525                         rte_pktmbuf_refcnt_update((void*)mb_oran_hdr_ext, 1); /* make sure eth won't free our mbuf */
1526                         if (p_xran_dev_ctx->to_free_mbuf[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id][sym_id][section_id])
1527                         {
1528                             rte_pktmbuf_free(p_xran_dev_ctx->to_free_mbuf[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id][sym_id][section_id]);
1529                         }
1530
1531                         pStart = (char*)((char*)mb_oran_hdr_ext->buf_addr + mb_oran_hdr_ext->data_off);
1532
1533                         /* Fill in the ethernet header. */
1534 #ifndef TRANSMIT_BURST
1535                         rte_eth_macaddr_get(mb_oran_hdr_ext->port, &((struct rte_ether_hdr*)pStart)->s_addr);         /* set source addr */
1536                         ((struct rte_ether_hdr*)pStart)->d_addr = eth_ctx->entities[vf_id][ID_O_RU];                  /* set dst addr */
1537                         ((struct rte_ether_hdr*)pStart)->ether_type = ETHER_TYPE_ECPRI_BE;                            /* ethertype */
1538 #endif
1539                         nPktSize = sizeof(struct rte_ether_hdr)
1540                                                 + sizeof(struct xran_ecpri_hdr)
1541                                                 + sizeof(struct radio_app_common_hdr) ;
1542
1543                         ecpri_hdr = (struct xran_ecpri_hdr*)(pStart + sizeof(struct rte_ether_hdr));
1544
1545                         ecpri_hdr->cmnhdr.data.data_num_1 = 0x0;
1546                         ecpri_hdr->cmnhdr.bits.ecpri_ver = XRAN_ECPRI_VER;
1547                         ecpri_hdr->cmnhdr.bits.ecpri_mesg_type = ECPRI_IQ_DATA;
1548
1549                         /* one to one lls-CU to RU only and band sector is the same */
1550                         ecpri_hdr->ecpri_xtc_id = cid;
1551
1552                         /* no transport layer fragmentation supported */
1553                         ecpri_hdr->ecpri_seq_id.data.data_num_1 = 0x8000;
1554                         ecpri_hdr->ecpri_seq_id.bits.seq_id = seq_id;
1555                         ecpri_hdr->cmnhdr.bits.ecpri_payl_size =  sizeof(struct radio_app_common_hdr) + XRAN_ECPRI_HDR_SZ; //xran_get_ecpri_hdr_size();;;
1556
1557                     } /* if(sectinfo->prbElemBegin) */
1558
1559                     /* Prepare U-Plane section hdr */
1560                     iqWidth = (iqWidth == 0) ? 16 : iqWidth;
1561                     switch (compMeth)
1562                     {
1563                     case XRAN_COMPMETHOD_BLKFLOAT:      parm_size = 1; break;
1564                     case XRAN_COMPMETHOD_MODULATION:    parm_size = 0; break;
1565                     default:
1566                         parm_size = 0;
1567                     }
1568
1569                     n_bytes = (3 * iqWidth + parm_size) * sectinfo->numPrbc; //Dont understand this
1570                     n_bytes = RTE_MIN(n_bytes, XRAN_MAX_MBUF_LEN);
1571
1572                     /* Ethernet & eCPRI added already */
1573                     nPktSize += sizeof(struct data_section_hdr) + n_bytes;
1574
1575                     if (comp_head_upd)
1576                         nPktSize += sizeof(struct data_section_compression_hdr);
1577
1578                     if(likely((ecpri_hdr!=NULL)))
1579                     {
1580                         ecpri_hdr->cmnhdr.bits.ecpri_payl_size += sizeof(struct data_section_hdr) + n_bytes ;
1581
1582                         if (comp_head_upd)
1583                             ecpri_hdr->cmnhdr.bits.ecpri_payl_size += sizeof(struct data_section_compression_hdr);
1584                     }
1585                     else
1586                     {
1587                         print_err("ecpri_hdr should not be NULL\n");
1588                     }
1589                     //ecpri_hdr->cmnhdr.bits.ecpri_payl_size += ecpri_payl_size;
1590
1591                     /* compression */
1592
1593                     if(sectinfo->prbElemBegin || p_xran_dev_ctx->RunSlotPrbMapBySymbolEnable)
1594                     {
1595                         pDst = (uint16_t*)(pStart + sizeof(struct rte_ether_hdr) + sizeof(struct xran_ecpri_hdr));
1596                         pxp = (struct xran_up_pkt_gen_params *)pDst;
1597                         /* radio app header */
1598                         pxp->app_params.data_feature.value = 0x10;
1599                         pxp->app_params.data_feature.data_direction = direction;
1600                         pxp->app_params.frame_id = frame_id;
1601                         pxp->app_params.sf_slot_sym.subframe_id = subframe_id;
1602                         pxp->app_params.sf_slot_sym.slot_id = slot_id;
1603                         pxp->app_params.sf_slot_sym.symb_id = sym_id;
1604                         /* convert to network byte order */
1605                         pxp->app_params.sf_slot_sym.value = rte_cpu_to_be_16(pxp->app_params.sf_slot_sym.value);
1606                         pDst += 2;
1607                     }
1608
1609                     pDataSec = (struct data_section_hdr *)pDst;
1610                     if(pDataSec){
1611                         pDataSec->fields.sect_id = section_id;
1612                         pDataSec->fields.num_prbu = (uint8_t)XRAN_CONVERT_NUMPRBC(sectinfo->numPrbc);
1613                         pDataSec->fields.start_prbu = (sectinfo->startPrbc & 0x03ff);
1614                         pDataSec->fields.sym_inc = 0;
1615                         pDataSec->fields.rb = 0;
1616                         /* network byte order */
1617                         pDataSec->fields.all_bits = rte_cpu_to_be_32(pDataSec->fields.all_bits);
1618                         pDst += 2;
1619                     }
1620                     else
1621                     {
1622                         print_err("pDataSec is NULL idx = %u num_sections = %u\n", next, num_sections);
1623                         // return 0;
1624                     }
1625
1626                     if (comp_head_upd)
1627                     {
1628                         if(pDst == NULL){
1629                             print_err("pDst == NULL\n");
1630                             return 0;
1631                         }
1632                         ((struct data_section_compression_hdr *)pDst)->ud_comp_hdr.ud_comp_meth = compMeth;
1633                         ((struct data_section_compression_hdr *)pDst)->ud_comp_hdr.ud_iq_width = XRAN_CONVERT_IQWIDTH(iqWidth);
1634                         ((struct data_section_compression_hdr *)pDst)->rsrvd = 0;
1635                         pDst++;
1636                     }
1637
1638                     //Increment by IQ data len
1639                     pDst = (uint16_t *)((uint8_t *)pDst + n_bytes) ;
1640                     if(mb_oran_hdr_ext){
1641                         rte_pktmbuf_pkt_len(mb_oran_hdr_ext) = nPktSize;
1642                         rte_pktmbuf_data_len(mb_oran_hdr_ext) = nPktSize;
1643                     }
1644
1645                     if(sectinfo->prbElemEnd || p_xran_dev_ctx->RunSlotPrbMapBySymbolEnable) /* Transmit the packet */
1646                     {
1647                         if(likely((ecpri_hdr!=NULL)))
1648                             ecpri_hdr->cmnhdr.bits.ecpri_payl_size = rte_cpu_to_be_16(ecpri_hdr->cmnhdr.bits.ecpri_payl_size);
1649                         else
1650                             print_err("ecpri_hdr should not be NULL\n");
1651                         /* if we don't need to do any fragmentation */
1652                         if (likely(p_xran_dev_ctx->fh_init.mtu >= (iq_sample_size_bytes)))
1653                         {
1654                             /* no fragmentation */
1655                             //len2 = 1;
1656 #ifdef TRANSMIT_BURST
1657                             loc_tx_mbufs.m_table[len++] = (void*)mb_oran_hdr_ext;
1658                             if (unlikely(len > XRAN_MAX_PKT_BURST_PER_SYM))
1659                             {
1660                                 rte_panic("XRAN_MAX_PKT_BURST_PER_SYM\n");
1661                             }
1662                             loc_tx_mbufs.len = len;
1663 #else
1664
1665                             if(p_xran_dev_ctx->fh_init.io_cfg.bbu_offload){
1666                                 rte_ring_enqueue(ring, mb_oran_hdr_ext);
1667                             } else {
1668                                 xran_enqueue_mbuf(mb_oran_hdr_ext, eth_ctx->tx_ring[vf_id]);
1669                             }
1670 #endif
1671                         }
1672                         else
1673                         {
1674                             /* current code should not go to fragmentation as it should be taken care of by section allocation already */
1675                             // print_err("should not go into fragmentation mtu %d packet size %d\n", p_xran_dev_ctx->fh_init.mtu, sectinfo->numPrbc * (3*iq_sample_size_bits + 1));
1676                             return 0;
1677                         }
1678                         elm_bytes += nPktSize;
1679                     } /* if(prbElemEnd) */
1680                 }/* section loop */
1681             } /* if ptr_sect_elm->cur_index */
1682
1683             total_sections += num_sections;
1684
1685             /* Transmit packets */
1686 #ifdef TRANSMIT_BURST
1687             if (loc_tx_mbufs.len)
1688             {
1689                 for (int32_t i = 0; i < loc_tx_mbufs.len; i++)
1690                 {
1691                     if(p_xran_dev_ctx->fh_init.io_cfg.bbu_offload){
1692                         rte_ring_enqueue(ring, loc_tx_mbufs_fragmented.m_table[i]);
1693                     } else {
1694                         p_xran_dev_ctx->send_upmbuf2ring(loc_tx_mbufs.m_table[i], ETHER_TYPE_ECPRI, vf_id);
1695                     }
1696                 }
1697                 loc_tx_mbufs.len = 0;
1698             }
1699 #endif
1700             /* Transmit fragmented packets */
1701             if (unlikely(fragNeeded))
1702             {
1703 #if 0   /* There is no logic populating loc_tx_mbufs_fragmented. hence disabling this code */
1704                 for (int32_t i = 0; i < loc_tx_mbufs_fragmented.len; i++)
1705                 {
1706                     if(p_xran_dev_ctx->fh_init.io_cfg.bbu_offload){
1707                         rte_ring_enqueue(ring, loc_tx_mbufs_fragmented.m_table[i]);
1708                     } else {
1709                         p_xran_dev_ctx->send_upmbuf2ring(loc_tx_mbufs_fragmented.m_table[i], ETHER_TYPE_ECPRI, vf_id);
1710                     }
1711                 }
1712 #endif
1713             }
1714         } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
1715     } /* for(ant_id = 0; ant_id < num_eAxc; ant_id++) */
1716
1717     struct xran_common_counters* pCnt = &p_xran_dev_ctx->fh_counters;
1718     pCnt->tx_counter += total_sections;
1719     pCnt->tx_bytes_counter += elm_bytes;
1720
1721     return 1;
1722 }
1723
1724 int32_t
1725 xran_process_tx_srs_cp_on(void* pHandle, uint8_t ctx_id, uint32_t tti, int32_t start_cc, int32_t num_cc, int32_t start_ant,  int32_t num_ant, uint32_t frame_id,
1726     uint32_t subframe_id, uint32_t slot_id, uint32_t sym_id, enum xran_comp_hdr_type compType, enum xran_pkt_dir direction,
1727     uint16_t xran_port_id, PSECTION_DB_TYPE p_sec_db)
1728 {
1729     struct xran_up_pkt_gen_params *pxp;
1730     struct data_section_hdr *pDataSec;
1731     int32_t antElm_eAxC_id = 0;//  = ant_id + p_srs_cfg->eAxC_offset;
1732
1733     struct xran_srs_config *p_srs_cfg;
1734
1735     char* ext_buff;
1736     void  *mb_base;
1737     char* pStart;
1738     struct xran_ethdi_ctx* eth_ctx = xran_ethdi_get_ctx();
1739     struct xran_section_info* sectinfo;
1740     struct xran_device_ctx* p_xran_dev_ctx = (struct xran_device_ctx*)pHandle;
1741     p_srs_cfg       = &(p_xran_dev_ctx->srs_cfg);
1742     struct rte_mbuf_ext_shared_info* p_share_data;
1743     struct xran_sectioninfo_db* ptr_sect_elm = NULL;
1744     struct rte_mbuf* mb_oran_hdr_ext = NULL;
1745     struct xran_ecpri_hdr* ecpri_hdr = NULL;
1746     uint16_t* __restrict pDst = NULL;
1747
1748     uint16_t next;
1749     uint16_t ext_buff_len = 0;
1750     uint16_t iq_sample_size_bytes=0;
1751     uint16_t num_sections = 0, total_sections = 0;
1752     uint16_t n_bytes;
1753     uint16_t elm_bytes = 0;
1754     uint16_t section_id;
1755     uint16_t nPktSize=0;
1756     uint16_t cid;
1757     uint16_t vf_id;
1758     const int16_t rte_mempool_objhdr_size = sizeof(struct rte_mempool_objhdr);
1759     uint8_t seq_id = 0;
1760     uint8_t cc_id, ant_id;
1761     uint8_t compMeth;
1762     uint8_t iqWidth;
1763     uint8_t parm_size;
1764
1765     const uint8_t rte_ether_hdr_size = sizeof(struct rte_ether_hdr);
1766     uint8_t comp_head_upd = 0;
1767
1768     const uint8_t total_header_size = (RTE_PKTMBUF_HEADROOM +
1769         sizeof(struct xran_ecpri_hdr) +
1770         sizeof(struct radio_app_common_hdr) +
1771         sizeof(struct data_section_hdr));
1772
1773     for (cc_id = start_cc; cc_id < (start_cc + num_cc); cc_id++)
1774     {
1775         for (ant_id = start_ant; ant_id < (start_ant + num_ant); ant_id++)
1776         {
1777             antElm_eAxC_id  = ant_id + p_srs_cfg->eAxC_offset;
1778             ptr_sect_elm = p_sec_db->p_sectiondb_elm[ctx_id][direction][cc_id][antElm_eAxC_id];
1779
1780             if (unlikely(ptr_sect_elm == NULL)){
1781                 printf("ant_id = %d ctx_id = %d,start_ant = %d, num_ant = %d, antElm_eAxC_id = %d\n",ant_id,ctx_id,start_ant,num_ant,antElm_eAxC_id);
1782                 rte_panic("ptr_sect_elm == NULL\n");
1783                 return (0);
1784             }
1785             if(0!=ptr_sect_elm->cur_index)
1786             {
1787                 num_sections = ptr_sect_elm->cur_index;
1788                 /* iterate C-Plane configuration to generate corresponding U-Plane */
1789                 vf_id = xran_map_ecpriPcid_to_vf(p_xran_dev_ctx, direction, cc_id, antElm_eAxC_id);//p_xran_dev_ctx->map2vf[direction][cc_id][antElm_eAxC_id][XRAN_UP_VF];
1790                 mb_base = p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pCtrl;
1791                 if (unlikely(mb_base == NULL))
1792                 {
1793                     rte_panic("mb == NULL\n");
1794                 }
1795                 cid = ((cc_id << p_xran_dev_ctx->eAxc_id_cfg.bit_ccId) & p_xran_dev_ctx->eAxc_id_cfg.mask_ccId) | ((antElm_eAxC_id << p_xran_dev_ctx->eAxc_id_cfg.bit_ruPortId) & p_xran_dev_ctx->eAxc_id_cfg.mask_ruPortId);
1796                 cid = rte_cpu_to_be_16(cid);
1797 #pragma loop_count min=1, max=16
1798                 for (next=0; next< num_sections; next++)
1799                 {
1800                     sectinfo = &ptr_sect_elm->list[next];
1801
1802                     if (unlikely(sectinfo == NULL)) {
1803                         print_err("sectinfo == NULL\n");
1804                         break;
1805                     }
1806                     if (unlikely(sectinfo->type != XRAN_CP_SECTIONTYPE_1))
1807                     {   /* only supports type 1 */
1808                         print_err("Invalid section type in section DB - %d", sectinfo->type);
1809                         continue;
1810                     }
1811                     /* skip, if not scheduled */
1812                     if (unlikely(sym_id < sectinfo->startSymId || sym_id >= sectinfo->startSymId + sectinfo->numSymbol))
1813                         continue;
1814                     compMeth = sectinfo->compMeth;
1815                     iqWidth = sectinfo->iqWidth;
1816                     section_id = sectinfo->id;
1817
1818                     comp_head_upd = ((compMeth != XRAN_COMPMETHOD_NONE) && (compType == XRAN_COMP_HDR_TYPE_DYNAMIC));
1819
1820                     if(sectinfo->prbElemBegin)
1821                     {
1822                         seq_id = xran_get_upul_seqid(pHandle, cc_id, antElm_eAxC_id);
1823                         iq_sample_size_bytes = 18 +   sizeof(struct xran_ecpri_hdr) +
1824                                 sizeof(struct radio_app_common_hdr);
1825                     }
1826
1827                     if (compMeth)
1828                     {
1829                         iq_sample_size_bytes += sizeof(struct data_section_hdr) ;
1830
1831                         if (comp_head_upd)
1832                         {
1833                             iq_sample_size_bytes += sizeof(struct data_section_compression_hdr);
1834                         }
1835
1836                         iq_sample_size_bytes += sectinfo->numPrbc*(iqWidth*3 + 1);
1837                     }
1838
1839                     print_dbg(">>> sym %2d [%d] type%d id %d startPrbc=%d numPrbc=%d startSymId=%d numSymbol=%d\n", sym_id, next,
1840                             sectinfo->type, sectinfo->id, sectinfo->startPrbc,
1841                             sectinfo->numPrbc, sectinfo->startSymId, sectinfo->numSymbol);
1842
1843                     if(sectinfo->prbElemBegin)
1844                     {
1845                         p_share_data    = &p_xran_dev_ctx->srs_share_data.sh_data[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id];
1846                         p_share_data->free_cb = extbuf_free_callback;
1847                         p_share_data->fcb_opaque = NULL;
1848                         rte_mbuf_ext_refcnt_set(p_share_data, 1);
1849
1850                         /* Create ethernet + eCPRI + radio app header */
1851                         ext_buff_len = sectinfo->sec_desc[sym_id].iq_buffer_len;
1852
1853                         ext_buff = ((char*)p_xran_dev_ctx->sFHSrsRxBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][ant_id].sBufferList.pBuffers[sym_id].pData + sectinfo->sec_desc[sym_id].iq_buffer_offset) - total_header_size;
1854                         ext_buff_len += (total_header_size + 18);
1855
1856                         if (comp_head_upd)
1857                         {
1858                             ext_buff -= sizeof(struct data_section_compression_hdr);
1859                             ext_buff_len += sizeof(struct data_section_compression_hdr);
1860                         }
1861
1862                         mb_oran_hdr_ext = xran_ethdi_mbuf_indir_alloc();
1863                         if (unlikely((mb_oran_hdr_ext) == NULL))
1864                         {
1865                             rte_panic("[core %d]Failed rte_pktmbuf_alloc on vf %d\n", rte_lcore_id(), vf_id);
1866                         }
1867
1868 #ifdef ENABLE_DEBUG_COREDUMP
1869                         if (unlikely((struct rte_mempool_objhdr*)RTE_PTR_SUB(mb_base, rte_mempool_objhdr_size)->iova == 0))
1870                         {
1871                             rte_panic("Failed rte_mem_virt2iova\n");
1872                         }
1873                         if (unlikely(((rte_iova_t)(struct rte_mempool_objhdr*)RTE_PTR_SUB(mb_base, rte_mempool_objhdr_size)->iova) == RTE_BAD_IOVA))
1874                         {
1875                             rte_panic("Failed rte_mem_virt2iova RTE_BAD_IOVA \n");
1876                         }
1877 #endif
1878                         mb_oran_hdr_ext->buf_addr = ext_buff;
1879                         mb_oran_hdr_ext->buf_iova = ((struct rte_mempool_objhdr*)RTE_PTR_SUB(mb_base, rte_mempool_objhdr_size))->iova + RTE_PTR_DIFF(ext_buff, mb_base);
1880                         mb_oran_hdr_ext->buf_len = ext_buff_len;
1881                         mb_oran_hdr_ext->ol_flags |= EXT_ATTACHED_MBUF;
1882                         mb_oran_hdr_ext->shinfo = p_share_data;
1883                         mb_oran_hdr_ext->data_off = (uint16_t)RTE_MIN((uint16_t)RTE_PKTMBUF_HEADROOM, (uint16_t)mb_oran_hdr_ext->buf_len) - rte_ether_hdr_size;
1884                         mb_oran_hdr_ext->data_len = (uint16_t)(mb_oran_hdr_ext->data_len + rte_ether_hdr_size);
1885                         mb_oran_hdr_ext->pkt_len = mb_oran_hdr_ext->pkt_len + rte_ether_hdr_size;
1886                         mb_oran_hdr_ext->port = eth_ctx->io_cfg.port[vf_id];
1887                         pStart = (char*)((char*)mb_oran_hdr_ext->buf_addr + mb_oran_hdr_ext->data_off);
1888
1889                         /* Fill in the ethernet header. */
1890                         rte_eth_macaddr_get(mb_oran_hdr_ext->port, &((struct rte_ether_hdr*)pStart)->s_addr);         /* set source addr */
1891                         ((struct rte_ether_hdr*)pStart)->d_addr = eth_ctx->entities[vf_id][ID_O_RU];                  /* set dst addr */
1892                         ((struct rte_ether_hdr*)pStart)->ether_type = ETHER_TYPE_ECPRI_BE;                            /* ethertype */
1893
1894                         nPktSize = sizeof(struct rte_ether_hdr)
1895                                                 + sizeof(struct xran_ecpri_hdr)
1896                                                 + sizeof(struct radio_app_common_hdr) ;
1897
1898                         ecpri_hdr = (struct xran_ecpri_hdr*)(pStart + sizeof(struct rte_ether_hdr));
1899
1900                         ecpri_hdr->cmnhdr.data.data_num_1 = 0x0;
1901                         ecpri_hdr->cmnhdr.bits.ecpri_ver = XRAN_ECPRI_VER;
1902                         ecpri_hdr->cmnhdr.bits.ecpri_mesg_type = ECPRI_IQ_DATA;
1903
1904                         /* one to one lls-CU to RU only and band sector is the same */
1905                         ecpri_hdr->ecpri_xtc_id = cid;
1906
1907                         /* no transport layer fragmentation supported */
1908                         ecpri_hdr->ecpri_seq_id.data.data_num_1 = 0x8000;
1909                         ecpri_hdr->ecpri_seq_id.bits.seq_id = seq_id;
1910                         ecpri_hdr->cmnhdr.bits.ecpri_payl_size =  sizeof(struct radio_app_common_hdr) + XRAN_ECPRI_HDR_SZ; //xran_get_ecpri_hdr_size();;;
1911
1912                     } /* if(sectinfo->prbElemBegin) */
1913
1914                     /* Prepare U-Plane section hdr */
1915                     iqWidth = (iqWidth == 0) ? 16 : iqWidth;
1916                     switch (compMeth)
1917                     {
1918                     case XRAN_COMPMETHOD_BLKFLOAT:      parm_size = 1; break;
1919                     case XRAN_COMPMETHOD_MODULATION:    parm_size = 0; break;
1920                     default:
1921                         parm_size = 0;
1922                     }
1923
1924                     n_bytes = (3 * iqWidth + parm_size) * sectinfo->numPrbc;
1925                     n_bytes = RTE_MIN(n_bytes, XRAN_MAX_MBUF_LEN);
1926
1927                     /* Ethernet & eCPRI added already */
1928                     nPktSize += sizeof(struct data_section_hdr) + n_bytes;
1929
1930                     if (comp_head_upd)
1931                         nPktSize += sizeof(struct data_section_compression_hdr);
1932
1933                     if(likely((ecpri_hdr!=NULL)))
1934                     {
1935                         ecpri_hdr->cmnhdr.bits.ecpri_payl_size += sizeof(struct data_section_hdr) + n_bytes ;
1936
1937                         if (comp_head_upd)
1938                             ecpri_hdr->cmnhdr.bits.ecpri_payl_size += sizeof(struct data_section_compression_hdr);
1939                     }
1940                     else
1941                     {
1942                         print_err("ecpri_hdr should not be NULL\n");
1943                     }
1944
1945                     if(sectinfo->prbElemBegin)
1946                     {
1947                         pDst = (uint16_t*)(pStart + sizeof(struct rte_ether_hdr) + sizeof(struct xran_ecpri_hdr));
1948                         pxp = (struct xran_up_pkt_gen_params *)pDst;
1949                         /* radio app header */
1950                         pxp->app_params.data_feature.value = 0x10;
1951                         pxp->app_params.data_feature.data_direction = direction;
1952                         pxp->app_params.frame_id = frame_id;
1953                         pxp->app_params.sf_slot_sym.subframe_id = subframe_id;
1954                         pxp->app_params.sf_slot_sym.slot_id = slot_id;
1955                         pxp->app_params.sf_slot_sym.symb_id = sym_id;
1956                         /* convert to network byte order */
1957                         pxp->app_params.sf_slot_sym.value = rte_cpu_to_be_16(pxp->app_params.sf_slot_sym.value);
1958                         pDst += 2;
1959                     }
1960
1961                     pDataSec = (struct data_section_hdr *)pDst;
1962                     if(pDataSec){
1963                         pDataSec->fields.sect_id = section_id;
1964                         pDataSec->fields.num_prbu = (uint8_t)XRAN_CONVERT_NUMPRBC(sectinfo->numPrbc);
1965                         pDataSec->fields.start_prbu = (sectinfo->startPrbc & 0x03ff);
1966                         pDataSec->fields.sym_inc = 0;
1967                         pDataSec->fields.rb = 0;
1968                         /* network byte order */
1969                         pDataSec->fields.all_bits = rte_cpu_to_be_32(pDataSec->fields.all_bits);
1970                         pDst += 2;
1971                     }
1972                     else
1973                     {
1974                         print_err("pDataSec is NULL idx = %u num_sections = %u\n", next, num_sections);
1975                         // return 0;
1976                     }
1977
1978                     if (comp_head_upd)
1979                     {
1980                         if(pDst == NULL){
1981                             print_err("pDst == NULL\n");
1982                             return 0;
1983                         }
1984                         ((struct data_section_compression_hdr *)pDst)->ud_comp_hdr.ud_comp_meth = compMeth;
1985                         ((struct data_section_compression_hdr *)pDst)->ud_comp_hdr.ud_iq_width = XRAN_CONVERT_IQWIDTH(iqWidth);
1986                         ((struct data_section_compression_hdr *)pDst)->rsrvd = 0;
1987                         pDst++;
1988                     }
1989
1990                     //Increment by IQ data len
1991                     pDst = (uint16_t *)((uint8_t *)pDst + n_bytes) ;
1992                     if(mb_oran_hdr_ext){
1993                         rte_pktmbuf_pkt_len(mb_oran_hdr_ext) = nPktSize;
1994                         rte_pktmbuf_data_len(mb_oran_hdr_ext) = nPktSize;
1995                     }
1996
1997                     if(sectinfo->prbElemEnd) /* Transmit the packet */
1998                     {
1999                         if(likely((ecpri_hdr!=NULL)))
2000                             ecpri_hdr->cmnhdr.bits.ecpri_payl_size = rte_cpu_to_be_16(ecpri_hdr->cmnhdr.bits.ecpri_payl_size);
2001                         else
2002                             print_err("ecpri_hdr should not be NULL\n");
2003                         /* if we don't need to do any fragmentation */
2004                         if (likely(p_xran_dev_ctx->fh_init.mtu >= (iq_sample_size_bytes)))
2005                         {
2006                             p_xran_dev_ctx->send_upmbuf2ring(mb_oran_hdr_ext, ETHER_TYPE_ECPRI, vf_id);
2007                         }
2008                         else
2009                         {
2010                             return 0;
2011                         }
2012                         elm_bytes += nPktSize;
2013                     } /* if(prbElemEnd) */
2014                 }/* section loop */
2015             } /* if ptr_sect_elm->cur_index */
2016             total_sections += num_sections;
2017         } /* for(cc_id = 0; cc_id < num_CCPorts; cc_id++) */
2018     } /* for(ant_id = 0; ant_id < num_eAxc; ant_id++) */
2019
2020     struct xran_common_counters* pCnt = &p_xran_dev_ctx->fh_counters;
2021     pCnt->tx_counter += total_sections;
2022     pCnt->tx_bytes_counter += elm_bytes;
2023     return 1;
2024 }
2025
2026
2027
/**
 * @brief Per-symbol U-Plane TX dispatch entry point.
 *
 * Derives the current OTA-adjusted symbol/slot/subframe/frame identifiers for
 * the device's port and then dispatches U-Plane (and, where enabled, PRACH and
 * SRS) transmit processing according to the role (O-DU vs O-RU) and whether
 * C-Plane is enabled:
 *   - O-DU + CP on : calls the configured tx_sym_gen_func (DL direction)
 *   - O-RU + CP on : calls xran_process_tx_sym_cp_on_opt for UL symbols and
 *                    handles Cat-B SRS (immediate or NDM-deferred) U-Plane TX
 *   - CP off       : per-antenna/per-CC transmission via the *_cp_off paths
 *
 * @param arg  pointer to the device context (struct xran_device_ctx *)
 * @return 1/retval of the last dispatched processing call, or 0 when memory
 *         is not ready or validation fails (callers treat this as "nothing sent")
 */
int32_t xran_process_tx_sym(void *arg)
{
    int32_t     retval = 0;
    uint32_t    tti=0;
    /* NOTE(review): used as the modulus for the PUSCH-mask slot gating below;
     * presumably 5 = slots per subframe context for this deployment — TODO confirm */
    uint32_t    numSlotMu1 = 5;
#if XRAN_MLOG_VAR
    uint32_t    mlogVar[15];
    uint32_t    mlogVarCnt = 0;
#endif
    unsigned long t1 = MLogXRANTick();

    void        *pHandle = NULL;
    int32_t     ant_id   = 0;
    int32_t     cc_id    = 0;
    uint8_t     num_eAxc = 0;
    uint8_t     num_eAxc_prach = 0;
    uint8_t     num_eAxAntElm = 0;
    uint8_t     num_CCPorts = 0;
    uint32_t    frame_id    = 0;
    uint32_t    subframe_id = 0;
    uint32_t    slot_id     = 0;
    uint32_t    sym_id      = 0;
    uint32_t    sym_idx     = 0;
    uint8_t     ctx_id;
    struct xran_device_ctx * p_xran_dev_ctx = (struct xran_device_ctx *) arg;
    enum xran_in_period inPeriod;
    uint32_t interval = p_xran_dev_ctx->interval_us_local;
    uint8_t PortId = p_xran_dev_ctx->xran_port_id;

    /* Nothing to do until the shared xran<->PHY buffers are set up */
    if(p_xran_dev_ctx->xran2phy_mem_ready == 0)
        return 0;

    pHandle =  p_xran_dev_ctx;

    /* O-RU: send symb after OTA time with delay (UL) */
    /* O-DU: send symb in advance of OTA time (DL) */
    /* sym_up offsets the OTA symbol index; inPeriod reports whether the offset
     * crossed into the previous/next one-second period (frame counter domain) */
    sym_idx     = XranOffsetSym(p_xran_dev_ctx->sym_up, xran_lib_ota_sym_idx[PortId], XRAN_NUM_OF_SYMBOL_PER_SLOT*SLOTNUM_PER_SUBFRAME(interval)*1000, &inPeriod);

    tti         = XranGetTtiNum(sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
    slot_id     = XranGetSlotNum(tti, SLOTNUM_PER_SUBFRAME(interval));
    subframe_id = XranGetSubFrameNum(tti,SLOTNUM_PER_SUBFRAME(interval),  SUBFRAMES_PER_SYSTEMFRAME);

    /* Adjust the SFN-at-second-start anchor when the offset symbol index
     * wrapped into an adjacent one-second period */
    uint16_t sfnSecStart = xran_getSfnSecStart();
    if(unlikely(inPeriod == XRAN_IN_NEXT_PERIOD))
    {
        // For DU
        sfnSecStart = (sfnSecStart + NUM_OF_FRAMES_PER_SECOND) & 0x3ff;
    }
    else if(unlikely(inPeriod == XRAN_IN_PREV_PERIOD))
    {
        // For RU
        if (sfnSecStart >= NUM_OF_FRAMES_PER_SECOND)
        {
            sfnSecStart -= NUM_OF_FRAMES_PER_SECOND;
        }
        else
        {
            sfnSecStart += NUM_OF_FRAMES_PER_SFN_PERIOD - NUM_OF_FRAMES_PER_SECOND;
        }
    }
    frame_id    = XranGetFrameNum(tti,sfnSecStart,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME(interval));
    // ORAN frameId, 8 bits, [0, 255]
    frame_id = (frame_id & 0xff);

    sym_id      = XranGetSymNum(sym_idx, XRAN_NUM_OF_SYMBOL_PER_SLOT);
    ctx_id      = tti % XRAN_MAX_SECTIONDB_CTX;

    print_dbg("[%d]SFN %d sf %d slot %d\n", tti, frame_id, subframe_id, slot_id);

#if XRAN_MLOG_VAR
    mlogVar[mlogVarCnt++] = 0xAAAAAAAA;
    mlogVar[mlogVarCnt++] = xran_lib_ota_sym_idx[PortId];
    mlogVar[mlogVarCnt++] = sym_idx;
    mlogVar[mlogVarCnt++] = abs(p_xran_dev_ctx->sym_up);
    mlogVar[mlogVarCnt++] = tti;
    mlogVar[mlogVarCnt++] = frame_id;
    mlogVar[mlogVarCnt++] = subframe_id;
    mlogVar[mlogVarCnt++] = slot_id;
    mlogVar[mlogVarCnt++] = sym_id;
    mlogVar[mlogVarCnt++] = PortId;
    MLogAddVariables(mlogVarCnt, mlogVar, MLogTick());
#endif

    /* Cat-B O-RU transmits on its UL eAxC set; all other roles use the DL set */
    if(p_xran_dev_ctx->fh_init.io_cfg.id == O_RU
            && xran_get_ru_category(pHandle) == XRAN_CATEGORY_B)
    {
            num_eAxc    = xran_get_num_eAxcUl(pHandle);
    }
    else
    {
            num_eAxc    = xran_get_num_eAxc(pHandle);
    }

    /* PRACH uses at most XRAN_MAX_PRACH_ANT_NUM antennas */
    num_eAxc_prach = ((num_eAxc > XRAN_MAX_PRACH_ANT_NUM)? XRAN_MAX_PRACH_ANT_NUM : num_eAxc);
    num_CCPorts = xran_get_num_cc(pHandle);

    /* U-Plane */
    if(p_xran_dev_ctx->fh_init.io_cfg.id == O_DU && p_xran_dev_ctx->enableCP)
    {
        /* O-DU with C-Plane: delegate DL U-Plane generation to the configured callback */
        if(p_xran_dev_ctx->tx_sym_gen_func)
        {
            enum xran_comp_hdr_type compType;
            uint8_t loc_ret = 1;
            uint16_t xran_port_id;
            PSECTION_DB_TYPE p_sec_db = NULL;

            compType = p_xran_dev_ctx->fh_cfg.ru_conf.xranCompHdrType;

            /* Range-check indices before touching the section database */
            if(unlikely(p_xran_dev_ctx->xran_port_id > XRAN_PORTS_NUM))
            {
                print_err("Invalid Port id - %d", p_xran_dev_ctx->xran_port_id);
                loc_ret = 0;
            }

            if(unlikely(ctx_id > XRAN_MAX_SECTIONDB_CTX))
            {
                print_err("Invalid Context id - %d", ctx_id);
                loc_ret = 0;
            }

            if(unlikely(num_CCPorts > XRAN_COMPONENT_CARRIERS_MAX))
            {
                print_err("Invalid CC id - %d", num_CCPorts);
                loc_ret = 0;
            }

            if(unlikely(num_eAxc > (XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR)))
            {
                print_err("Invalid eAxC id - %d", num_eAxc);
                loc_ret = 0;
            }

            xran_port_id = p_xran_dev_ctx->xran_port_id;
            p_sec_db = p_sectiondb[xran_port_id];

            if (loc_ret) {
                p_xran_dev_ctx->tx_sym_gen_func(pHandle, ctx_id, tti,
                                    0, num_CCPorts, 0, num_eAxc, frame_id, subframe_id, slot_id, sym_id,
                                    compType, XRAN_DIR_DL, xran_port_id, p_sec_db);
            }
            else
            {
                retval = 0;
            }
         }
         else
         {
            rte_panic("p_xran_dev_ctx->tx_sym_gen_func== NULL\n");
         }
    }
    else if(p_xran_dev_ctx->fh_init.io_cfg.id == O_RU && p_xran_dev_ctx->enableCP)
    {
        /* O-RU with C-Plane: only runs after the first full init pass (first_call) */
        if(first_call) {
            enum xran_comp_hdr_type compType;
            uint16_t xran_port_id;
            PSECTION_DB_TYPE p_sec_db = NULL;

            /* Transmit UL U-Plane only in slots/symbols that carry UL (or FDD) */
            if(xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_UL) ==  1
                || xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_SP) ==  1
                || xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_FDD) ==  1){

                if(xran_fs_get_symbol_type(PortId, cc_id, tti, sym_id) == XRAN_SYMBOL_TYPE_UL
                    || xran_fs_get_symbol_type(PortId, cc_id, tti, sym_id) == XRAN_SYMBOL_TYPE_FDD){

                    uint8_t loc_ret = 1;
                    compType = p_xran_dev_ctx->fh_cfg.ru_conf.xranCompHdrType;
                    /* Same index validation as the O-DU path above */
                    if(unlikely(p_xran_dev_ctx->xran_port_id > XRAN_PORTS_NUM))
                    {
                        print_err("Invalid Port id - %d", p_xran_dev_ctx->xran_port_id);
                        loc_ret = 0;
                    }

                    if(unlikely(ctx_id > XRAN_MAX_SECTIONDB_CTX))
                    {
                        print_err("Invalid Context id - %d", ctx_id);
                        loc_ret = 0;
                    }

                    if(unlikely(num_CCPorts > XRAN_COMPONENT_CARRIERS_MAX))
                    {
                        print_err("Invalid CC id - %d", num_CCPorts);
                        loc_ret = 0;
                    }

                    if(unlikely(num_eAxc > (XRAN_MAX_ANTENNA_NR * 2 + XRAN_MAX_ANT_ARRAY_ELM_NR)))
                    {
                        print_err("Invalid eAxC id - %d", num_eAxc);
                        loc_ret = 0;
                    }

                    xran_port_id = p_xran_dev_ctx->xran_port_id;
                    p_sec_db = p_sectiondb[xran_port_id];

                    if (loc_ret) {
                        xran_process_tx_sym_cp_on_opt(pHandle, ctx_id, tti,
                                            0, num_CCPorts, 0, num_eAxc, frame_id, subframe_id, slot_id, sym_id,
                                            compType, XRAN_DIR_UL, xran_port_id, p_sec_db);
                    }
                    else
                    {
                        retval = 0;
                    }
                }
            }

            /* Cat-B SRS handling: only on symbols enabled by the SRS symbol mask */
            if(xran_get_ru_category(pHandle) == XRAN_CATEGORY_B
                    && p_xran_dev_ctx->enableSrs
                    && ((p_xran_dev_ctx->srs_cfg.symbMask >> sym_id)&1))
            {
                xran_port_id = p_xran_dev_ctx->xran_port_id;
                p_sec_db = p_sectiondb[xran_port_id];
                compType = p_xran_dev_ctx->fh_cfg.ru_conf.xranCompHdrType;
                struct xran_srs_config *pSrsCfg = &(p_xran_dev_ctx->srs_cfg);
                struct xran_prb_map *prb_map;
                /* check special frame */
                if(xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_SP) ==  1)
                {
                    /* SRS slot within the TDD period and no NDM transfer already pending */
                    if(((tti % p_xran_dev_ctx->fh_cfg.frame_conf.nTddPeriod) == pSrsCfg->slot)
                        && (p_xran_dev_ctx->ndm_srs_scheduled == 0))
                    {

                        prb_map = (struct xran_prb_map *)p_xran_dev_ctx->sFHSrsRxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][0].sBufferList.pBuffers->pData;
                        /* NDM U-Plane is not enabled */
                        if(pSrsCfg->ndm_offset == 0)
                        {
                            /* Immediate SRS U-Plane TX across all antenna array elements */
                            retval = xran_process_tx_srs_cp_on(pHandle, ctx_id, tti,
                                        0, num_CCPorts, 0, xran_get_num_ant_elm(pHandle), frame_id, subframe_id, slot_id, sym_id,
                                        compType, XRAN_DIR_UL, xran_port_id, p_sec_db);
                        }
                        /* NDM U-Plane is enabled, SRS U-Planes will be transmitted after ndm_offset (in slots) */
                        else
                        {
                            p_xran_dev_ctx->ndm_srs_scheduled   = 1;
                            p_xran_dev_ctx->ndm_srs_tti         = tti;
                            /* NOTE(review): %2000 presumably wraps at the TTI counter period — confirm */
                            p_xran_dev_ctx->ndm_srs_txtti       = (tti + pSrsCfg->ndm_offset)%2000;
                            p_xran_dev_ctx->ndm_srs_schedperiod = pSrsCfg->slot;
                        }
                    }
                }
                /* check SRS NDM UP has been scheduled in non special slots */
                else if(p_xran_dev_ctx->ndm_srs_scheduled
                        && p_xran_dev_ctx->ndm_srs_txtti == tti)
                {
                    prb_map = (struct xran_prb_map *)p_xran_dev_ctx->sFHSrsRxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][0].sBufferList.pBuffers->pData;
                    xran_port_id = p_xran_dev_ctx->xran_port_id;
                    p_sec_db = p_sectiondb[xran_port_id];
                    int ndm_step;
                    uint32_t srs_tti, srsFrame, srsSubframe, srsSlot, srs_sym;
                    uint8_t  srsCtx;
                    if(prb_map && prb_map->nPrbElm)
                    {
                        srs_sym = prb_map->prbMap[0].nStartSymb;

                        /* Recompute frame/subframe/slot identifiers for the original SRS TTI */
                        srs_tti     = p_xran_dev_ctx->ndm_srs_tti;
                        num_eAxAntElm = xran_get_num_ant_elm(pHandle);
                        /* Spread the antenna elements evenly across ndm_txduration symbols */
                        ndm_step    = num_eAxAntElm / pSrsCfg->ndm_txduration;

                        srsSlot     = XranGetSlotNum(srs_tti, SLOTNUM_PER_SUBFRAME(interval));
                        srsSubframe = XranGetSubFrameNum(srs_tti,SLOTNUM_PER_SUBFRAME(interval),  SUBFRAMES_PER_SYSTEMFRAME);
                        srsFrame    = XranGetFrameNum(srs_tti,sfnSecStart,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME(interval));
                        srsFrame    = (srsFrame & 0xff);
                        srsCtx      = srs_tti % XRAN_MAX_SECTIONDB_CTX;

                        if(sym_id < pSrsCfg->ndm_txduration)
                        {
                            /* Transmit this symbol's slice of antenna elements */
                            retval = xran_process_tx_srs_cp_on(pHandle, srsCtx, srs_tti,
                                    0, num_CCPorts, sym_id*ndm_step, ndm_step, srsFrame, srsSubframe, srsSlot, srs_sym,
                                    compType, XRAN_DIR_UL, xran_port_id, p_sec_db);
                        }
                        else
                        {
                            /* NDM transfer complete: clear the scheduling state */
                            p_xran_dev_ctx->ndm_srs_scheduled   = 0;
                            p_xran_dev_ctx->ndm_srs_tti         = 0;
                            p_xran_dev_ctx->ndm_srs_txtti       = 0;
                            p_xran_dev_ctx->ndm_srs_schedperiod = 0;
                        }
                    }
                }
            }
        }
    }
    else  {
        /* C-Plane disabled (either role): per-antenna/per-CC U-Plane TX */
        if(first_call) {
            for (ant_id = 0; ant_id < num_eAxc; ant_id++)
            {
                for (cc_id = 0; cc_id < num_CCPorts; cc_id++)
                {
                    //struct xran_srs_config *p_srs_cfg = &(p_xran_dev_ctx->srs_cfg);
                    if(p_xran_dev_ctx->puschMaskEnable)
                    {
                        /* Skip the masked PUSCH slot; transmit all others */
                        if((tti % numSlotMu1) != p_xran_dev_ctx->puschMaskSlot)
                            retval = xran_process_tx_sym_cp_off(pHandle, ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id, 0);
                    }
                    else
                        retval = xran_process_tx_sym_cp_off(pHandle, ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id, 0);

                    if(p_xran_dev_ctx->enablePrach && (ant_id < num_eAxc_prach) )
                    {
                        retval = xran_process_tx_prach_cp_off(pHandle, ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id, sym_id);
                    }
                }
            }

            /* Cat-B SRS (CP off): mirrors the CP-on SRS logic above but drives
             * the per-antenna xran_process_tx_srs_cp_off path */
            if(xran_get_ru_category(pHandle) == XRAN_CATEGORY_B
                    && p_xran_dev_ctx->enableSrs
                    && ((p_xran_dev_ctx->srs_cfg.symbMask >> sym_id)&1))
            {
                struct xran_srs_config *pSrsCfg = &(p_xran_dev_ctx->srs_cfg);

                for(cc_id = 0; cc_id < num_CCPorts; cc_id++)
                {
                    /* check special frame */
                    if((xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_SP) ==  1)
                        ||(xran_fs_get_slot_type(PortId, cc_id, tti, XRAN_SLOT_TYPE_UL) ==  1))
                    {
                        if(((tti % p_xran_dev_ctx->fh_cfg.frame_conf.nTddPeriod) == pSrsCfg->slot)
                            && (p_xran_dev_ctx->ndm_srs_scheduled == 0))
                        {
                            int elmIdx;
                            struct xran_prb_map *prb_map;
                            prb_map = (struct xran_prb_map *)p_xran_dev_ctx->sFHSrsRxPrbMapBbuIoBufCtrl[tti % XRAN_N_FE_BUF_LEN][cc_id][0].sBufferList.pBuffers->pData;

                            /* if PRB map is present in first antenna, assume SRS might be scheduled. */
                            if(prb_map && prb_map->nPrbElm)
                            {
                                /* NDM U-Plane is not enabled */
                                if(pSrsCfg->ndm_offset == 0)
                                {

                                    if (prb_map->nPrbElm > 0)
                                    {
                                        /* Only transmit within the SRS symbol window of the first PRB element */
                                        if(sym_id >= prb_map->prbMap[0].nStartSymb
                                                && sym_id < (prb_map->prbMap[0].nStartSymb + prb_map->prbMap[0].numSymb))
                                            for(ant_id=0; ant_id < xran_get_num_ant_elm(pHandle); ant_id++)
                                                xran_process_tx_srs_cp_off(pHandle, ctx_id, tti, cc_id, ant_id, frame_id, subframe_id, slot_id);
                                    }

                                }
                                /* NDM U-Plane is enabled, SRS U-Planes will be transmitted after ndm_offset (in slots) */
                                else
                                {
                                    p_xran_dev_ctx->ndm_srs_scheduled   = 1;
                                    p_xran_dev_ctx->ndm_srs_tti         = tti;
                                    /* NOTE(review): %2000 presumably wraps at the TTI counter period — confirm */
                                    p_xran_dev_ctx->ndm_srs_txtti       = (tti + pSrsCfg->ndm_offset)%2000;
                                    p_xran_dev_ctx->ndm_srs_schedperiod = pSrsCfg->slot;
                                }
                            }
                        }
                    }
                    /* check SRS NDM UP has been scheduled in non special slots */
                    /*NDM feature enables the spread of SRS packets
                    Non delay measurement SRS PDSCH PUSCH delay measure it*/
                    else if(p_xran_dev_ctx->ndm_srs_scheduled
                            && p_xran_dev_ctx->ndm_srs_txtti == tti)
                    {
                        int ndm_step;
                        uint32_t srs_tti, srsFrame, srsSubframe, srsSlot;
                        uint8_t  srsCtx;

                        srs_tti     = p_xran_dev_ctx->ndm_srs_tti;
                        num_eAxAntElm = xran_get_num_ant_elm(pHandle);
                        /* Spread antenna elements evenly across ndm_txduration symbols */
                        ndm_step    = num_eAxAntElm / pSrsCfg->ndm_txduration;

                        srsSlot     = XranGetSlotNum(srs_tti, SLOTNUM_PER_SUBFRAME(interval));
                        srsSubframe = XranGetSubFrameNum(srs_tti,SLOTNUM_PER_SUBFRAME(interval),  SUBFRAMES_PER_SYSTEMFRAME);
                        srsFrame    = XranGetFrameNum(srs_tti,sfnSecStart,SUBFRAMES_PER_SYSTEMFRAME, SLOTNUM_PER_SUBFRAME(interval));
                        srsFrame    = (srsFrame & 0xff);
                        srsCtx      = srs_tti % XRAN_MAX_SECTIONDB_CTX;

                        if(sym_id < pSrsCfg->ndm_txduration)
                        {
                            /* This symbol's slice of antenna elements */
                            for(ant_id=sym_id*ndm_step; ant_id < (sym_id+1)*ndm_step; ant_id++)
                                xran_process_tx_srs_cp_off(pHandle, srsCtx, srs_tti, cc_id, ant_id, srsFrame, srsSubframe, srsSlot);
                        }
                        else
                        {
                            /* NDM transfer complete: clear the scheduling state */
                            p_xran_dev_ctx->ndm_srs_scheduled   = 0;
                            p_xran_dev_ctx->ndm_srs_tti         = 0;
                            p_xran_dev_ctx->ndm_srs_txtti       = 0;
                            p_xran_dev_ctx->ndm_srs_schedperiod = 0;
                        }
                    }
                }
            }
        }
    }

    MLogXRANTask(PID_DISPATCH_TX_SYM, t1, MLogXRANTick());
    return retval;
}
2418
2419 struct cp_up_tx_desc *
2420 xran_pkt_gen_desc_alloc(void)
2421 {
2422     struct rte_mbuf * mb =  rte_pktmbuf_alloc(_eth_mbuf_pkt_gen);
2423     struct cp_up_tx_desc * p_desc = NULL;
2424     char * start     = NULL;
2425
2426     if(mb){
2427         start     = rte_pktmbuf_append(mb, sizeof(struct cp_up_tx_desc));
2428         if(start) {
2429             p_desc = rte_pktmbuf_mtod(mb,  struct cp_up_tx_desc *);
2430             if(p_desc){
2431                 p_desc->mb = mb;
2432                 return p_desc;
2433             }
2434         }
2435     }
2436     return p_desc;
2437 }
2438
2439 int32_t
2440 xran_pkt_gen_desc_free(struct cp_up_tx_desc *p_desc)
2441 {
2442     if (p_desc){
2443         if(p_desc->mb){
2444             rte_pktmbuf_free(p_desc->mb);
2445             return 0;
2446         } else {
2447             rte_panic("p_desc->mb == NULL\n");
2448         }
2449     }
2450     return -1;
2451 }
2452