1 /*******************************************************************************
2 ################################################################################
3 # Copyright (c) [2017-2019] [Radisys] #
5 # Licensed under the Apache License, Version 2.0 (the "License"); #
6 # you may not use this file except in compliance with the License. #
7 # You may obtain a copy of the License at #
9 # http://www.apache.org/licenses/LICENSE-2.0 #
11 # Unless required by applicable law or agreed to in writing, software #
12 # distributed under the License is distributed on an "AS IS" BASIS, #
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
14 # See the License for the specific language governing permissions and #
15 # limitations under the License. #
16 ################################################################################
17 *******************************************************************************/
19 /************************************************************************
25 Desc: C source code for entry point functions
29 **********************************************************************/
31 /** @file rg_sch_cmn.c
32 @brief This file implements the scheduler's main access to the MAC layer code.
35 static const char* RLOG_MODULE_NAME="MAC";
36 static int RLOG_FILE_ID=187;
37 static int RLOG_MODULE_ID=4096;
39 /* header include files -- defines (.h) */
40 #include "envopt.h" /* environment options */
41 #include "envdep.h" /* environment dependent */
42 #include "envind.h" /* environment independent */
43 #include "gen.h" /* general layer */
44 #include "ssi.h" /* system service interface */
45 #include "cm_hash.h" /* common hash list */
46 #include "cm_llist.h" /* common linked list library */
47 #include "cm_err.h" /* common error */
48 #include "cm_lte.h" /* common LTE */
55 #include "rg_sch_err.h"
56 #include "rg_sch_inf.h"
58 #include "rg_sch_cmn.h"
59 #include "rl_interface.h"
60 #include "rl_common.h"
62 /* header/extern include files (.x) */
63 #include "gen.x" /* general layer typedefs */
64 #include "ssi.x" /* system services typedefs */
65 #include "cm5.x" /* common timers */
66 #include "cm_hash.x" /* common hash list */
67 #include "cm_lib.x" /* common library */
68 #include "cm_llist.x" /* common linked list */
69 #include "cm_mblk.x" /* memory management */
70 #include "cm_tkns.x" /* common tokens */
71 #include "cm_lte.x" /* common tokens */
72 #include "tfu.x" /* TFU types */
73 #include "lrg.x" /* layer management typedefs for MAC */
74 #include "rgr.x" /* layer management typedefs for MAC */
75 #include "rgm.x" /* layer management typedefs for MAC */
76 #include "rg_sch_inf.x" /* typedefs for Scheduler */
77 #include "rg_sch.x" /* typedefs for Scheduler */
78 #include "rg_sch_cmn.x" /* typedefs for Scheduler */
80 #include "lrg.x" /* Stats Structures */
81 #endif /* MAC_SCH_STATS */
84 #endif /* __cplusplus */
87 EXTERN U32 emtcStatsUlTomSrInd;
88 EXTERN U32 emtcStatsUlBsrTmrTxp;
91 #define RG_ITBS_DIFF(_x, _y) ((_x) > (_y) ? (_x) - (_y) : (_y) - (_x))
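/* Descriptive note: RG_ITBS_DIFF evaluates to the absolute difference between
 * two iTBS values, e.g. RG_ITBS_DIFF(10, 7) and RG_ITBS_DIFF(7, 10) both yield 3 */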
92 EXTERN Void rgSCHSc1UlInit ARGS((RgUlSchdApis *apis));
93 #ifdef RG_PHASE2_SCHED
94 EXTERN Void rgSCHRrUlInit ARGS((RgUlSchdApis *apis));
96 EXTERN Void rgSCHEmtcHqInfoFree ARGS((RgSchCellCb *cell, RgSchDlHqProcCb *hqP));
97 EXTERN Void rgSCHEmtcRrUlInit ARGS((RgUlSchdApis *apis));
98 EXTERN Void rgSCHEmtcCmnDlInit ARGS((Void));
99 EXTERN Void rgSCHEmtcCmnUlInit ARGS((Void));
100 EXTERN Void rgSCHEmtcCmnUeNbReset ARGS((RgSchUeCb *ueCb));
101 EXTERN RgSchCmnCqiToTbs *rgSchEmtcCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
103 EXTERN Void rgSCHMaxciUlInit ARGS((RgUlSchdApis *apis));
104 EXTERN Void rgSCHPfsUlInit ARGS((RgUlSchdApis *apis));
106 EXTERN Void rgSCHSc1DlInit ARGS((RgDlSchdApis *apis));
107 #ifdef RG_PHASE2_SCHED
108 EXTERN Void rgSCHRrDlInit ARGS((RgDlSchdApis *apis));
110 EXTERN Void rgSCHEmtcRrDlInit ARGS((RgDlEmtcSchdApis *apis));
112 EXTERN Void rgSCHMaxciDlInit ARGS((RgDlSchdApis *apis));
113 EXTERN Void rgSCHPfsDlInit ARGS((RgDlSchdApis *apis));
115 EXTERN Void rgSCHDlfsInit ARGS((RgDlfsSchdApis *apis));
119 EXTERN Void rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl ARGS((RgSchCellCb *cell));
120 EXTERN Void rgSCHCmnGetEmtcDciFrmtSizes ARGS((RgSchCellCb *cell));
121 EXTERN Void rgSCHEmtcRrUlProcRmvFrmRetx ARGS((RgSchCellCb *cell, RgSchUlHqProcCb *proc));
122 EXTERN S16 rgSCHCmnPrecompEmtcMsg3Vars
124 RgSchCmnUlCell *cellUl,
130 PUBLIC Void rgSCHEmtcCmnUeCcchSduDel
135 EXTERN Void rgSCHEmtcRmvFrmTaLst
137 RgSchCmnDlCell *cellDl,
140 EXTERN Void rgSCHEmtcInitTaLst
142 RgSchCmnDlCell *cellDl
144 EXTERN Void rgSCHEmtcAddToTaLst
146 RgSchCmnDlCell *cellDl,
153 PRIVATE Void rgSCHDlSiSched ARGS((RgSchCellCb *cell,
154 RgSchCmnDlRbAllocInfo *allocInfo,
155 RgInfSfAlloc *subfrmAlloc));
156 PRIVATE Void rgSCHChkNUpdSiCfg ARGS((RgSchCellCb *cell));
157 PRIVATE Void rgSCHSelectSi ARGS((RgSchCellCb *cell));
158 #endif /*RGR_SI_SCH*/
159 /* LTE_ADV_FLAG_REMOVED_START */
161 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
169 PRIVATE S16 rgSCHCmnBuildRntpInfo (
178 PUBLIC Void rgSCHCmnDlSpsSch
182 /* LTE_ADV_FLAG_REMOVED_END */
184 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc ARGS((
186 RgSchCmnDlRbAllocInfo *allocInfo
188 PRIVATE Void rgSCHBcchPcchDlRbAlloc ARGS((
190 RgSchCmnDlRbAllocInfo *allocInfo
192 PRIVATE Void rgSCHCmnDlBcchPcchAlloc ARGS((
196 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
199 TfuDlCqiPucch *pucchCqi,
200 RgrUeCqiRept *ueCqiRept,
202 Bool *is2ndCwCqiAvail
204 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
207 TfuDlCqiPusch *puschCqi,
208 RgrUeCqiRept *ueCqiRept,
210 Bool *is2ndCwCqiAvail
213 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
216 TfuDlCqiPucch *pucchCqi
218 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
221 TfuDlCqiPusch *puschCqi
224 /* ccpu00117452 - MOD - Changed macro name from
225 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
227 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept ARGS((
230 RgrUeCqiRept *ueCqiRept));
231 #endif /* End of RGR_CQI_REPT */
232 /* Fix: syed align multiple UEs to refresh at same time */
233 PRIVATE Void rgSCHCmnGetRefreshPer ARGS((
237 PRIVATE S16 rgSCHCmnApplyUeRefresh ARGS((
241 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa ARGS
246 PRIVATE Void rgSCHCheckAndSetTxScheme ARGS
254 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz ARGS
264 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw ARGS
280 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
284 RgSchDlRbAlloc *allocInfo,
287 PRIVATE Void rgSCHCmnInitRbAlloc ARGS
293 #endif /* __cplusplus */
297 PUBLIC RgSchdApis rgSchCmnApis;
298 PRIVATE RgUlSchdApis rgSchUlSchdTbl[RGSCH_NUM_SCHEDULERS];
299 PRIVATE RgDlSchdApis rgSchDlSchdTbl[RGSCH_NUM_SCHEDULERS];
301 PRIVATE RgUlSchdApis rgSchEmtcUlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
302 PRIVATE RgDlEmtcSchdApis rgSchEmtcDlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
304 #ifdef RG_PHASE2_SCHED
305 PRIVATE RgDlfsSchdApis rgSchDlfsSchdTbl[RGSCH_NUM_DLFS_SCHEDULERS];
307 PRIVATE RgUlSchdInits rgSchUlSchdInits = RGSCH_ULSCHED_INITS;
308 PRIVATE RgDlSchdInits rgSchDlSchdInits = RGSCH_DLSCHED_INITS;
310 PRIVATE RgEmtcUlSchdInits rgSchEmtcUlSchdInits = RGSCH_EMTC_ULSCHED_INITS;
311 PRIVATE RgEmtcDlSchdInits rgSchEmtcDlSchdInits = RGSCH_EMTC_DLSCHED_INITS;
313 #if (defined (RG_PHASE2_SCHED) && defined (TFU_UPGRADE))
314 PRIVATE RgDlfsSchdInits rgSchDlfsSchdInits = RGSCH_DLFSSCHED_INITS;
317 typedef Void (*RgSchCmnDlAllocRbFunc) ARGS((RgSchCellCb *cell, RgSchDlSf *subFrm,
318 RgSchUeCb *ue, U32 bo, U32 *effBo, RgSchDlHqProcCb *proc,
319 RgSchCmnDlRbAllocInfo *cellWdAllocInfo));
320 typedef U8 (*RgSchCmnDlGetPrecInfFunc) ARGS((RgSchCellCb *cell, RgSchUeCb *ue,
321 U8 numLyrs, Bool bothCwEnbld));
323 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1 ARGS((
325 RgSchDlRbAlloc *rbAllocInfo,
326 RgSchDlHqProcCb *hqP,
330 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A ARGS((
332 RgSchDlRbAlloc *rbAllocInfo,
333 RgSchDlHqProcCb *hqP,
337 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B ARGS((
339 RgSchDlRbAlloc *rbAllocInfo,
340 RgSchDlHqProcCb *hqP,
344 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2 ARGS((
346 RgSchDlRbAlloc *rbAllocInfo,
347 RgSchDlHqProcCb *hqP,
351 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A ARGS((
353 RgSchDlRbAlloc *rbAllocInfo,
354 RgSchDlHqProcCb *hqP,
358 PRIVATE Void rgSCHCmnDlAllocTxRbTM1 ARGS((
364 RgSchDlHqProcCb *proc,
365 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
367 PRIVATE Void rgSCHCmnDlAllocTxRbTM2 ARGS((
373 RgSchDlHqProcCb *proc,
374 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
376 PRIVATE Void rgSCHCmnDlAllocTxRbTM3 ARGS((
382 RgSchDlHqProcCb *proc,
383 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
385 PRIVATE Void rgSCHCmnDlAllocTxRbTM4 ARGS((
391 RgSchDlHqProcCb *proc,
392 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
395 PRIVATE Void rgSCHCmnDlAllocTxRbTM5 ARGS((
401 RgSchDlHqProcCb *proc,
402 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
405 PRIVATE Void rgSCHCmnDlAllocTxRbTM6 ARGS((
411 RgSchDlHqProcCb *proc,
412 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
414 PRIVATE Void rgSCHCmnDlAllocTxRbTM7 ARGS((
420 RgSchDlHqProcCb *proc,
421 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
423 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1 ARGS((
429 RgSchDlHqProcCb *proc,
430 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
432 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2 ARGS((
438 RgSchDlHqProcCb *proc,
439 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
441 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3 ARGS((
447 RgSchDlHqProcCb *proc,
448 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
450 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4 ARGS((
456 RgSchDlHqProcCb *proc,
457 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
460 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5 ARGS((
466 RgSchDlHqProcCb *proc,
467 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
470 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6 ARGS((
476 RgSchDlHqProcCb *proc,
477 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
479 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7 ARGS((
485 RgSchDlHqProcCb *proc,
486 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
490 PRIVATE U8 rgSchGetN1ResCount ARGS ((
494 PUBLIC Bool rgSchCmnChkDataOnlyOnPcell
500 PUBLIC U8 rgSCHCmnCalcPcqiBitSz
507 /* Functions specific to each transmission mode for DL Tx RB Allocation*/
508 RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[7] = {rgSCHCmnDlAllocTxRbTM1,
509 rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
510 NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7};
512 /* Functions specific to each transmission mode for DL Retx RB Allocation*/
513 RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[7] = {rgSCHCmnDlAllocRetxRbTM1,
514 rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
515 NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7};
517 /* Functions specific to each transmission mode for DL Tx RB Allocation*/
518 RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[9] = {rgSCHCmnDlAllocTxRbTM1,
519 rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
520 NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7, NULLP, NULLP};
522 /* Functions specific to each transmission mode for DL Retx RB Allocation*/
523 RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[9] = {rgSCHCmnDlAllocRetxRbTM1,
524 rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
525 NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7, NULLP, NULLP};
530 PRIVATE U8 rgSCHCmnDlTM3PrecInf2 ARGS((
536 PRIVATE U8 rgSCHCmnDlTM3PrecInf4 ARGS((
542 PRIVATE U8 rgSCHCmnDlTM4PrecInf2 ARGS((
548 PRIVATE U8 rgSCHCmnDlTM4PrecInf4 ARGS((
554 /* Functions to derive DL precoding info, indexed by TM (3/4) and antenna-port count (2/4) */
555 RgSchCmnDlGetPrecInfFunc getPrecInfoFunc[2][2] = {
556 {rgSCHCmnDlTM3PrecInf2, rgSCHCmnDlTM3PrecInf4},
557 {rgSCHCmnDlTM4PrecInf2, rgSCHCmnDlTM4PrecInf4}
560 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb ARGS((
564 RgSchDlHqTbCb *tbInfo,
569 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb ARGS((
573 RgSchDlHqProcCb *proc,
578 PRIVATE Void rgSCHCmnDlTM3TxTx ARGS((
584 RgSchDlHqProcCb *proc,
585 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
587 PRIVATE Void rgSCHCmnDlTM3TxRetx ARGS((
593 RgSchDlHqProcCb *proc,
594 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
596 PRIVATE Void rgSCHCmnDlTM3RetxRetx ARGS((
602 RgSchDlHqProcCb *proc,
603 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
606 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc ARGS((
612 /* LTE_ADV_FLAG_REMOVED_START */
614 PRIVATE Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc ARGS((
621 /* LTE_ADV_FLAG_REMOVED_END */
622 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx ARGS((
624 RgSchCmnDlRbAllocInfo *allocInfo,
626 RgSchDlHqProcCb *proc
628 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx ARGS((
630 RgSchCmnDlRbAllocInfo *allocInfo,
634 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst ARGS((
635 RgSchCmnDlRbAllocInfo *allocInfo,
637 RgSchDlHqProcCb *proc
639 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb ARGS((
643 RgSchDlHqTbCb *reTxTb,
648 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb ARGS((
652 RgSchDlHqProcCb *proc,
657 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb ARGS((
661 RgSchDlHqTbCb *tbInfo,
667 PRIVATE Void rgSCHCmnFillHqPTb ARGS((
669 RgSchDlRbAlloc *rbAllocInfo,
675 PRIVATE Void rgSCHCmnDlGetBestFitHole ARGS((
684 #ifdef RGSCH_SPS_UNUSED
685 PRIVATE U32 rgSCHCmnGetRaType1Mask ARGS((
691 PRIVATE U32 rgSCHCmnGetRaType0Mask ARGS((
695 PRIVATE U32 rgSCHCmnGetRaType2Mask ARGS((
701 PUBLIC Bool rgSCHCmnRetxAllocAvoid ARGS((
704 RgSchDlHqProcCb *proc
707 PUBLIC U16 rgSCHCmnGetSiSetId ARGS((
714 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi ARGS((
727 //TODO_SID: Currently the table is only for 100 PRBs. Need to modify w.r.t. VRBG table 8.1.5.2.1-1 of V5G_213
728 U32 rgSch5gtfTbSzTbl[MAX_5GTF_MCS] =
729 {1864, 5256, 8776, 13176, 17576, 21976, 26376, 31656, 35176, 39576, 43976, 47496, 52776, 59376, 66392};
731 U32 gUl5gtfSrRecv = 0;
732 U32 gUl5gtfBsrRecv = 0;
733 U32 gUl5gtfUeSchPick = 0;
734 U32 gUl5gtfPdcchSchd = 0;
735 U32 gUl5gtfAllocAllocated = 0;
736 U32 gUl5gtfUeRbAllocDone = 0;
737 U32 gUl5gtfUeRmvFnlzZeroBo = 0;
738 U32 gUl5gtfUeFnlzReAdd = 0;
739 U32 gUl5gtfPdcchSend = 0;
740 U32 gUl5gtfRbAllocFail = 0;
741 U32 ul5gtfsidUlMarkUl = 0;
742 U32 ul5gtfsidDlSchdPass = 0;
743 U32 ul5gtfsidDlAlreadyMarkUl = 0;
744 U32 ul5gtfTotSchdCnt = 0;
747 /* CQI Offset Index to Beta CQI Offset value mapping,
748 * stored as parts per 1000. Reserved is set to 0.
749 * Refer 36.213 sec 8.6.3 Tbl 8.6.3-3 */
750 PUBLIC U32 rgSchCmnBetaCqiOffstTbl[16] = {0, 0, 1125,
751 1250, 1375, 1625, 1750, 2000, 2250, 2500, 2875,
752 3125, 3500, 4000, 5000, 6250};
753 PUBLIC U32 rgSchCmnBetaHqOffstTbl[16] = {2000, 2500, 3125,
754 4000, 5000, 6250, 8000,10000, 12625, 15875, 20000,
755 31000, 50000,80000,126000,0};
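/* Example of the parts-per-1000 encoding above: rgSchCmnBetaCqiOffstTbl[2] == 1125
 * encodes beta_offset_CQI = 1.125 and rgSchCmnBetaHqOffstTbl[0] == 2000 encodes
 * beta_offset_HARQ-ACK = 2.0 (cf. 36.213 Tables 8.6.3-3 and 8.6.3-1) */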
756 PUBLIC U32 rgSchCmnBetaRiOffstTbl[16] = {1250, 1625, 2000,
757 2500, 3125, 4000, 5000, 6250, 8000, 10000,12625,
759 PUBLIC S8 rgSchCmnDlCqiDiffOfst[8] = {0, 1, 2, 3, -4, -3, -2, -1};
761 /* Include CRS REs while calculating Efficiency */
762 CONSTANT PRIVATE U8 rgSchCmnAntIdx[5] = {0,0,1,0,2};
763 CONSTANT PRIVATE U8 rgSchCmnNumResForCrs[5] = {0,6,12,0,16};
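/* Note (assumed usage): both arrays above are indexed by the configured number of
 * antenna ports (1, 2 or 4); rgSchCmnAntIdx maps that count to a compact index
 * (0, 1 or 2) and rgSchCmnNumResForCrs gives the CRS REs accounted per RB */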
770 PUBLIC S8 rgSchCmnApUeSelDiffCqi[4] = {1, 2, 3, 4};
771 PUBLIC S8 rgSchCmnApEnbConfDiffCqi[4] = {0, 1, 2, -1};
774 typedef struct rgSchCmnDlUeDciFrmtOptns
776 TfuDciFormat spfcDciFrmt; /* TM(Transmission Mode) specific DCI format.
777 * Search space : UE Specific by C-RNTI only. */
778 U8 spfcDciRAType; /* Resource Allocation (RA) type for spfcDciFrmt */
779 TfuDciFormat prfrdDciFrmt; /* Preferred DCI format among the available
780 * options for TD (Transmit Diversity) */
781 U8 prfrdDciRAType; /* Resource Allocation (RA) type for prfrdDciFrmt */
782 }RgSchCmnDlUeDciFrmtOptns;
785 /* DCI Format options for each Transmission Mode */
786 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[7] = {
787 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
788 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
789 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
790 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
791 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
792 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
793 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
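/* Note (assumed indexing): row i of rgSchCmnDciFrmtOptns corresponds to
 * transmission mode i+1, e.g. row 2 gives TM3 its TM-specific DCI format 2A
 * (RA type 0) with DCI format 1A (RA type 2) as the transmit-diversity fallback */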
797 /* DCI Format options for each Transmission Mode */
798 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[9] = {
799 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
800 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
801 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
802 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
803 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
804 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
805 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
810 typedef struct rgSchCmnDlImcsTbl
812 U8 modOdr; /* Modulation Order */
814 }RgSchCmnDlImcsTbl[29];
816 CONSTANT struct rgSchCmnMult235Info
818 U8 match; /* Closest number satisfying 2^a.3^b.5^c, with a bias
819 * towards the smaller number */
820 U8 prvMatch; /* Closest number not greater than array index
821 * satisfying 2^a.3^b.5^c */
822 } rgSchCmnMult235Tbl[110+1] = {
824 {1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {6, 6}, {6, 6}, {8, 8},
825 {9, 9}, {10, 10}, {10, 10}, {12, 12}, {12, 12}, {15, 12}, {15, 15},
826 {16, 16}, {16, 16}, {18, 18}, {18, 18}, {20, 20}, {20, 20}, {20, 20},
827 {24, 20}, {24, 24}, {25, 25}, {25, 25}, {27, 27}, {27, 27}, {30, 27},
828 {30, 30}, {30, 30}, {32, 32}, {32, 32}, {32, 32}, {36, 32}, {36, 36},
829 {36, 36}, {36, 36}, {40, 36}, {40, 40}, {40, 40}, {40, 40}, {45, 40},
830 {45, 40}, {45, 45}, {45, 45}, {48, 45}, {48, 48}, {48, 48}, {50, 50},
831 {50, 50}, {50, 50}, {54, 50}, {54, 54}, {54, 54}, {54, 54}, {54, 54},
832 {60, 54}, {60, 54}, {60, 60}, {60, 60}, {60, 60}, {64, 60}, {64, 64},
833 {64, 64}, {64, 64}, {64, 64}, {64, 64}, {72, 64}, {72, 64}, {72, 64},
834 {72, 72}, {72, 72}, {75, 72}, {75, 75}, {75, 75}, {75, 75}, {80, 75},
835 {80, 75}, {80, 80}, {81, 81}, {81, 81}, {81, 81}, {81, 81}, {81, 81},
836 {90, 81}, {90, 81}, {90, 81}, {90, 81}, {90, 90}, {90, 90}, {90, 90},
837 {90, 90}, {96, 90}, {96, 90}, {96, 96}, {96, 96}, {96, 96}, {100, 96},
838 {100, 100}, {100, 100}, {100, 100}, {100, 100}, {100, 100}, {108, 100},
839 {108, 100}, {108, 100}, {108, 108}, {108, 108}, {108, 108}
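/* Illustrative sketch (not part of the original source): rgSchCmnMult235Tbl rounds
 * a PRB count to a number of the form 2^a * 3^b * 5^c, the only allocation sizes
 * for which an uplink DFT size exists. The helper and compile-time guard below are
 * hypothetical and only show how the 'match'/'prvMatch' entries could be validated
 * offline; they are not used by the scheduler. */
#ifdef RG_SCH_CMN_TBL_SELF_TEST /* hypothetical guard, never defined in the build */
PRIVATE Bool rgSCHCmnIsMult235(U16 num)
{
   if (num == 0)
   {
      return (FALSE);
   }
   while ((num % 2) == 0) num /= 2;
   while ((num % 3) == 0) num /= 3;
   while ((num % 5) == 0) num /= 5;
   return ((num == 1) ? TRUE : FALSE);
}
#endif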
843 /* Backoff Indicator (BI) table from 36.321 Table 7.2-1 */
844 CONSTANT PRIVATE S16 rgSchCmnBiTbl[RG_SCH_CMN_NUM_BI_VAL] = {
845 0, 10, 20, 30,40,60,80,120,160,240,320,480,960};
846 PUBLIC RgSchCmnUlCqiInfo rgSchCmnUlCqiTbl[RG_SCH_CMN_UL_NUM_CQI] = {
848 {RGSCH_CMN_QM_CQI_1,RGSCH_CMN_UL_EFF_CQI_1 },
849 {RGSCH_CMN_QM_CQI_2,RGSCH_CMN_UL_EFF_CQI_2 },
850 {RGSCH_CMN_QM_CQI_3,RGSCH_CMN_UL_EFF_CQI_3 },
851 {RGSCH_CMN_QM_CQI_4,RGSCH_CMN_UL_EFF_CQI_4 },
852 {RGSCH_CMN_QM_CQI_5,RGSCH_CMN_UL_EFF_CQI_5 },
853 {RGSCH_CMN_QM_CQI_6,RGSCH_CMN_UL_EFF_CQI_6 },
854 {RGSCH_CMN_QM_CQI_7,RGSCH_CMN_UL_EFF_CQI_7 },
855 {RGSCH_CMN_QM_CQI_8,RGSCH_CMN_UL_EFF_CQI_8 },
856 {RGSCH_CMN_QM_CQI_9,RGSCH_CMN_UL_EFF_CQI_9 },
857 {RGSCH_CMN_QM_CQI_10,RGSCH_CMN_UL_EFF_CQI_10 },
858 {RGSCH_CMN_QM_CQI_11,RGSCH_CMN_UL_EFF_CQI_11 },
859 {RGSCH_CMN_QM_CQI_12,RGSCH_CMN_UL_EFF_CQI_12 },
860 {RGSCH_CMN_QM_CQI_13,RGSCH_CMN_UL_EFF_CQI_13 },
861 {RGSCH_CMN_QM_CQI_14,RGSCH_CMN_UL_EFF_CQI_14 },
862 {RGSCH_CMN_QM_CQI_15,RGSCH_CMN_UL_EFF_CQI_15 },
866 /* This table maps a (delta_offset * 2 + 2) to a (beta * 8)
867 * where beta is 10^-(delta_offset/10) rounded off to nearest 1/8
869 PRIVATE U16 rgSchCmnUlBeta8Tbl[29] = {
870 6, RG_SCH_CMN_UL_INVALID_BETA8, 8, 9, 10, 11, 13, 14, 16, 18, 20, 23,
871 25, 28, 32, RG_SCH_CMN_UL_INVALID_BETA8, 40, RG_SCH_CMN_UL_INVALID_BETA8,
872 50, RG_SCH_CMN_UL_INVALID_BETA8, 64, RG_SCH_CMN_UL_INVALID_BETA8, 80,
873 RG_SCH_CMN_UL_INVALID_BETA8, 101, RG_SCH_CMN_UL_INVALID_BETA8, 127,
874 RG_SCH_CMN_UL_INVALID_BETA8, 160
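/* Worked example of the (beta * 8) encoding above: an entry of 8 encodes
 * beta = 8/8 = 1.0, the first entry 6 encodes beta = 0.75 and the last entry 160
 * encodes beta = 20.0; RG_SCH_CMN_UL_INVALID_BETA8 marks delta_offset indices
 * that have no valid beta value */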
878 /* QCI to SVC priority mapping. Index specifies the Qci*/
879 PRIVATE U8 rgSchCmnDlQciPrio[RG_SCH_CMN_MAX_QCI] = RG_SCH_CMN_QCI_TO_PRIO;
881 /* The configuration is efficiency measured per 1024 REs. */
882 /* The first element stands for when CQI is not known */
883 /* This table is used to translate CQI to its corresponding */
884 /* allocation parameters. These are currently from 36.213. */
885 /* Just this table needs to be edited for modifying */
886 /* the resource allocation behaviour. */
888 /* ADD CQI to MCS mapping correction:
889 * single-dimensional array is replaced by 2 dimensions for different CFIs */
890 PRIVATE U16 rgSchCmnCqiPdschEff[4][16] = {RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI1,
891 RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI2,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI3};
893 PRIVATE U16 rgSchCmn2LyrCqiPdschEff[4][16] = {RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI1,
894 RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI2, RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI3};
896 /* This configuration determines the translation of a UE's CQI to its */
897 /* PDCCH coding efficiency. This may be edited based on the installation */
898 PRIVATE U8 rgSchCmnDlRvTbl[4] = {0, 2, 3, 1}; /* RVIdx sequence is corrected*/
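/* Note (assumed usage): indexing rgSchCmnDlRvTbl with the (re)transmission count
 * modulo 4 yields the standard LTE redundancy-version cycling order 0, 2, 3, 1 */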
900 /* Indexed by [DciFrmt].
901 * Considering the following definition in determining the dciFrmt index.
916 PRIVATE U16 rgSchCmnDciFrmtSizes[10];
919 PRIVATE U16 rgSchCmnCqiPdcchEff[16] = RG_SCH_CMN_CQI_TO_PDCCH_EFF;
923 PUBLIC RgSchTddUlDlSubfrmTbl rgSchTddUlDlSubfrmTbl = {
924 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME},
925 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
926 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
927 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
928 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
929 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
930 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME}
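/* Example reading of the table above: row 0 (UL/DL configuration 0) is
 * D S U U U D S U U U, i.e. subframes 0 and 5 are DL, 1 and 6 are special and
 * the rest are UL, matching 36.211 Table 4.2-2 */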
935 PUBLIC U8 rgSchTddSpsDlMaxRetxTbl[RGSCH_MAX_TDD_UL_DL_CFG] = {
947 /* Special Subframes in OFDM symbols */
948 /* ccpu00134197-MOD-Correct the number of symbols */
949 PUBLIC RgSchTddSplSubfrmInfoTbl rgSchTddSplSubfrmInfoTbl = {
953 {11, 1, 1, 10, 1, 1},
961 /* PHICH 'm' value Table */
962 PUBLIC RgSchTddPhichMValTbl rgSchTddPhichMValTbl = {
963 {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
964 {0, 1, 0, 0, 1, 0, 1, 0, 0, 1},
965 {0, 0, 0, 1, 0, 0, 0, 0, 1, 0},
966 {1, 0, 0, 0, 0, 0, 0, 0, 1, 1},
967 {0, 0, 0, 0, 0, 0, 0, 0, 1, 1},
968 {0, 0, 0, 0, 0, 0, 0, 0, 1, 0},
969 {1, 1, 0, 0, 0, 1, 1, 0, 0, 1}
972 /* PHICH 'K' value Table */
973 PUBLIC RgSchTddKPhichTbl rgSchTddKPhichTbl = {
974 {0, 0, 4, 7, 6, 0, 0, 4, 7, 6},
975 {0, 0, 4, 6, 0, 0, 0, 4, 6, 0},
976 {0, 0, 6, 0, 0, 0, 0, 6, 0, 0},
977 {0, 0, 6, 6, 6, 0, 0, 0, 0, 0},
978 {0, 0, 6, 6, 0, 0, 0, 0, 0, 0},
979 {0, 0, 6, 0, 0, 0, 0, 0, 0, 0},
980 {0, 0, 4, 6, 6, 0, 0, 4, 7, 0}
983 /* Uplink association index 'K' value Table */
984 PUBLIC RgSchTddUlAscIdxKDashTbl rgSchTddUlAscIdxKDashTbl = {
985 {0, 0, 6, 4, 0, 0, 0, 6, 4, 0},
986 {0, 0, 4, 0, 0, 0, 0, 4, 0, 0},
987 {0, 0, 4, 4, 4, 0, 0, 0, 0, 0},
988 {0, 0, 4, 4, 0, 0, 0, 0, 0, 0},
989 {0, 0, 4, 0, 0, 0, 0, 0, 0, 0},
990 {0, 0, 7, 7, 5, 0, 0, 7, 7, 0}
994 /* PUSCH 'K' value Table */
995 PUBLIC RgSchTddPuschTxKTbl rgSchTddPuschTxKTbl = {
996 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
997 {0, 6, 0, 0, 4, 0, 6, 0, 0, 4},
998 {0, 0, 0, 4, 0, 0, 0, 0, 4, 0},
999 {4, 0, 0, 0, 0, 0, 0, 0, 4, 4},
1000 {0, 0, 0, 0, 0, 0, 0, 0, 4, 4},
1001 {0, 0, 0, 0, 0, 0, 0, 0, 4, 0},
1002 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1005 /* PDSCH to PUCCH Table for DL HARQ feedback. Based on the
1006 Downlink association set index 'K' table */
1007 PUBLIC U8 rgSchTddPucchTxTbl[7][10] = {
1008 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1009 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1010 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1011 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1012 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1013 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1014 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1017 /* Table to fetch the next DL sf idx for applying the
1018 new CFI. The next Dl sf Idx at which the new CFI
1019 is applied is always the starting Sf of the next ACK/NACK bundle.
1022 Ex: In Cfg-2, sf4 and sf9 are the only subframes at which
1023 a new ACK/NACK bundle of DL subframes can start
1025 D S U D D D S U D D D S U D D D S U D D
1028 dlSf Array for Cfg-2:
1029 sfNum: 0 1 3 4 5 6 8 9 0 1 3 4 5 6 8 9
1030 sfIdx: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
1032 If CFI changes at sf0, nearest DL SF bundle >= 4 TTI is sf4
1033 So at sf4 the new CFI can be applied. To arrive at sf4 from
1034 sf0, the sfIdx has to be increased by 3 */
1036 PUBLIC U8 rgSchTddPdcchSfIncTbl[7][10] = {
1037 /* A/N Bundl: 0,1,5,6*/ {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
1038 /* A/N Bundl: 0,4,5,9*/ {2, 2, 0, 0, 3, 2, 2, 0, 0, 3},
1039 /* A/N Bundl: 4,9*/ {3, 6, 0, 5, 4, 3, 6, 0, 5, 4},
1040 /* A/N Bundl: 1,7,9*/ {4, 3, 0, 0, 0, 4, 5, 4, 6, 5},
1041 /* A/N Bundl: 0,6*/ {4, 3, 0, 0, 6, 5, 4, 7, 6, 5},
1042 /* A/N Bundl: 9*/ {8, 7, 0, 6, 5, 4, 12, 11, 10, 9},
1043 /* A/N Bundl: 0,1,5,6,9*/ {2, 1, 0, 0, 0, 2, 2, 0, 0, 3}
1047 /* combine compilation fixes */
1049 /* subframe offset values to be used when twoIntervalsConfig is enabled in UL SPS */
1051 PUBLIC RgSchTddSfOffTbl rgSchTddSfOffTbl = {
1052 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1053 {0, 0, 1, -1, 0, 0, 0, 1, -1, 0},
1054 {0, 0, 5, 0, 0, 0, 0, -5, 0, 0},
1055 {0, 0, 1, 1, -2, 0, 0, 0, 0, 0},
1056 {0, 0, 1, -1, 0, 0, 0, 0, 0, 0},
1057 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1058 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
1062 /* Table to determine when uplink SPS configured grants should
1063 * explicitly be reserved in a subframe. When entries are the same
1064 * as those of Msg3SubfrmTbl, it indicates competition with msg3.
1065 * As of now, this is the same as Msg3SubfrmTbl (leaving out uldlcfg 2),
1066 * except that all 255s are now zeros. */
1067 PUBLIC RgSchTddSpsUlRsrvTbl rgSchTddSpsUlRsrvTbl = {
1068 {0, 0, 0, 6, 8, 0, 0, 0, 6, 8},
1069 {0, 0, 6, 9, 0, 0, 0, 6, 9, 0},
1070 {0, 0, 10, 0, 0, 0, 0, 10, 0, 0},
1071 {0, 0, 0, 0, 8, 0, 7, 7, 14, 0},
1072 {0, 0, 0, 9, 0, 0, 7, 15, 0, 0},
1073 {0, 0, 10, 0, 0, 0, 16, 0, 0, 0},
1074 {0, 0, 0, 0, 8, 0, 0, 0, 9, 0}
1077 /* Inverse DL Assoc Set index Table */
1078 PUBLIC RgSchTddInvDlAscSetIdxTbl rgSchTddInvDlAscSetIdxTbl = {
1079 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1080 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1081 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1082 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1083 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1084 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1085 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1088 #endif /* (LTEMAC_SPS ) */
1090 /* Number of Uplink subframes Table */
1091 PRIVATE U8 rgSchTddNumUlSf[] = {6, 4, 2, 3, 2, 1, 5};
1093 /* Uplink HARQ processes Table */
1094 PUBLIC RgSchTddUlNumHarqProcTbl rgSchTddUlNumHarqProcTbl = { 7, 4, 2, 3, 2, 1, 6};
1096 /* Downlink HARQ processes Table */
1097 PUBLIC RgSchTddDlNumHarqProcTbl rgSchTddDlNumHarqProcTbl = { 4, 7, 10, 9, 12, 15, 6};
1099 /* Downlink association index set 'K' value Table */
1100 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlAscSetIdxKTbl = {
1101 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1103 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1105 { {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}} },
1107 { {0, {0}}, {0, {0}}, {3, {7, 6, 11}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1109 { {0, {0}}, {0, {0}}, {4, {12, 8, 7, 11}}, {4, {6, 5, 4, 7}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1111 { {0, {0}}, {0, {0}}, {9, {13, 12, 9, 8, 7, 5, 4, 11, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1113 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
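/* Example reading of the table above: for UL/DL configuration 1, UL subframe 2
 * carries the association set {7, 6}, i.e. HARQ feedback sent in UL subframe n
 * acknowledges PDSCH transmissions in DL subframes n-7 and n-6
 * (cf. 36.213 Table 10.1.3.1-1) */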
1116 /* ccpu132282-ADD-the table rgSchTddDlAscSetIdxKTbl is rearranged in
1117 * decreasing order of Km; this is used to derive the NCE used for
1118 * calculating the N1Pucch resource for HARQ */
1119 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlHqPucchResCalTbl = {
1120 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1122 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1124 { {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}} },
1126 { {0, {0}}, {0, {0}}, {3, {11, 7, 6}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1128 { {0, {0}}, {0, {0}}, {4, {12, 11, 8, 7}}, {4, {7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1130 { {0, {0}}, {0, {0}}, {9, {13, 12, 11, 9, 8, 7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1132 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
1135 /* Minimum number of Ack/Nack feedback entries to be
1136 stored for each UL-DL configuration */
1137 PUBLIC RgSchTddANFdbkMapTbl rgSchTddANFdbkMapTbl = {4, 4, 2, 3, 2, 1, 5};
1139 /* Uplink switch points and number of UL subframes Table */
1140 PUBLIC RgSchTddMaxUlSubfrmTbl rgSchTddMaxUlSubfrmTbl = {
1141 {2,3,3}, {2,2,2}, {2,1,1}, {1,3,0}, {1,2,0}, {1,1,0}, {2,3,2}
1144 /* Uplink switch points and number of DL subframes Table */
1145 PUBLIC RgSchTddMaxDlSubfrmTbl rgSchTddMaxDlSubfrmTbl = {
1146 {2,2,2}, {2,3,3}, {2,4,4}, {1,7,0}, {1,8,0}, {1,9,0}, {2,2,3}
1149 /* Number of UL subframes present before a particular subframe */
1150 PUBLIC RgSchTddNumUlSubfrmTbl rgSchTddNumUlSubfrmTbl = {
1151 {0, 0, 1, 2, 3, 3, 3, 4, 5, 6},
1152 {0, 0, 1, 2, 2, 2, 2, 3, 4, 4},
1153 {0, 0, 1, 1, 1, 1, 1, 2, 2, 2},
1154 {0, 0, 1, 2, 3, 3, 3, 3, 3, 3},
1155 {0, 0, 1, 2, 2, 2, 2, 2, 2, 2},
1156 {0, 0, 1, 1, 1, 1, 1, 1, 1, 1},
1157 {0, 0, 1, 2, 3, 3, 3, 4, 5, 5}
1160 /* Number of DL subframes present till a particular subframe */
1161 PUBLIC RgSchTddNumDlSubfrmTbl rgSchTddNumDlSubfrmTbl = {
1162 {1, 2, 2, 2, 2, 3, 4, 4, 4, 4},
1163 {1, 2, 2, 2, 3, 4, 5, 5, 5, 6},
1164 {1, 2, 2, 3, 4, 5, 6, 6, 7, 8},
1165 {1, 2, 2, 2, 2, 3, 4, 5, 6, 7},
1166 {1, 2, 2, 2, 3, 4, 5, 6, 7, 8},
1167 {1, 2, 2, 3, 4, 5, 6, 7, 8, 9},
1168 {1, 2, 2, 2, 2, 3, 4, 4, 4, 5}
1172 /* Nearest possible DL subframe Index from UL subframe
1173 * DL Index < UL Index */
1174 PUBLIC RgSchTddLowDlSubfrmIdxTbl rgSchTddLowDlSubfrmIdxTbl = {
1175 {0, 1, 1, 1, 1, 5, 6, 6, 6, 6},
1176 {0, 1, 1, 1, 4, 5, 6, 6, 6, 9},
1177 {0, 1, 1, 3, 4, 5, 6, 6, 8, 9},
1178 {0, 1, 1, 1, 1, 5, 6, 7, 8, 9},
1179 {0, 1, 1, 1, 4, 5, 6, 7, 8, 9},
1180 {0, 1, 1, 3, 4, 5, 6, 7, 8, 9},
1181 {0, 1, 1, 1, 1, 5, 6, 6, 6, 9}
1184 /* Nearest possible DL subframe Index from UL subframe
1185 * DL Index > UL Index
1186 * 10 represents Next SFN low DL Idx */
1187 PUBLIC RgSchTddHighDlSubfrmIdxTbl rgSchTddHighDlSubfrmIdxTbl = {
1188 {0, 1, 5, 5, 5, 5, 6, 10, 10, 10},
1189 {0, 1, 4, 4, 4, 5, 6, 9, 9, 9},
1190 {0, 1, 3, 3, 4, 5, 6, 8, 8, 9},
1191 {0, 1, 5, 5, 5, 5, 6, 7, 8, 9},
1192 {0, 1, 4, 4, 4, 5, 6, 7, 8, 9},
1193 {0, 1, 3, 3, 4, 5, 6, 7, 8, 9},
1194 {0, 1, 5, 5, 5, 5, 6, 9, 9, 9}
1197 /* RACH Message3 related information */
1198 PUBLIC RgSchTddMsg3SubfrmTbl rgSchTddMsg3SubfrmTbl = {
1199 {7, 6, 255, 255, 255, 7, 6, 255, 255, 255},
1200 {7, 6, 255, 255, 8, 7, 6, 255, 255, 8},
1201 {7, 6, 255, 9, 8, 7, 6, 255, 9, 8},
1202 {12, 11, 255, 255, 255, 7, 6, 6, 6, 13},
1203 {12, 11, 255, 255, 8, 7, 6, 6, 14, 13},
1204 {12, 11, 255, 9, 8, 7, 6, 15, 14, 13},
1205 {7, 6, 255, 255, 255, 7, 6, 255, 255, 8}
1208 /* ccpu00132341-DEL Removed rgSchTddRlsDlSubfrmTbl and used Kset table for
1209 * releasing DL HARQs */
1211 /* DwPTS Scheduling Changes Start */
1212 /* Provides the number of Cell Reference Signals in DwPTS
1214 PRIVATE U8 rgSchCmnDwptsCrs[2][3] = {/* [Spl Sf cfg][Ant Port] */
1215 {4, 8, 16}, /* Spl Sf cfg 1,2,3,6,7,8 */
1216 {6, 12, 20}, /* Spl Sf cfg 4 */
1219 PRIVATE S8 rgSchCmnSplSfDeltaItbs[9] = RG_SCH_DWPTS_ITBS_ADJ;
1220 /* DwPTS Scheduling Changes End */
1224 PRIVATE U32 rgSchCmnBsrTbl[64] = {
1225 0, 10, 12, 14, 17, 19, 22, 26,
1226 31, 36, 42, 49, 57, 67, 78, 91,
1227 107, 125, 146, 171, 200, 234, 274, 321,
1228 376, 440, 515, 603, 706, 826, 967, 1132,
1229 1326, 1552, 1817, 2127, 2490, 2915, 3413, 3995,
1230 4677, 5476, 6411, 7505, 8787, 10287, 12043, 14099,
1231 16507, 19325, 22624, 26487, 31009, 36304, 42502, 49759,
1232 58255, 68201, 79846, 93479, 109439, 128125, 150000, 220000
1235 PRIVATE U32 rgSchCmnExtBsrTbl[64] = {
1236 0, 10, 13, 16, 19, 23, 29, 35,
1237 43, 53, 65, 80, 98, 120, 147, 181,
1238 223, 274, 337, 414, 509, 625, 769, 945,
1239 1162, 1429, 1757, 2161, 2657, 3267, 4017, 4940,
1240 6074, 7469, 9185, 11294, 13888, 17077, 20999, 25822,
1241 31752, 39045, 48012, 59039, 72598, 89272, 109774, 134986,
1242 165989, 204111, 250990, 308634, 379519, 466683, 573866, 705666,
1243 867737, 1067031, 1312097, 1613447, 1984009, 2439678, 3000000, 3100000
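/* Illustrative sketch (not part of the original source): the BSR tables above map
 * a reported buffer-status index to a byte upper bound, e.g. rgSchCmnBsrTbl[26] ==
 * 515 matches the <= 515 byte level of 36.321 Table 6.1.3.1-1, and
 * rgSchCmnExtBsrTbl follows the extended Table 6.1.3.1-2. The helper and guard
 * below are hypothetical and only show how such a lookup could be wrapped; they
 * are not used by the scheduler. */
#ifdef RG_SCH_CMN_TBL_SELF_TEST /* hypothetical guard, never defined in the build */
PRIVATE U32 rgSCHCmnBsrIdxToBytes(U8 bsrIdx, Bool useExtTbl)
{
   if (bsrIdx > 63)
   {
      bsrIdx = 63; /* both tables have 64 entries (indices 0..63) */
   }
   return (useExtTbl ? rgSchCmnExtBsrTbl[bsrIdx] : rgSchCmnBsrTbl[bsrIdx]);
}
#endif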
1247 PRIVATE U8 rgSchCmnUlRvIdxToIMcsTbl[4] = {32, 30, 31, 29};
1249 PUBLIC U8 rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_MAX_CP][RG_SCH_CMN_UL_NUM_CQI];
1251 PUBLIC RgSchTbSzTbl rgTbSzTbl = {
1253 {16, 32, 56, 88, 120, 152, 176, 208, 224, 256, 288, 328, 344, 376, 392, 424, 456, 488, 504, 536, 568, 600, 616, 648, 680, 712, 744, 776, 776, 808, 840, 872, 904, 936, 968, 1000, 1032, 1032, 1064, 1096, 1128, 1160, 1192, 1224, 1256, 1256, 1288, 1320, 1352, 1384, 1416, 1416, 1480, 1480, 1544, 1544, 1608, 1608, 1608, 1672, 1672, 1736, 1736, 1800, 1800, 1800, 1864, 1864, 1928, 1928, 1992, 1992, 2024, 2088, 2088, 2088, 2152, 2152, 2216, 2216, 2280, 2280, 2280, 2344, 2344, 2408, 2408, 2472, 2472, 2536, 2536, 2536, 2600, 2600, 2664, 2664, 2728, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 2984, 2984, 3112},
1254 {24, 56, 88, 144, 176, 208, 224, 256, 328, 344, 376, 424, 456, 488, 520, 568, 600, 632, 680, 712, 744, 776, 808, 872, 904, 936, 968, 1000, 1032, 1064, 1128, 1160, 1192, 1224, 1256, 1288, 1352, 1384, 1416, 1416, 1480, 1544, 1544, 1608, 1608, 1672, 1736, 1736, 1800, 1800, 1864, 1864, 1928, 1992, 1992, 2024, 2088, 2088, 2152, 2152, 2216, 2280, 2280, 2344, 2344, 2408, 2472, 2472, 2536, 2536, 2600, 2600, 2664, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008},
1255 {32, 72, 144, 176, 208, 256, 296, 328, 376, 424, 472, 520, 568, 616, 648, 696, 744, 776, 840, 872, 936, 968, 1000, 1064, 1096, 1160, 1192, 1256, 1288, 1320, 1384, 1416, 1480, 1544, 1544, 1608, 1672, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2024, 2088, 2088, 2152, 2216, 2216, 2280, 2344, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2664, 2728, 2792, 2856, 2856, 2856, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968},
1256 {40, 104, 176, 208, 256, 328, 392, 440, 504, 568, 616, 680, 744, 808, 872, 904, 968, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1864, 1928, 1992, 2024, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6200, 6456, 6456},
1257 {56, 120, 208, 256, 328, 408, 488, 552, 632, 696, 776, 840, 904, 1000, 1064, 1128, 1192, 1288, 1352, 1416, 1480, 1544, 1608, 1736, 1800, 1864, 1928, 1992, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2600, 2664, 2728, 2792, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992},
1258 {72, 144, 224, 328, 424, 504, 600, 680, 776, 872, 968, 1032, 1128, 1224, 1320, 1384, 1480, 1544, 1672, 1736, 1864, 1928, 2024, 2088, 2216, 2280, 2344, 2472, 2536, 2664, 2728, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4264, 4392, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9528},
1259 {328, 176, 256, 392, 504, 600, 712, 808, 936, 1032, 1128, 1224, 1352, 1480, 1544, 1672, 1736, 1864, 1992, 2088, 2216, 2280, 2408, 2472, 2600, 2728, 2792, 2984, 2984, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3880, 4008, 4136, 4136, 4264, 4392, 4584, 4584, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448},
1260 {104, 224, 328, 472, 584, 712, 840, 968, 1096, 1224, 1320, 1480, 1608, 1672, 1800, 1928, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536},
1261 {120, 256, 392, 536, 680, 808, 968, 1096, 1256, 1384, 1544, 1672, 1800, 1928, 2088, 2216, 2344, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15264},
1262 {136, 296, 456, 616, 776, 936, 1096, 1256, 1416, 1544, 1736, 1864, 2024, 2216, 2344, 2536, 2664, 2856, 2984, 3112, 3368, 3496, 3624, 3752, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 5160, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568},
1263 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080},
1264 {176, 376, 584, 776, 1000, 1192, 1384, 1608, 1800, 2024, 2216, 2408, 2600, 2792, 2984, 3240, 3496, 3624, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152},
1265 {208, 440, 680, 904, 1128, 1352, 1608, 1800, 2024, 2280, 2472, 2728, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 23688, 24496, 24496, 24496, 24496, 25456},
1266 {224, 488, 744, 1000, 1256, 1544, 1800, 2024, 2280, 2536, 2856, 3112, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 26416, 27376, 27376, 27376, 27376, 28336, 28336},
1267 {256, 552, 840, 1128, 1416, 1736, 1992, 2280, 2600, 2856, 3112, 3496, 3752, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 5992, 6200, 6456, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704},
1268 {280, 600, 904, 1224, 1544, 1800, 2152, 2472, 2728, 3112, 3368, 3624, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 6200, 6456, 6712, 6968, 7224, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008},
1269 {328, 632, 968, 1288, 1608, 1928, 2280, 2600, 2984, 3240, 3624, 3880, 4264, 4584, 4968, 5160, 5544, 5992, 6200, 6456, 6712, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 35160},
1270 {336, 696, 1064, 1416, 1800, 2152, 2536, 2856, 3240, 3624, 4008, 4392, 4776, 5160, 5352, 5736, 6200, 6456, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 39232},
1271 {376, 776, 1160, 1544, 1992, 2344, 2792, 3112, 3624, 4008, 4392, 4776, 5160, 5544, 5992, 6200, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14112, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816},
1272 {408, 840, 1288, 1736, 2152, 2600, 2984, 3496, 3880, 4264, 4776, 5160, 5544, 5992, 6456, 6968, 7224, 7736, 8248, 8504, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19080, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888},
1273 {440, 904, 1384, 1864, 2344, 2792, 3240, 3752, 4136, 4584, 5160, 5544, 5992, 6456, 6968, 7480, 7992, 8248, 8760, 9144, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 20616, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 48936, 51024, 51024, 51024},
1274 {488, 1000, 1480, 1992, 2472, 2984, 3496, 4008, 4584, 4968, 5544, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 9912, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056},
1275 {520, 1064, 1608, 2152, 2664, 3240, 3752, 4264, 4776, 5352, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 10296, 10680, 11448, 11832, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 19080, 19080, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256},
1276 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776},
1277 {584, 1192, 1800, 2408, 2984, 3624, 4264, 4968, 5544, 5992, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 22920, 23688, 24496, 25456, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592},
1278 {616, 1256, 1864, 2536, 3112, 3752, 4392, 5160, 5736, 6200, 6968, 7480, 8248, 8760, 9528, 10296, 10680, 11448, 12216, 12576, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592, 68808, 68808, 68808, 71112},
1279 {712, 1480, 2216, 2984, 3752, 4392, 5160, 5992, 6712, 7480, 8248, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 14688, 15264, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376}
1282 {32, 88, 152, 208, 256, 328, 376, 424, 488, 536, 600, 648, 712, 776, 808, 872, 936, 1000, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2088, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 5992, 6200},
1283 {56, 144, 208, 256, 344, 424, 488, 568, 632, 712, 776, 872, 936, 1000, 1064, 1160, 1224, 1288, 1384, 1416, 1544, 1608, 1672, 1736, 1800, 1864, 1992, 2024, 2088, 2152, 2280, 2344, 2408, 2472, 2536, 2600, 2728, 2792, 2856, 2856, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5352, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992},
1284 {72, 176, 256, 328, 424, 520, 616, 696, 776, 872, 968, 1064, 1160, 1256, 1320, 1416, 1544, 1608, 1672, 1800, 1864, 1992, 2088, 2152, 2216, 2344, 2408, 2536, 2600, 2664, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3368, 3496, 3624, 3624, 3752, 3880, 4008, 4008, 4136, 4264, 4264, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912},
1285 {104, 208, 328, 440, 568, 680, 808, 904, 1032, 1160, 1256, 1384, 1480, 1608, 1736, 1864, 1992, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2856, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4392, 4584, 4776, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12576, 12960, 12960},
1286 {120, 256, 408, 552, 696, 840, 1000, 1128, 1288, 1416, 1544, 1736, 1864, 1992, 2152, 2280, 2408, 2600, 2728, 2856, 2984, 3112, 3240, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840},
1287 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19080},
1288 {176, 392, 600, 808, 1032, 1224, 1480, 1672, 1864, 2088, 2280, 2472, 2728, 2984, 3112, 3368, 3496, 3752, 4008, 4136, 4392, 4584, 4776, 4968, 5160, 5352, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7992, 8248, 8248, 8504, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920},
1289 {224, 472, 712, 968, 1224, 1480, 1672, 1928, 2216, 2472, 2664, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376},
1290 {256, 536, 808, 1096, 1384, 1672, 1928, 2216, 2536, 2792, 3112, 3368, 3624, 3880, 4264, 4584, 4776, 4968, 5352, 5544, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576},
1291 {296, 616, 936, 1256, 1544, 1864, 2216, 2536, 2856, 3112, 3496, 3752, 4136, 4392, 4776, 5160, 5352, 5736, 5992, 6200, 6712, 6968, 7224, 7480, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160},
1292 {328, 680, 1032, 1384, 1736, 2088, 2472, 2792, 3112, 3496, 3880, 4264, 4584, 4968, 5352, 5736, 5992, 6200, 6712, 6968, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 21384, 21384, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888},
1293 {376, 776, 1192, 1608, 2024, 2408, 2792, 3240, 3624, 4008, 4392, 4776, 5352, 5736, 5992, 6456, 6968, 7224, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816},
1294 {440, 904, 1352, 1800, 2280, 2728, 3240, 3624, 4136, 4584, 4968, 5544, 5992, 6456, 6712, 7224, 7736, 8248, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 17568, 17568, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024},
1295 {488, 1000, 1544, 2024, 2536, 3112, 3624, 4136, 4584, 5160, 5736, 6200, 6712, 7224, 7736, 8248, 8760, 9144, 9912, 10296, 10680, 11448, 11832, 12216, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336},
1296 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776},
1297 {600, 1224, 1800, 2472, 3112, 3624, 4264, 4968, 5544, 6200, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 23688, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808},
1298 {632, 1288, 1928, 2600, 3240, 3880, 4584, 5160, 5992, 6456, 7224, 7736, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 68808, 71112, 71112, 71112, 71112},
1299 {696, 1416, 2152, 2856, 3624, 4392, 5160, 5736, 6456, 7224, 7992, 8760, 9528, 10296, 10680, 11448, 12216, 12960, 13536, 14688, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 71112, 73712, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 78704},
1300 {776, 1544, 2344, 3112, 4008, 4776, 5544, 6200, 7224, 7992, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 27376, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936},
1301 {840, 1736, 2600, 3496, 4264, 5160, 5992, 6968, 7736, 8504, 9528, 10296, 11064, 12216, 12960, 13536, 14688, 15264, 16416, 16992, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800},
1302 {904, 1864, 2792, 3752, 4584, 5544, 6456, 7480, 8248, 9144, 10296, 11064, 12216, 12960, 14112, 14688, 15840, 16992, 17568, 18336, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 42368, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 97896, 101840, 101840, 101840},
1303 {1000, 1992, 2984, 4008, 4968, 5992, 6968, 7992, 9144, 9912, 11064, 12216, 12960, 14112, 15264, 15840, 16992, 18336, 19080, 19848, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 31704, 32856, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136},
1304 {1064, 2152, 3240, 4264, 5352, 6456, 7480, 8504, 9528, 10680, 11832, 12960, 14112, 15264, 16416, 16992, 18336, 19080, 20616, 21384, 22920, 23688, 24496, 25456, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816},
1305 {1128, 2280, 3496, 4584, 5736, 6968, 7992, 9144, 10296, 11448, 12576, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 45352, 45352, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840,101840,101840,101840,105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496},
1306 {1192, 2408, 3624, 4968, 5992, 7224, 8504, 9912, 11064, 12216, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 42368, 43816, 45352, 46888, 46888, 48936, 51024, 51024, 52752, 52752, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208},
1307 {1256, 2536, 3752, 5160, 6200, 7480, 8760, 10296, 11448, 12576, 14112, 15264, 16416, 17568, 19080, 20616, 21384, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 43816, 43816, 45352, 46888, 48936, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040,115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208, 137792, 137792, 137792, 142248},
1308 {1480, 2984, 4392, 5992, 7480, 8760, 10296, 11832, 13536, 14688, 16416, 17568, 19080, 20616, 22152, 23688, 25456, 26416, 28336, 29296, 30576, 32856, 34008, 35160, 36696, 37888, 40576, 40576, 42368, 43816, 45352, 46888, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 63776, 63776, 66592, 68808, 68808, 71112, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 81176, 84760, 84760, 87936, 87936, 90816, 90816, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 128496, 128496, 128496, 133208, 133208, 133208, 137792, 137792, 137792, 142248, 142248, 142248, 146856, 146856,149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776}
1311 RgSchUlIMcsTbl rgUlIMcsTbl = {
1312 {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5},
1313 {2, 6}, {2, 7}, {2, 8}, {2, 9}, {2, 10},
1314 {4, 10}, {4, 11}, {4, 12}, {4, 13}, {4, 14},
1315 {4, 15}, {4, 16}, {4, 17}, {4, 18}, {4, 19},
1316 {6, 19}, {6, 20}, {6, 21}, {6, 22}, {6, 23},
1317 {6, 24}, {6, 25}, {6, 26}
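/* Each row above maps a UL MCS index (0..28, in row order) to
 * {modulation order Qm, I-TBS}, in line with 3GPP TS 36.213 Table 8.6.1-1 */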
1319 RgSchUeCatTbl rgUeCatTbl = {
1320 /*Column1:Maximum number of bits of a UL-SCH
1321 transport block transmitted within a TTI
1323 Column2:Maximum number of bits of a single DL-SCH
1324 transport block received within a TTI
1326 Column3:Total number of soft channel bits
1328 Column4:Support for 64QAM in UL
1330 Column5:Maximum total number of DL-SCH transport
1331 block bits received within a TTI
1333 Column6:Maximum number of supported layers for
1334 spatial multiplexing in DL
1336 {5160, {10296,0}, 250368, FALSE, 10296, 1},
1337 {25456, {51024,0}, 1237248, FALSE, 51024, 2},
1338 {51024, {75376,0}, 1237248, FALSE, 102048, 2},
1339 {51024, {75376,0}, 1827072, FALSE, 150752, 2},
1340 {75376, {149776,0}, 3667200, TRUE, 299552, 4},
1341 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1342 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1343 {149776,{299856,0}, 35982720,TRUE, 2998560, 8}
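/* Example reading of the column key above, taking the first row
 * (UE category 1, assuming rows are ordered by category starting at 1):
 * up to 5160 UL-SCH bits per TTI, up to 10296 bits per DL-SCH transport
 * block, 250368 total soft channel bits, no 64QAM support in UL,
 * up to 10296 total DL-SCH bits per TTI, and 1 DL spatial layer */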
1346 /* [ccpu00138532]-ADD-The table below stores the minimum HARQ RTT
1347 in Downlink for TDD and FDD. Indices 0 to 6 map to TDD UL/DL configs 0-6.
1348 Index 7 maps to FDD */
1349 U8 rgSchCmnHarqRtt[8] = {4,7,10,9,12,15,6,8};
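/* Illustrative sketch only: the helper below is hypothetical and not part of
 * the original scheduler API. It shows one way the minimum DL HARQ RTT could
 * be read from rgSchCmnHarqRtt using the index mapping described above
 * (TDD UL/DL configuration 0-6, or 7 for FDD). */
PRIVATE U8 rgSCHCmnExmplMinHqRtt(U8 cfgIdx)
{
   /* cfgIdx: TDD UL/DL configuration 0..6, or 7 for FDD. Out-of-range values
    * fall back to the FDD entry purely as a defensive choice in this sketch */
   return (cfgIdx <= 7) ? rgSchCmnHarqRtt[cfgIdx] : rgSchCmnHarqRtt[7];
}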
1350 /* Number of CFI switchover indices equals 7 TDD indices + 1 FDD index */
1351 U8 rgSchCfiSwitchOvrWinLen[] = {7, 4, 2, 3, 2, 1, 6, 8};
1353 /* EffTbl is calculated for single layer and two layers.
1354 * CqiToTbs is calculated for single layer and two layers */
1355 RgSchCmnTbSzEff rgSchCmnNorCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1356 RgSchCmnTbSzEff rgSchCmnNorCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1357 /* New variable to store UL efficiency values for normal and extended CP */
1358 RgSchCmnTbSzEff rgSchCmnNorUlEff[1],rgSchCmnExtUlEff[1];
1359 RgSchCmnCqiToTbs rgSchCmnNorCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1360 RgSchCmnCqiToTbs rgSchCmnNorCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1361 RgSchCmnCqiToTbs *rgSchCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
1362 RgSchCmnTbSzEff rgSchCmnExtCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1363 RgSchCmnTbSzEff rgSchCmnExtCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1364 RgSchCmnCqiToTbs rgSchCmnExtCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1365 RgSchCmnCqiToTbs rgSchCmnExtCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1366 /* Account for (deduct) CRS REs while calculating efficiency */
1367 RgSchCmnTbSzEff *rgSchCmnEffTbl[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_ANT_CONF][RG_SCH_CMN_MAX_CFI];
1368 RgSchCmnTbSzEff *rgSchCmnUlEffTbl[RG_SCH_CMN_MAX_CP];
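/* Illustrative sketch only: the helper below is hypothetical and not part of
 * the original scheduler API. It shows how the DL efficiency tables declared
 * above are intended to be dereferenced once rgSCHCmnDlInit() has populated
 * them. Index order is [layer index per codeword (0: 1 layer, 1: 2 layers)]
 * [cyclic prefix (RG_SCH_CMN_NOR_CP or RG_SCH_CMN_EXT_CP)]
 * [Tx antenna configuration index][CFI - 1]. */
PRIVATE RgSchCmnTbSzEff *rgSCHCmnExmplGetEffTbl(U8 lyrIdx, U8 cpType, U8 antIdx, U8 cfi)
{
   /* The last dimension is indexed by (CFI - 1); cfi is expected to lie
    * in the range 1..RG_SCH_CMN_MAX_CFI */
   return rgSchCmnEffTbl[lyrIdx][cpType][antIdx][cfi - 1];
}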
1370 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3, 1};
1372 /* Added matrix 'rgRaPrmblToRaFrmTbl' for computation of RA sub-frames from RA preamble */
1373 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3};
1376 EXTERN RgUlSchdInits rgSchUlSchdInits;
1377 EXTERN RgDlSchdInits rgSchDlSchdInits;
1378 EXTERN RgDlfsSchdInits rgSchDlfsSchdInits;
1380 EXTERN RgEmtcUlSchdInits rgSchEmtcUlSchdInits;
1381 EXTERN RgEmtcDlSchdInits rgSchEmtcDlSchdInits;
1385 PRIVATE S16 rgSCHCmnUeIdleExdThrsld ARGS((
1389 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe ARGS((
1393 PRIVATE Void rgSCHCmnDelDedPreamble ARGS((
1397 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe ARGS((
1400 CmLteTimingInfo timingInfo
1402 PRIVATE Void rgSCHCmnDelRachInfo ARGS((
1406 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe ARGS((
1412 PRIVATE Void rgSCHCmnHdlHoPo ARGS((
1414 CmLListCp *raRspLst,
1415 RgSchRaReqInfo *raReq
1417 PRIVATE Void rgSCHCmnAllocPoHoGrnt ARGS((
1419 CmLListCp *raRspLst,
1421 RgSchRaReqInfo *raReq
1423 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf ARGS((
1430 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ ARGS((
1434 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ ARGS((
1438 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx ARGS((
1441 PRIVATE Void rgSCHCmnUpdRachParam ARGS((
1444 PRIVATE S16 rgSCHCmnAllocPOParam ARGS((
1452 PRIVATE Void rgSCHCmnGenPdcchOrder ARGS((
1456 PRIVATE Void rgSCHCmnCfgRachDedPrm ARGS((
1461 PRIVATE Void rgSCHCmnHdlUlInactUes ARGS((
1464 PRIVATE Void rgSCHCmnHdlDlInactUes ARGS((
1467 PRIVATE Void rgSCHCmnUlInit ARGS((Void
1469 PRIVATE Void rgSCHCmnDlInit ARGS((Void
1471 PRIVATE Void rgSCHCmnInitDlRbAllocInfo ARGS((
1472 RgSchCmnDlRbAllocInfo *allocInfo
1474 PRIVATE Void rgSCHCmnUpdUlCompEffBsr ARGS((
1478 PRIVATE Void rgSCHCmnUlSetAllUnSched ARGS((
1479 RgSchCmnUlRbAllocInfo *allocInfo
1481 PRIVATE Void rgSCHCmnUlUpdSf ARGS((
1483 RgSchCmnUlRbAllocInfo *allocInfo,
1486 PRIVATE Void rgSCHCmnUlHndlAllocRetx ARGS((
1488 RgSchCmnUlRbAllocInfo *allocInfo,
1493 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch ARGS((
1497 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch ARGS((
1501 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ ARGS((
1505 PRIVATE S16 rgSCHCmnTmrExpiry ARGS((
1506 PTR cb, /* Pointer to timer control block */
1507 S16 tmrEvnt /* Timer Event */
1509 PRIVATE S16 rgSCHCmnTmrProc ARGS((
1512 PRIVATE Void rgSCHCmnAddUeToRefreshQ ARGS((
1517 PRIVATE Void rgSCHCmnDlCcchRetx ARGS((
1519 RgSchCmnDlRbAllocInfo *allocInfo
1521 PRIVATE Void rgSCHCmnUpdUeMimoInfo ARGS((
1525 RgSchCmnCell *cellSchd
1527 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo ARGS((
1531 RgSchCmnUe *ueSchCmn,
1532 RgSchCmnCell *cellSchd,
1536 PRIVATE Void rgSCHCmnDlCcchSduRetx ARGS((
1538 RgSchCmnDlRbAllocInfo *allocInfo
1540 PRIVATE Void rgSCHCmnDlCcchSduTx ARGS((
1542 RgSchCmnDlRbAllocInfo *allocInfo
1544 PRIVATE S16 rgSCHCmnCcchSduAlloc ARGS((
1547 RgSchCmnDlRbAllocInfo *allocInfo
1549 PRIVATE S16 rgSCHCmnCcchSduDedAlloc ARGS((
1553 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc ARGS((
1559 PRIVATE Void rgSCHCmnInitVars ARGS((
1563 /*ccpu00117180 - DEL - Moved rgSCHCmnUpdVars to .x as its access is now PUBLIC */
1564 PRIVATE Void rgSCHCmnUlRbAllocForLst ARGS((
1570 CmLListCp *nonSchdLst,
1573 PRIVATE S16 rgSCHCmnUlRbAllocForUe ARGS((
1580 PRIVATE Void rgSCHCmnMsg3GrntReq ARGS((
1584 RgSchUlHqProcCb *hqProc,
1585 RgSchUlAlloc **ulAllocRef,
1588 PRIVATE Void rgSCHCmnUlNonadapRetx ARGS((
1589 RgSchCmnUlCell *cellUl,
1590 RgSchUlAlloc *alloc,
1594 PRIVATE Void rgSCHCmnDlCcchRarAlloc ARGS((
1597 PRIVATE Void rgSCHCmnDlCcchTx ARGS((
1599 RgSchCmnDlRbAllocInfo *allocInfo
1601 PRIVATE Void rgSCHCmnDlBcchPcch ARGS((
1603 RgSchCmnDlRbAllocInfo *allocInfo,
1604 RgInfSfAlloc *subfrmAlloc
1606 PUBLIC Bool rgSCHCmnChkInWin ARGS((
1607 CmLteTimingInfo frm,
1608 CmLteTimingInfo start,
1611 PUBLIC Bool rgSCHCmnChkPastWin ARGS((
1612 CmLteTimingInfo frm,
1615 PRIVATE Void rgSCHCmnClcAlloc ARGS((
1618 RgSchClcDlLcCb *lch,
1620 RgSchCmnDlRbAllocInfo *allocInfo
1623 PRIVATE Void rgSCHCmnClcRbAlloc ARGS((
1634 PRIVATE S16 rgSCHCmnMsg4Alloc ARGS((
1637 RgSchCmnDlRbAllocInfo *allocInfo
1639 PRIVATE S16 rgSCHCmnMsg4DedAlloc ARGS((
1643 PRIVATE Void rgSCHCmnDlRaRsp ARGS((
1645 RgSchCmnDlRbAllocInfo *allocInfo
1647 PRIVATE S16 rgSCHCmnRaRspAlloc ARGS((
1653 RgSchCmnDlRbAllocInfo *allocInfo
1655 PRIVATE Void rgSCHCmnUlUeDelAllocs ARGS((
1659 PRIVATE Void rgSCHCmnDlSetUeAllocLmt ARGS((
1664 PRIVATE S16 rgSCHCmnDlRgrCellCfg ARGS((
1669 PRIVATE Void rgSCHCmnUlAdapRetx ARGS((
1670 RgSchUlAlloc *alloc,
1671 RgSchUlHqProcCb *proc
1673 PRIVATE Void rgSCHCmnUlUpdAllocRetx ARGS((
1677 PRIVATE Void rgSCHCmnUlSfReTxAllocs ARGS((
1681 /* Fix: syed Adaptive Msg3 Retx crash. */
1682 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs ARGS((
1688 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1692 RgrUeRecfg *ueRecfg,
1696 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1706 * DL RB allocation specific functions
1709 PRIVATE Void rgSCHCmnDlRbAlloc ARGS((
1711 RgSchCmnDlRbAllocInfo *allocInfo
1713 PRIVATE Void rgSCHCmnNonDlfsRbAlloc ARGS((
1715 RgSchCmnDlRbAllocInfo *allocInfo
1717 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc ARGS((
1719 RgSchDlRbAlloc *cmnAllocInfo));
1722 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj ARGS((
1724 RgSchDlRbAlloc *cmnAllocInfo,
1728 /* Added function to adjust TB size */
1729 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj ARGS((
1730 RgSchDlRbAlloc *allocInfo,
1731 U8 numOvrlapgPbchRb,
1737 /* Added function to find the number of overlapping PBCH RBs */
1738 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs ARGS((
1741 RgSchDlRbAlloc *allocInfo,
1742 U8 *numOvrlapgPbchRb
1745 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl ARGS((
1748 RgSchDlRbAlloc *allocInfo
1751 PRIVATE Void rgSCHCmnFindCodeRate ARGS((
1754 RgSchDlRbAlloc *allocInfo,
1759 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc ARGS((
1761 RgSchCmnMsg4RbAlloc *msg4AllocInfo,
1764 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc ARGS((
1770 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc ARGS((
1777 PRIVATE U32 rgSCHCmnCalcRiv ARGS(( U8 bw,
1783 PRIVATE Void rgSCHCmnUpdHqAndDai ARGS((
1784 RgSchDlHqProcCb *hqP,
1786 RgSchDlHqTbCb *tbCb,
1789 PRIVATE S16 rgSCHCmnUlCalcAvailBw ARGS((
1791 RgrCellCfg *cellCfg,
1796 PRIVATE S16 rgSCHCmnDlKdashUlAscInit ARGS((
1799 PRIVATE S16 rgSCHCmnDlANFdbkInit ARGS((
1802 PRIVATE S16 rgSCHCmnDlNpValInit ARGS((
1805 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst ARGS((
1808 PRIVATE S16 rgSCHCmnDlCpyRachInfo ARGS((
1810 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
1813 PRIVATE S16 rgSCHCmnDlRachInfoInit ARGS((
1816 PRIVATE S16 rgSCHCmnDlPhichOffsetInit ARGS((
1821 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt ARGS
1827 PRIVATE RgSchCmnRank rgSCHCmnComputeRank ARGS
1834 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3 ARGS
1839 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3 ARGS
1844 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4 ARGS
1849 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4 ARGS
1854 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr ARGS
1861 /* comcodsepa : start */
1864 * @brief This function computes efficiency and stores it in a table.
1868 * Function: rgSCHCmnCompEff
1869 * Purpose: this function computes the efficiency as the number of bits
1870 * carried per 1024 resource elements (REs), averaged over all RB
1871 * allocations. Each per-CFI table holds the same metric so that entries are comparable
1873 * Invoked by: Scheduler
1875 * @param[in] U8 noPdcchSym
1876 * @param[in] U8 cpType
1877 * @param[in] U8 txAntIdx
1878 * @param[in] RgSchCmnTbSzEff* effTbl
1883 PRIVATE Void rgSCHCmnCompEff
1888 RgSchCmnTbSzEff *effTbl
1891 PRIVATE Void rgSCHCmnCompEff(noPdcchSym, cpType, txAntIdx, effTbl)
1895 RgSchCmnTbSzEff *effTbl;
1900 U8 resOfCrs; /* Effective REs occupied by CRS */
1903 TRC2(rgSCHCmnCompEff);
1907 case RG_SCH_CMN_NOR_CP:
1910 case RG_SCH_CMN_EXT_CP:
1914 /* Generate a log error. This case should never be executed */
1918 /* Depending on the Tx Antenna Index, deduct the
1919 * Resource elements for the CRS */
1923 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
1926 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
1929 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
1932 /* Generate a log error. This case should never be executed */
1935 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
1936 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
1939 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
1941 /* This line computes the coding efficiency per 1024 REs */
1942 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
1944 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
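/* At this point (*effTbl)[i] holds, for I-TBS index i, the transport
 * block bits normalised to 1024 PDSCH REs, averaged over all RB
 * allocations 1..RG_SCH_CMN_NUM_RBS; the per-RB RE count uses
 * RB_SCH_CMN_NUM_SCS_PER_RB subcarriers per RB minus the CRS REs */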
1949 * @brief This function computes efficiency and stores it in a table.
1953 * Function: rgSCHCmnCompUlEff
1954 * Purpose: this function computes the UL efficiency as the number of bits
1955 * carried per 1024 resource elements (REs), averaged over all RB
1956 * allocations, after excluding the DMRS/SRS symbols
1958 * Invoked by: Scheduler
1960 * @param[in] U8 noUlRsSym
1961 * @param[in] U8 cpType
1963 * @param[in] RgSchCmnTbSzEff* effTbl
1968 PRIVATE Void rgSCHCmnCompUlEff
1972 RgSchCmnTbSzEff *effTbl
1975 PRIVATE Void rgSCHCmnCompUlEff(noUlRsSym, cpType, effTbl)
1978 RgSchCmnTbSzEff *effTbl;
1985 TRC2(rgSCHCmnCompUlEff);
1989 case RG_SCH_CMN_NOR_CP:
1992 case RG_SCH_CMN_EXT_CP:
1996 /* Generate a log error. This case should never be executed */
2000 noResPerRb = ((noSymPerRb - noUlRsSym) * RB_SCH_CMN_NUM_SCS_PER_RB);
2001 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
2004 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
2006 /* This line computes the coding efficiency per 1024 REs */
2007 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
2009 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
2015 * @brief This function computes efficiency for 2 layers and stores it in a table.
2019 * Function: rgSCHCmn2LyrCompEff
2020 * Purpose: this function computes the two-layer efficiency as the number
2021 * of bits carried per 1024 resource elements (REs), averaged over all RB
2022 * allocations. Each per-CFI table holds the same metric so that entries are comparable
2024 * Invoked by: Scheduler
2026 * @param[in] U8 noPdcchSym
2027 * @param[in] U8 cpType
2028 * @param[in] U8 txAntIdx
2029 * @param[in] RgSchCmnTbSzEff* effTbl2Lyr
2034 PRIVATE Void rgSCHCmn2LyrCompEff
2039 RgSchCmnTbSzEff *effTbl2Lyr
2042 PRIVATE Void rgSCHCmn2LyrCompEff(noPdcchSym, cpType, txAntIdx, effTbl2Lyr)
2046 RgSchCmnTbSzEff *effTbl2Lyr;
2051 U8 resOfCrs; /* Effective REs occupied by CRS */
2054 TRC2(rgSCHCmn2LyrCompEff);
2058 case RG_SCH_CMN_NOR_CP:
2061 case RG_SCH_CMN_EXT_CP:
2065 /* Generate a log error. This case should never be executed */
2069 /* Depending on the Tx Antenna Index, deduct the
2070 * Resource elements for the CRS */
2074 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
2077 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
2080 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
2083 /* Generate a log error. This case should never be executed */
2087 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
2088 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
2090 (*effTbl2Lyr)[i] = 0;
2091 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
2093 /* This line computes the coding efficiency per 1024 REs */
2094 (*effTbl2Lyr)[i] += (rgTbSzTbl[1][i][j] * 1024) / (noResPerRb * (j+1));
2096 (*effTbl2Lyr)[i] /= RG_SCH_CMN_NUM_RBS;
2103 * @brief This function initializes the rgSchCmnDciFrmtSizes table.
2107 * Function: rgSCHCmnGetDciFrmtSizes
2108 * Purpose: This function determines the sizes of all
2109 * the available DCI Formats. The order of
2110 * bits addition for each format is inaccordance
2112 * Invoked by: rgSCHCmnRgrCellCfg
2118 PRIVATE Void rgSCHCmnGetDciFrmtSizes
2123 PRIVATE Void rgSCHCmnGetDciFrmtSizes(cell)
2128 TRC2(rgSCHCmnGetDciFrmtSizes);
2130 /* DCI Format 0 size determination */
2131 rgSchCmnDciFrmtSizes[0] = 1 +
2133 rgSCHUtlLog32bitNbase2((cell->bwCfg.ulTotalBw * \
2134 (cell->bwCfg.ulTotalBw + 1))/2) +
2144 /* DCI Format 1 size determination */
2145 rgSchCmnDciFrmtSizes[1] = 1 +
2146 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2151 4 + 2 + /* HqProc Id and DAI */
2157 /* DCI Format 1A size determination */
2158 rgSchCmnDciFrmtSizes[2] = 1 + /* Flag for format0/format1a differentiation */
2159 1 + /* Localized/distributed VRB assignment flag */
2162 3 + /* Harq process Id */
2164 4 + /* Harq process Id */
2165 2 + /* UL Index or DAI */
2167 1 + /* New Data Indicator */
2170 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2171 (cell->bwCfg.dlTotalBw + 1))/2);
2172 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
2173 Since VRB is local */
2175 /* DCI Format 1B size determination */
2176 rgSchCmnDciFrmtSizes[3] = 1 +
2177 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2178 (cell->bwCfg.dlTotalBw + 1))/2) +
2188 ((cell->numTxAntPorts == 4)? 4:2) +
2191 /* DCI Format 1C size determination */
2192 /* Approximation: NDLVrbGap1 ~= Nprb for DL */
2193 rgSchCmnDciFrmtSizes[4] = ((cell->bwCfg.dlTotalBw < 50)? 0:1) +
2194 ((cell->bwCfg.dlTotalBw < 50)?
2195 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/2 * \
2196 (cell->bwCfg.dlTotalBw/2 + 1))/2)) :
2197 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/4 * \
2198 (cell->bwCfg.dlTotalBw/4 + 1))/2))) +
2201 /* DCI Format 1D size determination */
2202 rgSchCmnDciFrmtSizes[5] = 1 +
2203 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2204 (cell->bwCfg.dlTotalBw + 1))/2) +
2213 ((cell->numTxAntPorts == 4)? 4:2) +
2216 /* DCI Format 2 size determination */
2217 rgSchCmnDciFrmtSizes[6] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2218 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2226 ((cell->numTxAntPorts == 4)? 6:3);
2228 /* DCI Format 2A size determination */
2229 rgSchCmnDciFrmtSizes[7] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2230 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2238 ((cell->numTxAntPorts == 4)? 2:0);
2240 /* DCI Format 3 size determination */
2241 rgSchCmnDciFrmtSizes[8] = rgSchCmnDciFrmtSizes[0];
2243 /* DCI Format 3A size determination */
2244 rgSchCmnDciFrmtSizes[9] = rgSchCmnDciFrmtSizes[0];
2251 * @brief This function initializes the cmnCell->dciAggrLvl table.
2255 * Function: rgSCHCmnGetCqiDciFrmt2AggrLvl
2256 * Purpose: This function determines the Aggregation level
2257 * for each CQI level against each DCI format.
2258 * Invoked by: rgSCHCmnRgrCellCfg
2264 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl
2269 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl(cell)
2273 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2277 TRC2(rgSCHCmnGetCqiDciFrmt2AggrLvl);
2279 for (i = 0; i < RG_SCH_CMN_MAX_CQI; i++)
2281 for (j = 0; j < 10; j++)
2283 U32 pdcchBits; /* Actual number of phy bits needed for a given DCI Format
2284 * for a given CQI Level */
2285 pdcchBits = (rgSchCmnDciFrmtSizes[j] * 1024)/rgSchCmnCqiPdcchEff[i];
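/* pdcchBits is the number of physical (channel) bits needed to carry this
 * DCI payload at the code rate implied by rgSchCmnCqiPdcchEff[i]; the
 * cascaded checks below pick the smallest aggregation level whose assumed
 * capacity covers it: <192 -> level 2, <384 -> level 4, <768 -> level 8,
 * otherwise level 16 */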
2287 if (pdcchBits < 192)
2289 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL2;
2292 if (pdcchBits < 384)
2294 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL4;
2297 if (pdcchBits < 768)
2299 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL8;
2302 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL16;
2309 * @brief This function initializes all the data for the scheduler.
2313 * Function: rgSCHCmnDlInit
2314 * Purpose: This function initializes the following information:
2315 * 1. Efficiency table
2316 * 2. CQI to table index - one row for up to 3 RBs
2317 * and another row for greater than 3 RBs.
2318 * Currently the extended cyclic prefix is compiled out.
2319 * Invoked by: MAC initialization code, possibly ActvInit
2325 PRIVATE Void rgSCHCmnDlInit
2329 PRIVATE Void rgSCHCmnDlInit()
2336 RgSchCmnTbSzEff *effTbl;
2337 RgSchCmnCqiToTbs *tbsTbl;
2339 TRC2(rgSCHCmnDlInit);
2341 /* 0 corresponds to Single layer case, 1 corresponds to 2 layers case*/
2342 /* Init Efficiency table for normal cyclic prefix */
2343 /*Initialize Efficiency table for Layer Index 0 */
2344 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2345 /*Initialize Efficiency table for each of the CFI indices. The
2346 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2347 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[0];
2348 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[0];
2349 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[0];
2350 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[0];
2351 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2352 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[0];
2353 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[0];
2354 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[0];
2355 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[0];
2356 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2357 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[0];
2358 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[0];
2359 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[0];
2360 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[0];
2362 /*Initialize CQI to TBS table for Layer Index 0 for Normal CP */
2363 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[0];
2364 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[0];
2365 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[0];
2366 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[0];
2368 /*Initialize Efficiency table for Layer Index 1 */
2369 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2370 /*Initialize Efficiency table for each of the CFI indices. The
2371 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2372 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[1];
2373 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[1];
2374 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[1];
2375 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[1];
2376 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2377 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[1];
2378 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[1];
2379 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[1];
2380 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[1];
2381 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2382 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[1];
2383 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[1];
2384 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[1];
2385 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[1];
2387 /*Initialize CQI to TBS table for Layer Index 1 for Normal CP */
2388 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[1];
2389 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[1];
2390 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[1];
2391 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[1];
2393 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2395 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2397 /* Efficiency table calculation for 1 and 2 layers, normal CP */
2398 rgSCHCmnCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx,\
2399 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i]);
2400 rgSCHCmn2LyrCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx, \
2401 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i]);
2405 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2407 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2409 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i];
2410 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][i];
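/* The loop below walks I-TBS from highest to lowest and, for each CQI value
 * k, records the largest I-TBS whose per-1024-RE efficiency does not exceed
 * the PDSCH efficiency assumed for that CQI at the current CFI index
 * (rgSchCmnCqiPdschEff[i][k]) */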
2411 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2412 (j >= 0) && (k > 0); --j)
2414 /* ADD CQI to MCS mapping correction
2415 * single dimensional array is replaced by 2 dimensions for different CFI*/
2416 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2418 (*tbsTbl)[k--] = (U8)j;
2425 /* effTbl, tbsTbl calculation in case of 2 layers for normal CP */
2426 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i];
2427 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][i];
2428 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2429 (j >= 0) && (k > 0); --j)
2431 /* ADD CQI to MCS mapping correction
2432 * single dimensional array is replaced by 2 dimensions for different CFI*/
2433 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2435 (*tbsTbl)[k--] = (U8)j;
2445 /* Efficiency Table for Extended CP */
2446 /*Initialize Efficiency table for Layer Index 0 */
2447 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2448 /*Initialize Efficiency table for each of the CFI indices. The
2449 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2450 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[0];
2451 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[0];
2452 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[0];
2453 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[0];
2454 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2455 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[0];
2456 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[0];
2457 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[0];
2458 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[0];
2459 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2460 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[0];
2461 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[0];
2462 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[0];
2463 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[0];
2465 /*Initialize CQI to TBS table for Layer Index 0 for Extended CP */
2466 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[0];
2467 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[0];
2468 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[0];
2469 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[0];
2471 /*Initialize Efficiency table for Layer Index 1 */
2472 /*Initialize Efficiency table for each of the CFI indices. The
2473 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2474 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2475 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[1];
2476 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[1];
2477 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[1];
2478 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[1];
2479 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2480 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[1];
2481 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[1];
2482 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[1];
2483 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[1];
2484 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2485 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[1];
2486 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[1];
2487 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[1];
2488 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[1];
2490 /*Initialize CQI to TBS table for Layer Index 1 for Extended CP */
2491 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[1];
2492 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[1];
2493 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[1];
2494 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[1];
2495 /* Activate this code when extended cp is supported */
2496 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2498 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2500 /* Efficiency table calculation for 1 and 2 layers, extended CP */
2501 rgSCHCmnCompEff( (U8)(i + 1 ), (U8)RG_SCH_CMN_EXT_CP, idx,\
2502 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i]);
2503 rgSCHCmn2LyrCompEff((U8)(i + 1), (U8) RG_SCH_CMN_EXT_CP,idx, \
2504 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i]);
2508 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2510 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2512 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i];
2513 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][i];
2514 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2515 (j >= 0) && (k > 0); --j)
2517 /* ADD CQI to MCS mapping correction
2518 * single dimensional array is replaced by 2 dimensions for different CFI*/
2519 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2521 (*tbsTbl)[k--] = (U8)j;
2528 /* effTbl, tbsTbl calculation in case of 2 layers for extended CP */
2529 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i];
2530 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][i];
2531 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2532 (j >= 0) && (k > 0); --j)
2534 /* ADD CQI to MCS mapping correction
2535 * single dimensional array is replaced by 2 dimensions for different CFI*/
2536 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2538 (*tbsTbl)[k--] = (U8)j;
2551 * @brief This function initializes all the data for the scheduler.
2555 * Function: rgSCHCmnUlInit
2556 * Purpose: This function initializes the following information:
2557 * 1. Efficiency table
2558 * 2. CQI to table index - one row for up to 3 RBs
2559 * and another row for greater than 3 RBs.
2560 * Currently the extended cyclic prefix is compiled out.
2561 * Invoked by: MAC initialization code, possibly ActvInit
2567 PRIVATE Void rgSCHCmnUlInit
2571 PRIVATE Void rgSCHCmnUlInit()
2574 U8 *mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_NOR_CP][0];
2575 RgSchCmnTbSzEff *effTbl = &rgSchCmnNorUlEff[0];
2576 CONSTANT RgSchCmnUlCqiInfo *cqiTbl = &rgSchCmnUlCqiTbl[0];
2579 TRC2(rgSCHCmnUlInit);
2581 /* Initializing new variable added for UL efficiency */
2582 rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP] = &rgSchCmnNorUlEff[0];
2583 /* Three symbols are excluded from the efficiency table
2584 * computation: 2 symbols for DMRS (1 in each slot)
2585 * and 1 symbol for SRS */
2586 rgSCHCmnCompUlEff(RGSCH_UL_SYM_DMRS_SRS,RG_SCH_CMN_NOR_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP]);
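/* The loop below mirrors the DL derivation: each UL CQI value is mapped to
 * the largest I-TBS whose efficiency does not exceed the per-CQI efficiency
 * given in rgSchCmnUlCqiTbl */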
2588 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2589 i >= 0 && j > 0; --i)
2591 if ((*effTbl)[i] <= cqiTbl[j].eff)
2593 mapTbl[j--] = (U8)i;
2600 effTbl = &rgSchCmnExtUlEff[0];
2601 mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_EXT_CP][0];
2603 /* Initializing new variable added for UL efficiency */
2604 rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP] = &rgSchCmnExtUlEff[0];
2605 /* Three symbols are excluded from the efficiency table
2606 * computation: 2 symbols for DMRS (1 in each slot)
2607 * and 1 symbol for SRS */
2608 rgSCHCmnCompUlEff(3,RG_SCH_CMN_EXT_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP]);
2610 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2611 i >= 0 && j > 0; --i)
2613 if ((*effTbl)[i] <= cqiTbl[j].eff)
2615 mapTbl[j--] = (U8)i;
2628 * @brief This function initializes all the data for the scheduler.
2632 * Function: rgSCHCmnInit
2633 * Purpose: This function initializes the following information:
2634 * 1. Efficiency table
2635 * 2. CQI to table index - one row for up to 3 RBs
2636 * and another row for greater than 3 RBs.
2637 * Currently the extended cyclic prefix is compiled out.
2638 * Invoked by: MAC initialization code, possibly ActvInit
2644 PUBLIC Void rgSCHCmnInit
2648 PUBLIC Void rgSCHCmnInit()
2657 rgSCHEmtcCmnDlInit();
2658 rgSCHEmtcCmnUlInit();
2664 /* Init the function pointers */
2665 rgSchCmnApis.rgSCHRgrUeCfg = rgSCHCmnRgrUeCfg;
2666 rgSchCmnApis.rgSCHRgrUeRecfg = rgSCHCmnRgrUeRecfg;
2667 rgSchCmnApis.rgSCHFreeUe = rgSCHCmnUeDel;
2668 rgSchCmnApis.rgSCHRgrCellCfg = rgSCHCmnRgrCellCfg;
2669 rgSchCmnApis.rgSCHRgrCellRecfg = rgSCHCmnRgrCellRecfg;
2670 rgSchCmnApis.rgSCHFreeCell = rgSCHCmnCellDel;
2671 rgSchCmnApis.rgSCHRgrLchCfg = rgSCHCmnRgrLchCfg;
2672 rgSchCmnApis.rgSCHRgrLcgCfg = rgSCHCmnRgrLcgCfg;
2673 rgSchCmnApis.rgSCHRgrLchRecfg = rgSCHCmnRgrLchRecfg;
2674 rgSchCmnApis.rgSCHRgrLcgRecfg = rgSCHCmnRgrLcgRecfg;
2675 rgSchCmnApis.rgSCHFreeDlLc = rgSCHCmnFreeDlLc;
2676 rgSchCmnApis.rgSCHFreeLcg = rgSCHCmnLcgDel;
2677 rgSchCmnApis.rgSCHRgrLchDel = rgSCHCmnRgrLchDel;
2678 rgSchCmnApis.rgSCHActvtUlUe = rgSCHCmnActvtUlUe;
2679 rgSchCmnApis.rgSCHActvtDlUe = rgSCHCmnActvtDlUe;
2680 rgSchCmnApis.rgSCHHdlUlTransInd = rgSCHCmnHdlUlTransInd;
2681 rgSchCmnApis.rgSCHDlDedBoUpd = rgSCHCmnDlDedBoUpd;
2682 rgSchCmnApis.rgSCHUlRecMsg3Alloc = rgSCHCmnUlRecMsg3Alloc;
2683 rgSchCmnApis.rgSCHUlCqiInd = rgSCHCmnUlCqiInd;
2684 rgSchCmnApis.rgSCHPucchDeltaPwrInd = rgSCHPwrPucchDeltaInd;
2685 rgSchCmnApis.rgSCHUlHqProcForUe = rgSCHCmnUlHqProcForUe;
2687 rgSchCmnApis.rgSCHUpdUlHqProc = rgSCHCmnUpdUlHqProc;
2689 rgSchCmnApis.rgSCHUpdBsrShort = rgSCHCmnUpdBsrShort;
2690 rgSchCmnApis.rgSCHUpdBsrTrunc = rgSCHCmnUpdBsrTrunc;
2691 rgSchCmnApis.rgSCHUpdBsrLong = rgSCHCmnUpdBsrLong;
2692 rgSchCmnApis.rgSCHUpdPhr = rgSCHCmnUpdPhr;
2693 rgSchCmnApis.rgSCHUpdExtPhr = rgSCHCmnUpdExtPhr;
2694 rgSchCmnApis.rgSCHContResUlGrant = rgSCHCmnContResUlGrant;
2695 rgSchCmnApis.rgSCHSrRcvd = rgSCHCmnSrRcvd;
2696 rgSchCmnApis.rgSCHFirstRcptnReq = rgSCHCmnFirstRcptnReq;
2697 rgSchCmnApis.rgSCHNextRcptnReq = rgSCHCmnNextRcptnReq;
2698 rgSchCmnApis.rgSCHFirstHqFdbkAlloc = rgSCHCmnFirstHqFdbkAlloc;
2699 rgSchCmnApis.rgSCHNextHqFdbkAlloc = rgSCHCmnNextHqFdbkAlloc;
2700 rgSchCmnApis.rgSCHDlProcAddToRetx = rgSCHCmnDlProcAddToRetx;
2701 rgSchCmnApis.rgSCHDlCqiInd = rgSCHCmnDlCqiInd;
2703 rgSchCmnApis.rgSCHUlProcAddToRetx = rgSCHCmnEmtcUlProcAddToRetx;
2706 rgSchCmnApis.rgSCHSrsInd = rgSCHCmnSrsInd;
2708 rgSchCmnApis.rgSCHDlTARpt = rgSCHCmnDlTARpt;
2709 rgSchCmnApis.rgSCHDlRlsSubFrm = rgSCHCmnDlRlsSubFrm;
2710 rgSchCmnApis.rgSCHUeReset = rgSCHCmnUeReset;
2712 rgSchCmnApis.rgSCHHdlCrntiCE = rgSCHCmnHdlCrntiCE;
2713 rgSchCmnApis.rgSCHDlProcAck = rgSCHCmnDlProcAck;
2714 rgSchCmnApis.rgSCHDlRelPdcchFbk = rgSCHCmnDlRelPdcchFbk;
2715 rgSchCmnApis.rgSCHUlSpsRelInd = rgSCHCmnUlSpsRelInd;
2716 rgSchCmnApis.rgSCHUlSpsActInd = rgSCHCmnUlSpsActInd;
2717 rgSchCmnApis.rgSCHUlCrcFailInd = rgSCHCmnUlCrcFailInd;
2718 rgSchCmnApis.rgSCHUlCrcInd = rgSCHCmnUlCrcInd;
2720 rgSchCmnApis.rgSCHDrxStrtInActvTmrInUl = rgSCHCmnDrxStrtInActvTmrInUl;
2721 rgSchCmnApis.rgSCHUpdUeDataIndLcg = rgSCHCmnUpdUeDataIndLcg;
2723 for (idx = 0; idx < RGSCH_NUM_SCHEDULERS; ++idx)
2725 rgSchUlSchdInits[idx](&rgSchUlSchdTbl[idx]);
2726 rgSchDlSchdInits[idx](&rgSchDlSchdTbl[idx]);
2729 for (idx = 0; idx < RGSCH_NUM_EMTC_SCHEDULERS; ++idx)
2731 rgSchEmtcUlSchdInits[idx](&rgSchEmtcUlSchdTbl[idx]);
2732 rgSchEmtcDlSchdInits[idx](&rgSchEmtcDlSchdTbl[idx]);
2735 #if (defined (RG_PHASE2_SCHED) && defined(TFU_UPGRADE))
2736 for (idx = 0; idx < RGSCH_NUM_DLFS_SCHEDULERS; ++idx)
2738 rgSchDlfsSchdInits[idx](&rgSchDlfsSchdTbl[idx]);
2742 rgSchCmnApis.rgSCHRgrSCellUeCfg = rgSCHCmnRgrSCellUeCfg;
2743 rgSchCmnApis.rgSCHRgrSCellUeDel = rgSCHCmnRgrSCellUeDel;
2750 * @brief This function is a wrapper to call scheduler specific API.
2754 * Function: rgSCHCmnDlRlsSubFrm
2755 * Purpose: Releases scheduler Information from DL SubFrm.
2759 * @param[in] RgSchCellCb *cell
2760 * @param[out] CmLteTimingInfo frm
2765 PUBLIC Void rgSCHCmnDlRlsSubFrm
2771 PUBLIC Void rgSCHCmnDlRlsSubFrm(cell, frm)
2773 CmLteTimingInfo frm;
2776 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2779 TRC2(rgSCHCmnDlRlsSubFrm);
2781 /* Get the pointer to the subframe */
2782 sf = rgSCHUtlSubFrmGet(cell, frm);
2784 rgSCHUtlSubFrmPut(cell, sf);
2787 /* Re-initialize DLFS specific information for the sub-frame */
2788 cellSch->apisDlfs->rgSCHDlfsReinitSf(cell, sf);
2796 * @brief This function is the starting function for DL allocation.
2800 * Function: rgSCHCmnDlCcchRarAlloc
2801 * Purpose: Scheduling for downlink. It performs allocation in the order
2802 * of priority with BCCH/PCH first, CCCH, Random Access and TA.
2804 * Invoked by: Scheduler
2806 * @param[in] RgSchCellCb* cell
2807 * @param[out] RgSchCmnDlRbAllocInfo* allocInfo
2812 PRIVATE Void rgSCHCmnDlCcchRarAlloc
2817 PRIVATE Void rgSCHCmnDlCcchRarAlloc(cell)
2821 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2823 TRC2(rgSCHCmnDlCcchRarAlloc);
2825 rgSCHCmnDlCcchRetx(cell, &cellSch->allocInfo);
2826 /* LTE_ADV_FLAG_REMOVED_START */
2827 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2829 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2831 /*eNodeB needs to blank the subframe */
2835 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
2840 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
2842 /* LTE_ADV_FLAG_REMOVED_END */
2846 /* Added these function calls for processing a CCCH SDU arriving
2847 * after guard timer expiry. They differ from the above two functions
2848 * in using ueCb instead of raCb. */
2849 rgSCHCmnDlCcchSduRetx(cell, &cellSch->allocInfo);
2850 /* LTE_ADV_FLAG_REMOVED_START */
2851 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2853 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2855 /*eNodeB needs to blank the subframe */
2859 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2864 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2866 /* LTE_ADV_FLAG_REMOVED_END */
2870 if(cellSch->ul.msg3SchdIdx != RGSCH_INVALID_INFO)
2872 /* Do not schedule msg3 if there is a CFI change ongoing */
2873 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2875 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2879 /* LTE_ADV_FLAG_REMOVED_START */
2880 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2882 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2884 /*eNodeB needs to blank the subframe */
2888 /* Do not schedule msg3 if there is a CFI change ongoing */
2889 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2891 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2897 /* Do not schedule msg3 if there is a CFI change ongoing */
2898 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2900 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2903 /* LTE_ADV_FLAG_REMOVED_END */
2911 * @brief Scheduling for CCCH SDU.
2915 * Function: rgSCHCmnCcchSduAlloc
2916 * Purpose: Scheduling for CCCH SDU
2918 * Invoked by: Scheduler
2920 * @param[in] RgSchCellCb* cell
2921 * @param[in] RgSchUeCb* ueCb
2922 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
2927 PRIVATE S16 rgSCHCmnCcchSduAlloc
2931 RgSchCmnDlRbAllocInfo *allocInfo
2934 PRIVATE S16 rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)
2937 RgSchCmnDlRbAllocInfo *allocInfo;
2940 RgSchDlRbAlloc *rbAllocInfo;
2941 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2942 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
2944 TRC2(rgSCHCmnCcchSduAlloc);
2946 /* Return if subframe BW exhausted */
2947 if (allocInfo->ccchSduAlloc.ccchSduDlSf->bw <=
2948 allocInfo->ccchSduAlloc.ccchSduDlSf->bwAssigned)
2950 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2951 "bw<=bwAssigned for UEID:%d",ueCb->ueId);
2955 if (rgSCHDhmGetCcchSduHqProc(ueCb, cellSch->dl.time, &(ueDl->proc)) != ROK)
2957 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2958 "rgSCHDhmGetCcchSduHqProc failed UEID:%d",ueCb->ueId);
2962 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
2963 rbAllocInfo->dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
2965 if (rgSCHCmnCcchSduDedAlloc(cell, ueCb) != ROK)
2967 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
2968 rgSCHDhmRlsHqpTb(ueDl->proc, 0, FALSE);
2969 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2970 "rgSCHCmnCcchSduDedAlloc failed UEID:%d",ueCb->ueId);
2973 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduTxLst, &ueDl->proc->reqLnk);
2974 ueDl->proc->reqLnk.node = (PTR)ueDl->proc;
2975 allocInfo->ccchSduAlloc.ccchSduDlSf->schdCcchUe++;
2980 * @brief This function schedules downlink CCCH messages.
2984 * Function: rgSCHCmnDlCcchSduTx
2985 * Purpose: Scheduling of CCCH SDU transmissions in downlink
2987 * Invoked by: Scheduler
2989 * @param[in] RgSchCellCb *cell
2990 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
2995 PRIVATE Void rgSCHCmnDlCcchSduTx
2998 RgSchCmnDlRbAllocInfo *allocInfo
3001 PRIVATE Void rgSCHCmnDlCcchSduTx(cell, allocInfo)
3003 RgSchCmnDlRbAllocInfo *allocInfo;
3008 RgSchCmnDlUe *ueCmnDl;
3009 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3011 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
3013 TRC2(rgSCHCmnDlCcchSduTx);
3015 node = cell->ccchSduUeLst.first;
3018 if(cellSch->dl.maxCcchPerDlSf &&
3019 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3025 ueCb = (RgSchUeCb *)(node->node);
3026 ueCmnDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3028 /* Fix : syed postpone scheduling for this
3029 * until msg4 is done */
3030 /* Fix : syed RLC can erroneously send CCCH SDU BO
3031 * twice. Hence an extra guard to avoid scheduling if it is
3032 * already scheduled for RETX */
3033 if ((!(ueCb->dl.dlInactvMask & RG_HQENT_INACTIVE)) &&
3036 if ((rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)) != ROK)
3043 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"ERROR!! THIS SHOULD "
3044 "NEVER HAPPEN for UEID:%d", ueCb->ueId);
3054 * @brief This function schedules downlink CCCH messages.
3058 * Function: rgSCHCmnDlCcchTx
3059 * Purpose: Scheduling for downlink CCCH
3061 * Invoked by: Scheduler
3063 * @param[in] RgSchCellCb *cell
3064 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3069 PRIVATE Void rgSCHCmnDlCcchTx
3072 RgSchCmnDlRbAllocInfo *allocInfo
3075 PRIVATE Void rgSCHCmnDlCcchTx(cell, allocInfo)
3077 RgSchCmnDlRbAllocInfo *allocInfo;
3082 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3083 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3085 TRC2(rgSCHCmnDlCcchTx);
3087 node = cell->raInfo.toBeSchdLst.first;
3090 if(cellSch->dl.maxCcchPerDlSf &&
3091 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3098 raCb = (RgSchRaCb *)(node->node);
3100 /* Attempt allocation of MSG4 for this UE */
3101 /* Allocation for Msg4 */
3102 if ((rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)) != ROK)
3113 * @brief This function schedules downlink CCCH SDU retransmissions.
3117 * Function: rgSCHCmnDlCcchSduRetx
3118 * Purpose: Scheduling for downlink CCCH
3120 * Invoked by: Scheduler
3122 * @param[in] RgSchCellCb *cell
3123 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3128 PRIVATE Void rgSCHCmnDlCcchSduRetx
3131 RgSchCmnDlRbAllocInfo *allocInfo
3134 PRIVATE Void rgSCHCmnDlCcchSduRetx(cell, allocInfo)
3136 RgSchCmnDlRbAllocInfo *allocInfo;
3139 RgSchDlRbAlloc *rbAllocInfo;
3141 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3143 RgSchDlHqProcCb *hqP;
3146 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
3148 TRC2(rgSCHCmnDlCcchSduRetx);
3150 node = cellSch->dl.ccchSduRetxLst.first;
3153 if(cellSch->dl.maxCcchPerDlSf &&
3154 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3161 hqP = (RgSchDlHqProcCb *)(node->node);
3164 /* DwPts Scheduling Changes Start */
3166 if (rgSCHCmnRetxAvoidTdd(allocInfo->ccchSduAlloc.ccchSduDlSf,
3172 /* DwPts Scheduling Changes End */
3174 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3178 ueCb = (RgSchUeCb*)(hqP->hqE->ue);
3179 ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3181 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
3182 /* Fill RB Alloc Info */
3183 rbAllocInfo->dlSf = dlSf;
3184 rbAllocInfo->tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3185 rbAllocInfo->rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3186 /* Fix : syed iMcs setting did not correspond to RETX */
3187 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3188 rbAllocInfo->tbInfo[0].imcs);
3189 rbAllocInfo->rnti = ueCb->ueId;
3190 rbAllocInfo->tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3191 /* Fix : syed Copying info in entirety without depending on stale TX information */
3192 rbAllocInfo->tbInfo[0].tbCb = &hqP->tbInfo[0];
3193 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
3194 /* Fix : syed Assigning proc to scratchpad */
3197 retxBw += rbAllocInfo->rbsReq;
3199 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduRetxLst, \
3201 hqP->reqLnk.node = (PTR)hqP;
3205 dlSf->bwAssigned += retxBw;
3211 * @brief This function schedules downlink CCCH (Msg4) retransmissions.
3215 * Function: rgSCHCmnDlCcchRetx
3216 * Purpose: Scheduling for downlink CCCH
3218 * Invoked by: Scheduler
3220 * @param[in] RgSchCellCb *cell
3221 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3226 PRIVATE Void rgSCHCmnDlCcchRetx
3229 RgSchCmnDlRbAllocInfo *allocInfo
3232 PRIVATE Void rgSCHCmnDlCcchRetx(cell, allocInfo)
3234 RgSchCmnDlRbAllocInfo *allocInfo;
3238 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3240 RgSchDlHqProcCb *hqP;
3242 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3244 TRC2(rgSCHCmnDlCcchRetx);
3246 node = cellSch->dl.msg4RetxLst.first;
3249 if(cellSch->dl.maxCcchPerDlSf &&
3250 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3256 hqP = (RgSchDlHqProcCb *)(node->node);
3260 /* DwPts Scheduling Changes Start */
3262 if (rgSCHCmnRetxAvoidTdd(allocInfo->msg4Alloc.msg4DlSf,
3268 /* DwPts Scheduling Changes End */
3270 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3274 raCb = (RgSchRaCb*)(hqP->hqE->raCb);
3275 /* Fill RB Alloc Info */
3276 raCb->rbAllocInfo.dlSf = dlSf;
3277 raCb->rbAllocInfo.tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3278 raCb->rbAllocInfo.rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3279 /* Fix : syed iMcs setting did not correspond to RETX */
3280 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3281 raCb->rbAllocInfo.tbInfo[0].imcs);
3282 raCb->rbAllocInfo.rnti = raCb->tmpCrnti;
3283 raCb->rbAllocInfo.tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3284 /* Fix : syed Copying info in entirety without depending on stale TX information */
3285 raCb->rbAllocInfo.tbInfo[0].tbCb = &hqP->tbInfo[0];
3286 raCb->rbAllocInfo.tbInfo[0].schdlngForTb = TRUE;
3288 retxBw += raCb->rbAllocInfo.rbsReq;
3290 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4RetxLst, \
3292 hqP->reqLnk.node = (PTR)hqP;
3296 dlSf->bwAssigned += retxBw;
3302 * @brief This function implements the scheduler's DL allocation
3303 * for broadcast (on PDSCH) and paging.
3307 * Function: rgSCHCmnDlBcchPcch
3308 * Purpose: This function implements the scheduler's DL allocation
3309 * for broadcast (on PDSCH) and paging.
3311 * Invoked by: Scheduler
3313 * @param[in] RgSchCellCb* cell
3319 PRIVATE Void rgSCHCmnDlBcchPcch
3322 RgSchCmnDlRbAllocInfo *allocInfo,
3323 RgInfSfAlloc *subfrmAlloc
3326 PRIVATE Void rgSCHCmnDlBcchPcch(cell, allocInfo, subfrmAlloc)
3328 RgSchCmnDlRbAllocInfo *allocInfo;
3329 RgInfSfAlloc *subfrmAlloc;
3332 CmLteTimingInfo frm;
3334 RgSchClcDlLcCb *pcch;
3338 RgSchClcDlLcCb *bcch, *bch;
3339 #endif/*RGR_SI_SCH*/
3342 TRC2(rgSCHCmnDlBcchPcch);
3344 frm = cell->crntTime;
3346 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
3347 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
3348 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
3350 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
3353 /* Compute the subframe for which allocation is being made */
3354 /* essentially, we need pointer to the dl frame for this subframe */
3355 sf = rgSCHUtlSubFrmGet(cell, frm);
3359 bch = rgSCHDbmGetBcchOnBch(cell);
3360 #if (ERRCLASS & ERRCLS_DEBUG)
3363 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on BCH is not configured");
3367 if (bch->boLst.first != NULLP)
3369 bo = (RgSchClcBoRpt *)(bch->boLst.first->node);
3370 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3372 sf->bch.tbSize = bo->bo;
3373 cmLListDelFrm(&bch->boLst, bch->boLst.first);
3374 /* ccpu00117052 - MOD - Passing double pointer
3375 for proper NULLP assignment*/
3376 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(*bo));
3377 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, bch->lcId,TRUE);
3382 if ((frm.sfn % 4 == 0) && (frm.subframe == 0))
3387 allocInfo->bcchAlloc.schdFirst = FALSE;
3388 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
3389 #if (ERRCLASS & ERRCLS_DEBUG)
3392 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3396 if (bcch->boLst.first != NULLP)
3398 bo = (RgSchClcBoRpt *)(bcch->boLst.first->node);
3400 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3402 allocInfo->bcchAlloc.schdFirst = TRUE;
3403 /* Time to perform allocation for this BCCH transmission */
3404 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
3408 if(!allocInfo->bcchAlloc.schdFirst)
3411 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
3412 #if (ERRCLASS & ERRCLS_DEBUG)
3415 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3419 lnk = bcch->boLst.first;
3420 while (lnk != NULLP)
3422 bo = (RgSchClcBoRpt *)(lnk->node);
3424 valid = rgSCHCmnChkInWin(frm, bo->timeToTx, bo->maxTimeToTx);
3428 bo->i = RGSCH_CALC_SF_DIFF(frm, bo->timeToTx);
3429 /* Time to perform allocation for this BCCH transmission */
3430 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
3435 valid = rgSCHCmnChkPastWin(frm, bo->maxTimeToTx);
3438 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
3439 /* ccpu00117052 - MOD - Passing double pointer
3440 for proper NULLP assignment*/
3441 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo,
3442 sizeof(RgSchClcBoRpt));
3448 rgSCHDlSiSched(cell, allocInfo, subfrmAlloc);
3449 #endif/*RGR_SI_SCH*/
3451 pcch = rgSCHDbmGetPcch(cell);
3455 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"PCCH on DLSCH is not configured");
3459 if (pcch->boLst.first != NULLP)
3461 bo = (RgSchClcBoRpt *)(pcch->boLst.first->node);
3463 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3465 /* Time to perform allocation for this PCCH transmission */
3466 rgSCHCmnClcAlloc(cell, sf, pcch, RGSCH_P_RNTI, allocInfo);
3474 * Fun: rgSCHCmnChkInWin
3476 * Desc: This function checks whether frm falls within the window [start, end]
3478 * Ret: TRUE - if in window
3483 * File: rg_sch_cmn.c
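*
*       Example (illustrative values): with start = {sfn 100, subframe 2},
*       end = {sfn 101, subframe 5} and frm = {sfn 100, subframe 9},
*       frm passes the start-side check (same sfn, subframe 9 >= 2) and
*       the end-side check (sfn 100 < 101), so TRUE is returned.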
3487 PUBLIC Bool rgSCHCmnChkInWin
3489 CmLteTimingInfo frm,
3490 CmLteTimingInfo start,
3494 PUBLIC Bool rgSCHCmnChkInWin(frm, start, end)
3495 CmLteTimingInfo frm;
3496 CmLteTimingInfo start;
3497 CmLteTimingInfo end;
3502 TRC2(rgSCHCmnChkInWin);
3504 if (end.sfn > start.sfn)
3506 if (frm.sfn > start.sfn
3507 || (frm.sfn == start.sfn && frm.subframe >= start.subframe))
3509 if (frm.sfn < end.sfn
3511 || (frm.sfn == end.sfn && frm.subframe <= end.subframe))
3513 || (frm.sfn == end.sfn && frm.subframe <= start.subframe))
3520 /* Testing for wrap around, sfn wraparound check should be enough */
3521 else if (end.sfn < start.sfn)
3523 if (frm.sfn > start.sfn
3524 || (frm.sfn == start.sfn && frm.subframe >= start.subframe))
3530 if (frm.sfn < end.sfn
3531 || (frm.sfn == end.sfn && frm.subframe <= end.subframe))
3537 else /* start.sfn == end.sfn */
3539 if (frm.sfn == start.sfn
3540 && (frm.subframe >= start.subframe
3541 && frm.subframe <= end.subframe))
3548 } /* end of rgSCHCmnChkInWin*/
3552 * Fun: rgSCHCmnChkPastWin
3554 * Desc: This function checks if frm has gone past window edge
3556 * Ret: TRUE - if past window edge
3561 * File: rg_sch_cmn.c
3565 PUBLIC Bool rgSCHCmnChkPastWin
3567 CmLteTimingInfo frm,
3571 PUBLIC Bool rgSCHCmnChkPastWin(frm, end)
3572 CmLteTimingInfo frm;
3573 CmLteTimingInfo end;
3576 CmLteTimingInfo refFrm = end;
3579 TRC2(rgSCHCmnChkPastWin);
3581 RGSCH_INCR_FRAME(refFrm.sfn);
3582 RGSCH_INCR_SUB_FRAME(end, 1);
3583 pastWin = rgSCHCmnChkInWin(frm, end, refFrm);
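/* Note: "past the window" is evaluated by reusing rgSCHCmnChkInWin():
 * the probe window starts one subframe after 'end' and runs up to the
 * same subframe in the next frame (refFrm), so any frm falling inside
 * that probe window has gone past the original window edge. */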
3586 } /* end of rgSCHCmnChkPastWin*/
3589 * @brief This function implements allocation of the resources for common
3590 * channels BCCH, PCCH.
3594 * Function: rgSCHCmnClcAlloc
3595 * Purpose: This function implements selection of the number of RBs based
3596 * on the allowed grant for the service. It is also responsible
3597 * for selection of MCS for the transmission.
3599 * Invoked by: Scheduler
3601 * @param[in] RgSchCellCb *cell,
3602 * @param[in] RgSchDlSf *sf,
3603 * @param[in] RgSchClcDlLcCb *lch,
3604 * @param[in] U16 rnti,
3605 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3610 PRIVATE Void rgSCHCmnClcAlloc
3614 RgSchClcDlLcCb *lch,
3616 RgSchCmnDlRbAllocInfo *allocInfo
3619 PRIVATE Void rgSCHCmnClcAlloc(cell, sf, lch, rnti, allocInfo)
3622 RgSchClcDlLcCb *lch;
3624 RgSchCmnDlRbAllocInfo *allocInfo;
3627 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
3634 U8 cfi = cellDl->currCfi;
3637 TRC2(rgSCHCmnClcAlloc);
3639 bo = (RgSchClcBoRpt *)(lch->boLst.first->node);
3643 /* rgSCHCmnClcRbAllocForFxdTb(cell, bo->bo, cellDl->ccchCqi, &rb);*/
3644 if(cellDl->bitsPerRb==0)
3646 while ((rgTbSzTbl[0][0][rb]) < (tbs*8))
3654 rb = RGSCH_CEIL((tbs*8), cellDl->bitsPerRb);
3656 /* DwPTS Scheduling Changes Start */
3658 if(sf->sfType == RG_SCH_SPL_SF_DATA)
3660 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
3662 /* Calculate the less RE's because of DwPTS */
3663 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
3665 /* Increase number of RBs in Spl SF to compensate for lost REs */
3666 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
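/* Compensation logic: each special-subframe RB carries only
 * numReDwPts[cfi] REs instead of noResPerRb[cfi], so the rb RBs above
 * lose rb * (noResPerRb - numReDwPts) REs in total; ceil-dividing that
 * loss by the per-RB DwPTS capacity gives the extra RBs added here. */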
3669 /* DwPTS Scheduling Changes End */
3670 /*ccpu00115595- end*/
3671 /* additional check to see if required RBs
3672 * exceeds the available */
3673 if (rb > sf->bw - sf->bwAssigned)
3675 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"BW allocation "
3676 "failed for CRNTI:%d",rnti);
3680 /* Update the subframe Allocated BW field */
3681 sf->bwAssigned = sf->bwAssigned + rb;
3682 /* Fill in the BCCH/PCCH transmission info to the RBAllocInfo struct */
3683 if (rnti == RGSCH_SI_RNTI)
3685 allocInfo->bcchAlloc.rnti = rnti;
3686 allocInfo->bcchAlloc.dlSf = sf;
3687 allocInfo->bcchAlloc.tbInfo[0].bytesReq = tbs;
3688 allocInfo->bcchAlloc.rbsReq = rb;
3689 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
3690 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
3691 /* Nprb indication at PHY for common Ch */
3692 allocInfo->bcchAlloc.nPrb = bo->nPrb;
3696 allocInfo->pcchAlloc.rnti = rnti;
3697 allocInfo->pcchAlloc.dlSf = sf;
3698 allocInfo->pcchAlloc.tbInfo[0].bytesReq = tbs;
3699 allocInfo->pcchAlloc.rbsReq = rb;
3700 allocInfo->pcchAlloc.tbInfo[0].imcs = mcs;
3701 allocInfo->pcchAlloc.tbInfo[0].noLyr = 1;
3702 allocInfo->pcchAlloc.nPrb = bo->nPrb;
3709 * @brief This function implements PDCCH allocation for common channels.
3713 * Function: rgSCHCmnCmnPdcchAlloc
3714 * Purpose: This function implements allocation of PDCCH for common channels.
3715 * 1. This uses index 0 of the PDCCH table for efficiency.
3716 * 2. Uses the candidate PDCCH count for the aggr level.
3717 * 3. Look for availability for each candidate and choose
3718 * the first one available.
3720 * Invoked by: Scheduler
3722 * @param[in] RgSchCellCb *cell
3723 * @param[in] RgSchDlSf *sf
3724 * @return RgSchPdcch *
3725 * -# NULLP when unsuccessful
3729 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc
3735 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc(cell, subFrm)
3741 CmLteAggrLvl aggrLvl;
3742 RgSchPdcchInfo *pdcchInfo;
3744 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3745 U8 numCce; /*store num CCEs based on
3746 aggregation level */
3747 TRC2(rgSCHCmnCmnPdcchAlloc);
3749 aggrLvl = cellSch->dl.cmnChAggrLvl;
3751 pdcchInfo = &(subFrm->pdcchInfo);
3753 /* Updating the no. of nCce in pdcchInfo, in case if CFI
3756 if(subFrm->nCce != pdcchInfo->nCce)
3758 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
3761 if(cell->nCce != pdcchInfo->nCce)
3763 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
3769 case CM_LTE_AGGR_LVL4:
3772 case CM_LTE_AGGR_LVL8:
3775 case CM_LTE_AGGR_LVL16:
3782 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
3785 pdcch->isSpsRnti = FALSE;
3787 /* Increment the CCE used counter in the current subframe */
3788 subFrm->cceCnt += numCce;
3789 pdcch->pdcchSearchSpace = RG_SCH_CMN_SEARCH_SPACE;
3794 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
3795 subFrm->isCceFailure = TRUE;
3797 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
3798 "PDCCH ERR: NO PDDCH AVAIL IN COMMON SEARCH SPACE aggr:%u",
3805 * @brief This function implements bandwidth allocation for common channels.
3809 * Function: rgSCHCmnClcRbAlloc
3810 * Purpose: This function implements bandwidth allocation logic
3811 * for common control channels.
3813 * Invoked by: Scheduler
3815 * @param[in] RgSchCellCb* cell
3819 * @param[in] U32 *tbs
3820 * @param[in] U8 *mcs
3821 * @param[in] RgSchDlSf *sf
3827 PUBLIC Void rgSCHCmnClcRbAlloc
3840 PUBLIC Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, iTbs, isSpsBo)
3853 PRIVATE Void rgSCHCmnClcRbAlloc
3864 PRIVATE Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, sf)
3873 #endif /* LTEMAC_SPS */
3876 RgSchCmnTbSzEff *effTbl;
3879 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3880 U8 cfi = cellSch->dl.currCfi;
3882 TRC2(rgSCHCmnClcRbAlloc);
3884 /* first get the CQI to MCS table and determine the number of RBs */
3885 effTbl = (RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]);
3886 iTbsVal = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[cqi];
3887 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
3889 /* Efficiency is number of bits per 1024 REs */
3890 eff = (*effTbl)[iTbsVal];
3892 /* Get the number of REs needed for this bo */
3893 noRes = ((bo * 8 * 1024) / eff );
3895 /* Get the number of RBs needed for this transmission */
3896 /* Number of RBs = No of REs / No of REs per RB */
3897 tmpRb = RGSCH_CEIL(noRes, cellSch->dl.noResPerRb[cfi]);
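/* Worked example (assumed illustrative values): for bo = 100 bytes and
 * eff = 320 bits per 1024 REs, noRes = (100 * 8 * 1024) / 320 = 2560 REs;
 * with noResPerRb[cfi] = 120 REs per RB this gives
 * tmpRb = ceil(2560 / 120) = 22 RBs, which is then capped by
 * maxDlBwPerUe and refined against rgTbSzTbl below. */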
3898 /* KWORK_FIX: added check to see if rb has crossed maxRb*/
3899 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3900 if (tmpRb > cellSch->dl.maxDlBwPerUe)
3902 tmpRb = cellSch->dl.maxDlBwPerUe;
3904 while ((rgTbSzTbl[0][iTbsVal][tmpRb-1]/8) < bo &&
3905 (tmpRb < cellSch->dl.maxDlBwPerUe))
3908 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3910 *tbs = rgTbSzTbl[0][iTbsVal][tmpRb-1]/8;
3912 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
3920 * @brief Scheduling for MSG4.
3924 * Function: rgSCHCmnMsg4Alloc
3925 * Purpose: Scheduling for MSG4
3927 * Invoked by: Scheduler
3929 * @param[in] RgSchCellCb* cell
3930 * @param[in] RgSchRaCb* raCb
3931 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3936 PRIVATE S16 rgSCHCmnMsg4Alloc
3940 RgSchCmnDlRbAllocInfo *allocInfo
3943 PRIVATE S16 rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)
3946 RgSchCmnDlRbAllocInfo *allocInfo;
3949 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3951 TRC2(rgSCHCmnMsg4Alloc);
3953 /* SR_RACH_STATS : MSG4 TO BE TXED */
3955 /* Return if subframe BW exhausted */
3956 if (allocInfo->msg4Alloc.msg4DlSf->bw <=
3957 allocInfo->msg4Alloc.msg4DlSf->bwAssigned)
3959 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId ,
3964 if (rgSCHDhmGetMsg4HqProc(raCb, cellSch->dl.time) != ROK)
3966 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3967 "rgSCHDhmGetMsg4HqProc failed");
3971 raCb->rbAllocInfo.dlSf = allocInfo->msg4Alloc.msg4DlSf;
3973 if (rgSCHCmnMsg4DedAlloc(cell, raCb) != ROK)
3975 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
3976 rgSCHDhmRlsHqpTb(raCb->dlHqE->msg4Proc, 0, FALSE);
3977 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3978 "rgSCHCmnMsg4DedAlloc failed.");
3981 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4TxLst, &raCb->dlHqE->msg4Proc->reqLnk);
3982 raCb->dlHqE->msg4Proc->reqLnk.node = (PTR)raCb->dlHqE->msg4Proc;
3983 allocInfo->msg4Alloc.msg4DlSf->schdCcchUe++;
3990 * @brief This function implements PDCCH allocation for a UE.
3994 * Function: rgSCHCmnPdcchAlloc
3995 * Purpose: This function implements allocation of PDCCH for a UE.
3996 * 1. Get the aggregation level for the CQI of the UE.
3997 * 2. Get the candidate PDCCH count for the aggr level.
3998 * 3. Look for availability for each candidate and choose
3999 * the first one available.
4001 * Invoked by: Scheduler
4006 * @param[in] dciFrmt
4007 * @return RgSchPdcch *
4008 * -# NULLP when unsuccessful
4012 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc
4018 TfuDciFormat dciFrmt,
4022 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc(cell, subFrm, cqi, dciFrmt, isDtx)
4027 TfuDciFormat dciFrmt;
4031 CmLteAggrLvl aggrLvl;
4032 RgSchPdcchInfo *pdcchInfo;
4036 TRC2(rgSCHCmnPdcchAlloc);
4038 /* 3.1 consider the selected DCI format size in determining the
4039 * aggregation level */
4040 //TODO_SID Need to update. Currently using 4 aggregation level
4041 aggrLvl = CM_LTE_AGGR_LVL2;//cellSch->dciAggrLvl[cqi][dciFrmt];
4044 if((dciFrmt == TFU_DCI_FORMAT_1A) &&
4045 ((ue) && (ue->allocCmnUlPdcch)) )
4047 pdcch = rgSCHCmnCmnPdcchAlloc(cell, subFrm);
4048 /* Since CRNTI Scrambled */
4051 pdcch->dciNumOfBits = ue->dciSize.cmnSize[dciFrmt];
4052 // prc_trace_format_string(PRC_TRACE_GROUP_PS, PRC_TRACE_INFO_LOW,"Forcing alloc in CMN search spc size %d fmt %d \n",
4053 // pdcch->dciNumOfBits, dciFrmt);
4059 /* Increment aggrLvl to the next level if it is not AGGR_LVL16 (MAX SIZE)
4060 * in order to increase the redundancy bits for better decoding at the UE */
4063 if (aggrLvl != CM_LTE_AGGR_LVL16)
4067 case CM_LTE_AGGR_LVL2:
4068 aggrLvl = CM_LTE_AGGR_LVL4;
4070 case CM_LTE_AGGR_LVL4:
4071 aggrLvl = CM_LTE_AGGR_LVL8;
4073 case CM_LTE_AGGR_LVL8:
4074 aggrLvl = CM_LTE_AGGR_LVL16;
4083 pdcchInfo = &subFrm->pdcchInfo;
4085 /* Updating the no. of nCce in pdcchInfo, in case if CFI
4088 if(subFrm->nCce != pdcchInfo->nCce)
4090 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
4093 if(cell->nCce != pdcchInfo->nCce)
4095 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
4099 if (pdcchInfo->nCce < (1 << (aggrLvl - 1)))
4101 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4102 subFrm->isCceFailure = TRUE;
4103 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4104 "PDCCH ERR: NO PDDCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4110 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
4112 /* SR_RACH_STATS : Reset isTBMsg4 */
4113 pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4= FALSE;
4114 pdcch->dci.u.format0Info.isSrGrant = FALSE;
4116 pdcch->isSpsRnti = FALSE;
4118 /* Increment the CCE used counter in the current subframe */
4119 subFrm->cceCnt += aggrLvl;
4120 pdcch->pdcchSearchSpace = RG_SCH_UE_SPECIFIC_SEARCH_SPACE;
4124 if (ue->cell != cell)
4126 /* Secondary Cell */
4127 //pdcch->dciNumOfBits = ue->dciSize.noUlCcSize[dciFrmt];
4128 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4133 //pdcch->dciNumOfBits = ue->dciSize.dedSize[dciFrmt];
4134 //TODO_SID Need to update dci size.
4135 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4141 pdcch->dciNumOfBits = cell->dciSize.size[dciFrmt];
4146 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4147 subFrm->isCceFailure = TRUE;
4149 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4150 "PDCCH ERR: NO PDDCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4157 * @brief This function implements BW allocation for CCCH SDU
4161 * Function: rgSCHCmnCcchSduDedAlloc
4162 * Purpose: Downlink bandwidth Allocation for CCCH SDU.
4164 * Invoked by: Scheduler
4166 * @param[in] RgSchCellCb* cell
4167 * @param[out] RgSchUeCb *ueCb
4172 PRIVATE S16 rgSCHCmnCcchSduDedAlloc
4178 PRIVATE S16 rgSCHCmnCcchSduDedAlloc(cell, ueCb)
4183 RgSchDlHqEnt *hqE = NULLP;
4185 RgSchDlRbAlloc *rbAllocinfo = NULLP;
4186 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4190 U8 cfi = cellDl->currCfi;
4193 TRC2(rgSCHCmnCcchSduDedAlloc);
4195 rbAllocinfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
4197 effBo = ueCb->dlCcchInfo.bo + RGSCH_CCCH_SDU_HDRSIZE;
4200 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4201 &rbAllocinfo->tbInfo[0].bytesReq,
4202 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4203 #else /* LTEMAC_SPS */
4204 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4205 &rbAllocinfo->tbInfo[0].bytesReq,\
4206 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4208 #endif /* LTEMAC_SPS */
4211 /* Cannot exceed the total number of RBs in the cell */
4212 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4213 rbAllocinfo->dlSf->bwAssigned)))
4215 /* Check if at least one allocation was possible.
4216 This may be the case where the BW is very small and,
4217 with the configured CCCH CQI, the CCCH SDU exceeds the min BW */
4218 if (rbAllocinfo->dlSf->bwAssigned == 0)
4220 numRb = rbAllocinfo->dlSf->bw;
4221 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4222 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4226 rbAllocinfo->rbsReq = numRb;
4227 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4228 /* DwPTS Scheduling Changes Start */
4230 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4232 rbAllocinfo->tbInfo[0].bytesReq =
4233 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1,cfi);
4236 /* DwPTS Scheduling Changes End */
4237 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4245 /* Update the subframe Allocated BW field */
4246 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4247 rbAllocinfo->rbsReq;
4248 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
4249 rbAllocinfo->tbInfo[0].tbCb = &hqE->ccchSduProc->tbInfo[0];
4250 rbAllocinfo->rnti = ueCb->ueId;
4251 rbAllocinfo->tbInfo[0].noLyr = 1;
4258 * @brief This function implements BW allocation for MSG4
4262 * Function: rgSCHCmnMsg4DedAlloc
4263 * Purpose: Downlink bandwidth Allocation for MSG4.
4265 * Invoked by: Scheduler
4267 * @param[in] RgSchCellCb* cell
4268 * @param[out] RgSchRaCb *raCb
4273 PRIVATE S16 rgSCHCmnMsg4DedAlloc
4279 PRIVATE S16 rgSCHCmnMsg4DedAlloc(cell, raCb)
4285 RgSchDlRbAlloc *rbAllocinfo = &raCb->rbAllocInfo;
4289 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4290 U8 cfi = cellDl->currCfi;
4293 TRC2(rgSCHCmnMsg4DedAlloc);
4295 effBo = raCb->dlCcchInfo.bo + RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE;
4298 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4299 &rbAllocinfo->tbInfo[0].bytesReq,\
4300 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4301 #else /* LTEMAC_SPS */
4302 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4303 &rbAllocinfo->tbInfo[0].bytesReq,\
4304 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4306 #endif /* LTEMAC_SPS */
4309 /* Cannot exceed the total number of RBs in the cell */
4310 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4311 rbAllocinfo->dlSf->bwAssigned)))
4313 /* Check if at least one allocation was possible.
4314 This may be the case where the BW is very small and,
4315 with the configured CCCH CQI, the CCCH SDU exceeds the min BW */
4316 if (rbAllocinfo->dlSf->bwAssigned == 0)
4318 numRb = rbAllocinfo->dlSf->bw;
4319 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4320 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4324 rbAllocinfo->rbsReq = numRb;
4325 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4326 /* DwPTS Scheduling Changes Start */
4328 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4330 rbAllocinfo->tbInfo[0].bytesReq =
4331 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1, cfi);
4334 /* DwPTS Scheduling Changes End */
4335 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4343 /* Update the subframe Allocated BW field */
4344 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4345 rbAllocinfo->rbsReq;
4346 rbAllocinfo->rnti = raCb->tmpCrnti;
4347 rbAllocinfo->tbInfo[0].tbCb = &raCb->dlHqE->msg4Proc->tbInfo[0];
4348 rbAllocinfo->tbInfo[0].schdlngForTb = TRUE;
4349 rbAllocinfo->tbInfo[0].noLyr = 1;
4356 * @brief This function implements scheduling for RA Response.
4360 * Function: rgSCHCmnDlRaRsp
4361 * Purpose: Downlink scheduling for RA responses.
4363 * Invoked by: Scheduler
4365 * @param[in] RgSchCellCb* cell
4370 PRIVATE Void rgSCHCmnDlRaRsp
4373 RgSchCmnDlRbAllocInfo *allocInfo
4376 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4378 RgSchCmnDlRbAllocInfo *allocInfo;
4381 CmLteTimingInfo frm;
4382 CmLteTimingInfo schFrm;
4388 RgSchTddRachRspLst *rachRsp;
4389 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
4393 TRC2(rgSCHCmnDlRaRsp);
4395 frm = cell->crntTime;
4396 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4398 /* Compute the subframe for which allocation is being made */
4399 /* essentially, we need pointer to the dl frame for this subframe */
4400 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4402 /* Get the RACH Response scheduling related information
4403 * for the subframe with RA index */
4404 raIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][frm.subframe]-1;
4406 rachRsp = &cell->rachRspLst[raIdx];
4408 for(sfnIdx = 0; sfnIdx < rachRsp->numRadiofrms; sfnIdx++)
4410 /* For all scheduled RACH Responses in SFNs */
4412 RG_SCH_CMN_DECR_FRAME(schFrm.sfn, rachRsp->rachRsp[sfnIdx].sfnOffset);
4413 /* For all scheduled RACH Responses in subframes */
4415 subfrmIdx < rachRsp->rachRsp[sfnIdx].numSubfrms; subfrmIdx++)
4417 schFrm.subframe = rachRsp->rachRsp[sfnIdx].subframe[subfrmIdx];
4418 /* compute the last RA RNTI used in the previous subframe */
4419 raIdx = (((schFrm.sfn % cell->raInfo.maxRaSize) * \
4420 RGSCH_NUM_SUB_FRAMES * RGSCH_MAX_RA_RNTI_PER_SUBFRM) \
4423 /* For all RA RNTIs within a subframe */
4425 for(i=0; (i < RGSCH_MAX_RA_RNTI_PER_SUBFRM) && \
4426 (noRaRnti < RGSCH_MAX_TDD_RA_RSP_ALLOC); i++)
4428 rarnti = (schFrm.subframe + RGSCH_NUM_SUB_FRAMES*i + 1);
4429 rntiIdx = (raIdx + RGSCH_NUM_SUB_FRAMES*i);
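/* This matches the RA-RNTI convention 1 + t_id + 10 * f_id
 * (TS 36.321): schFrm.subframe is t_id and 'i' indexes the
 * frequency-domain PRACH occasion; rntiIdx is the matching
 * slot in cell->raInfo.raReqLst[]. */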
4431 if (cell->raInfo.raReqLst[rntiIdx].first != NULLP)
4433 /* compute the next RA RNTI */
4434 if (rgSCHCmnRaRspAlloc(cell, subFrm, rntiIdx,
4435 rarnti, noRaRnti, allocInfo) != ROK)
4437 /* The resources are exhausted */
4451 * @brief This function implements scheduling for RA Response.
4455 * Function: rgSCHCmnDlRaRsp
4456 * Purpose: Downlink scheduling for RA responses.
4458 * Invoked by: Scheduler
4460 * @param[in] RgSchCellCb* cell
4461 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4466 PRIVATE Void rgSCHCmnDlRaRsp //FDD
4469 RgSchCmnDlRbAllocInfo *allocInfo
4472 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4474 RgSchCmnDlRbAllocInfo *allocInfo;
4477 CmLteTimingInfo frm;
4478 CmLteTimingInfo winStartFrm;
4484 RgSchCmnCell *sched;
4486 TRC2(rgSCHCmnDlRaRsp);
4488 frm = cell->crntTime;
4489 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4491 /* Compute the subframe for which allocation is being made */
4492 /* essentially, we need pointer to the dl frame for this subframe */
4493 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4494 sched = RG_SCH_CMN_GET_CELL(cell);
4496 /* ccpu00132523 - Window Start calculated by considering RAR window size,
4497 * RAR Wait period, Subframes occupied for the respective preamble format*/
4498 winGap = (sched->dl.numRaSubFrms-1) + (cell->rachCfg.raWinSize-1)
4499 +RGSCH_RARSP_WAIT_PERIOD;
4501 /* Window starting occasion is retrieved using the gap and then
4502 * mapped onto the size of the raReqLst array */
4503 RGSCHDECRFRMCRNTTIME(frm, winStartFrm, winGap);
4505 //5G_TODO TIMING update. Need to check
4506 winStartIdx = (winStartFrm.sfn & 1) * RGSCH_MAX_RA_RNTI+ winStartFrm.subframe;
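/* winStartIdx maps the window-start timing onto the raReqLst ring
 * buffer, which appears to hold RGSCH_MAX_RA_RNTI slots per radio
 * frame for two frames (hence sfn & 1 selects the half and the
 * subframe selects the slot); the loop below then walks raWinSize
 * consecutive slots modulo RGSCH_RAREQ_ARRAY_SIZE. */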
4508 for(i = 0; ((i < cell->rachCfg.raWinSize) && (noRaRnti < RG_SCH_CMN_MAX_CMN_PDCCH)); i++)
4510 raIdx = (winStartIdx + i) % RGSCH_RAREQ_ARRAY_SIZE;
4512 if (cell->raInfo.raReqLst[raIdx].first != NULLP)
4514 allocInfo->raRspAlloc[noRaRnti].biEstmt = \
4515 (!i * RGSCH_ONE_BIHDR_SIZE);
4516 rarnti = raIdx % RGSCH_MAX_RA_RNTI+ 1;
4517 if (rgSCHCmnRaRspAlloc(cell, subFrm, raIdx,
4518 rarnti, noRaRnti, allocInfo) != ROK)
4520 /* The resources are exhausted */
4523 /* ccpu00132523- If all the RAP IDs are not scheduled then do not
4524 * proceed to the next RA-RNTIs */
4525 if(allocInfo->raRspAlloc[noRaRnti].numRapids < cell->raInfo.raReqLst[raIdx].count)
4529 noRaRnti++; /* Max of RG_SCH_CMN_MAX_CMN_PDCCH RARNTIs
4530 for response allocation */
4539 * @brief This function allocates the resources for an RARNTI.
4543 * Function: rgSCHCmnRaRspAlloc
4544 * Purpose: Allocate resources to a RARNTI.
4545 * 0. Allocate PDCCH for sending the response.
4546 * 1. Locate the number of RA requests pending for the RARNTI.
4547 * 2. Compute the size of data to be built.
4548 * 3. Using common channel CQI, compute the number of RBs.
4550 * Invoked by: Scheduler
4552 * @param[in] RgSchCellCb *cell,
4553 * @param[in] RgSchDlSf *subFrm,
4554 * @param[in] U16 rarnti,
4555 * @param[in] U8 noRaRnti
4556 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4561 PRIVATE S16 rgSCHCmnRaRspAlloc
4568 RgSchCmnDlRbAllocInfo *allocInfo
4571 PRIVATE S16 rgSCHCmnRaRspAlloc(cell,subFrm,raIndex,rarnti,noRaRnti,allocInfo)
4577 RgSchCmnDlRbAllocInfo *allocInfo;
4580 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4581 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4585 /*ccpu00116700,ccpu00116708- Corrected the wrong type for mcs*/
4588 /* RACH handling related changes */
4589 Bool isAlloc = FALSE;
4590 static U8 schdNumRapid = 0;
4596 U8 cfi = cellDl->currCfi;
4599 TRC2(rgSCHCmnRaRspAlloc);
4604 /* ccpu00132523: Resetting the schdRap Id count in every scheduling subframe*/
4611 if (subFrm->bw == subFrm->bwAssigned)
4613 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4614 "bw == bwAssigned RARNTI:%d",rarnti);
4618 reqLst = &cell->raInfo.raReqLst[raIndex];
4619 if (reqLst->count == 0)
4621 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4622 "reqLst Count=0 RARNTI:%d",rarnti);
4625 remNumRapid = reqLst->count;
4628 /* Limit number of rach rsps to maxMsg3PerUlsf */
4629 if ( schdNumRapid+remNumRapid > cellUl->maxMsg3PerUlSf )
4631 remNumRapid = cellUl->maxMsg3PerUlSf-schdNumRapid;
4637 /* Try allocating for as many RAPIDs as possible */
4638 /* BI sub-header size to the tbSize requirement */
4639 noBytes = RGSCH_GET_RAR_BYTES(remNumRapid) +\
4640 allocInfo->raRspAlloc[noRaRnti].biEstmt;
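/* RAR TB sizing: RGSCH_GET_RAR_BYTES() covers one RAR (plus subheader)
 * per pending RAPID, and biEstmt adds the optional Backoff Indicator
 * subheader, which the FDD scheduler estimates only for the first
 * RA-RNTI of the window (!i * RGSCH_ONE_BIHDR_SIZE). */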
4641 if ((allwdTbSz = rgSCHUtlGetAllwdCchTbSz(noBytes*8, &nPrb, &mcs)) == -1)
4647 /* rgSCHCmnClcRbAllocForFxdTb(cell, allwdTbSz/8, cellDl->ccchCqi, &rb);*/
4648 if(cellDl->bitsPerRb==0)
4650 while ((rgTbSzTbl[0][0][rb]) <(U32) allwdTbSz)
4658 rb = RGSCH_CEIL(allwdTbSz, cellDl->bitsPerRb);
4660 /* DwPTS Scheduling Changes Start */
4662 if (subFrm->sfType == RG_SCH_SPL_SF_DATA)
4664 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
4666 /* Calculate the less RE's because of DwPTS */
4667 lostRe = rb * (cellDl->noResPerRb[cfi] -
4668 cellDl->numReDwPts[cfi]);
4670 /* Increase number of RBs in Spl SF to compensate for lost REs */
4671 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
4674 /* DwPTS Scheduling Changes End */
4676 /*ccpu00115595- end*/
4677 if (rb > subFrm->bw - subFrm->bwAssigned)
4682 /* Allocation succeeded for 'remNumRapid' */
4685 printf("\n!!!RAR alloc noBytes:%u,allwdTbSz:%u,tbs:%u,rb:%u\n",
4686 noBytes,allwdTbSz,tbs,rb);
4691 RLOG_ARG0(L_INFO,DBG_CELLID,cell->cellId,"BW alloc Failed");
4695 subFrm->bwAssigned = subFrm->bwAssigned + rb;
4697 /* Fill AllocInfo structure */
4698 allocInfo->raRspAlloc[noRaRnti].rnti = rarnti;
4699 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].bytesReq = tbs;
4700 allocInfo->raRspAlloc[noRaRnti].rbsReq = rb;
4701 allocInfo->raRspAlloc[noRaRnti].dlSf = subFrm;
4702 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].imcs = mcs;
4703 allocInfo->raRspAlloc[noRaRnti].raIndex = raIndex;
4704 /* RACH changes for multiple RAPID handling */
4705 allocInfo->raRspAlloc[noRaRnti].numRapids = remNumRapid;
4706 allocInfo->raRspAlloc[noRaRnti].nPrb = nPrb;
4707 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].noLyr = 1;
4708 allocInfo->raRspAlloc[noRaRnti].vrbgReq = RGSCH_CEIL(nPrb,MAX_5GTF_VRBG_SIZE);
4709 schdNumRapid += remNumRapid;
4713 /***********************************************************
4715 * Func : rgSCHCmnUlAllocFillRbInfo
4717 * Desc : Fills the start RB and the number of RBs for
4718 * uplink allocation.
4726 **********************************************************/
4728 PUBLIC Void rgSCHCmnUlAllocFillRbInfo
4735 PUBLIC Void rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc)
4738 RgSchUlAlloc *alloc;
4741 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4742 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4743 U8 cfi = cellDl->currCfi;
4746 TRC2(rgSCHCmnUlAllocFillRbInfo);
4747 alloc->grnt.rbStart = (alloc->sbStart * cellUl->sbSize) +
4748 cell->dynCfiCb.bwInfo[cfi].startRb;
4750 /* Num RBs = numSbAllocated * sbSize - less RBs in the last SB */
4751 alloc->grnt.numRb = (alloc->numSb * cellUl->sbSize);
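/* UL grants are subband granular: rbStart is the allocated subband
 * start scaled by the subband size and offset by the first usable UL
 * RB for the current CFI; numRb follows directly from numSb * sbSize. */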
4757 * @brief Grant request for Msg3.
4761 * Function : rgSCHCmnMsg3GrntReq
4763 * This is invoked by downlink scheduler to request allocation
4766 * - Attempt to allocate msg3 in the current msg3 subframe
4767 * Allocation attempt based on whether preamble is from group A
4768 * and the value of MESSAGE_SIZE_GROUP_A
4769 * - Link allocation with passed RNTI and msg3 HARQ process
4770 * - Set the HARQ process ID (*hqProcIdRef)
4772 * @param[in] RgSchCellCb *cell
4773 * @param[in] CmLteRnti rnti
4774 * @param[in] Bool preamGrpA
4775 * @param[in] RgSchUlHqProcCb *hqProc
4776 * @param[out] RgSchUlAlloc **ulAllocRef
4777 * @param[out] U8 *hqProcIdRef
4781 PRIVATE Void rgSCHCmnMsg3GrntReq
4786 RgSchUlHqProcCb *hqProc,
4787 RgSchUlAlloc **ulAllocRef,
4791 PRIVATE Void rgSCHCmnMsg3GrntReq(cell, rnti, preamGrpA, hqProc,
4792 ulAllocRef, hqProcIdRef)
4796 RgSchUlHqProcCb *hqProc;
4797 RgSchUlAlloc **ulAllocRef;
4801 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4802 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
4804 RgSchUlAlloc *alloc;
4808 TRC2(rgSCHCmnMsg3GrntReq);
4810 *ulAllocRef = NULLP;
4812 /* Fix: ccpu00120610 Use remAllocs from subframe during msg3 allocation */
4813 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
4817 if (preamGrpA == FALSE)
4819 numSb = cellUl->ra.prmblBNumSb;
4820 iMcs = cellUl->ra.prmblBIMcs;
4824 numSb = cellUl->ra.prmblANumSb;
4825 iMcs = cellUl->ra.prmblAIMcs;
4828 if ((hole = rgSCHUtlUlHoleFirst(sf)) != NULLP)
4830 if(*sf->allocCountRef == 0)
4832 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4833 /* Reinitialize the hole */
4834 if (sf->holeDb->count == 1 && (hole->start == 0)) /* Sanity check of holeDb */
4836 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4837 /* Re-Initialize available subbands because of CFI change*/
4838 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4842 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4843 "Error! holeDb sanity check failed RNTI:%d",rnti);
4846 if (numSb <= hole->num)
4849 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
4850 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
4851 alloc->grnt.iMcs = iMcs;
4852 alloc->grnt.iMcsCrnt = iMcs;
4853 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
4854 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
4855 /* To include the length and ModOrder in DataRecp Req.*/
4856 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
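/* datSz (bytes) is read from the single-layer TB size table using the
 * iTbs derived from the configured Msg3 iMcs and the granted RB count;
 * the /8 converts the table entry from bits to bytes for the data
 * reception request towards PHY. */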
4857 RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
4858 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
4859 alloc->grnt.nDmrs = 0;
4860 alloc->grnt.hop = 0;
4861 alloc->grnt.delayBit = 0;
4862 alloc->grnt.isRtx = FALSE;
4863 *ulAllocRef = alloc;
4864 *hqProcIdRef = (cellUl->msg3SchdHqProcIdx);
4865 hqProc->procId = *hqProcIdRef;
4866 hqProc->ulSfIdx = (cellUl->msg3SchdIdx);
4869 alloc->pdcch = FALSE;
4870 alloc->forMsg3 = TRUE;
4871 alloc->hqProc = hqProc;
4872 rgSCHUhmNewTx(hqProc, (U8)(cell->rachCfg.maxMsg3Tx - 1), alloc);
4873 //RLOG_ARG4(L_DEBUG,DBG_CELLID,cell->cellId,
4875 "\nRNTI:%d MSG3 ALLOC proc(%p)procId(%d)schdIdx(%d)\n",
4877 ((PTR)alloc->hqProc),
4878 alloc->hqProc->procId,
4879 alloc->hqProc->ulSfIdx);
4880 RLOG_ARG2(L_DEBUG,DBG_CELLID,cell->cellId,
4881 "alloc(%p)maxMsg3Tx(%d)",
4883 cell->rachCfg.maxMsg3Tx);
4892 * @brief This function determines the allocation limits and
4893 * parameters that aid in DL scheduling.
4897 * Function: rgSCHCmnDlSetUeAllocLmt
4898 * Purpose: This function determines the maximum RBs
4899 * a UE is eligible to get based on the softbuffer
4900 * limitation and cell->maxDlBwPerUe. The codeword
4901 * specific parameters like iTbs, eff and noLyrs
4902 * are also set in this function. This function
4903 * is called during UE configuration and on UeDlCqiInd.
4905 * Invoked by: Scheduler
4907 * @param[in] RgSchCellCb *cellCb
4908 * @param[in] RgSchCmnDlUe *ueDl
4913 PRIVATE Void rgSCHCmnDlSetUeAllocLmt
4920 PRIVATE Void rgSCHCmnDlSetUeAllocLmt(cell, ueDl, isEmtcUe)
4928 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
4929 U8 cfi = cellSch->dl.currCfi;
4931 TRC2(rgSCHCmnDlSetUeAllocLmt);
4934 if(TRUE == isEmtcUe)
4936 /* ITbs for CW0 for 1 Layer Tx */
4937 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4938 [ueDl->mimoInfo.cwInfo[0].cqi];
4939 /* ITbs for CW0 for 2 Layer Tx */
4940 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4941 [ueDl->mimoInfo.cwInfo[0].cqi];
4942 /* Eff for CW0 for 1 Layer Tx */
4943 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4944 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4945 /* Eff for CW0 for 2 Layer Tx */
4946 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4947 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4949 /* ITbs for CW1 for 1 Layer Tx */
4950 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4951 [ueDl->mimoInfo.cwInfo[1].cqi];
4952 /* ITbs for CW1 for 2 Layer Tx */
4953 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4954 [ueDl->mimoInfo.cwInfo[1].cqi];
4955 /* Eff for CW1 for 1 Layer Tx */
4956 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4957 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4958 /* Eff for CW1 for 2 Layer Tx */
4959 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4960 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
4965 /* ITbs for CW0 for 1 Layer Tx */
4966 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4967 [ueDl->mimoInfo.cwInfo[0].cqi];
4968 /* ITbs for CW0 for 2 Layer Tx */
4969 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4970 [ueDl->mimoInfo.cwInfo[0].cqi];
4971 /* Eff for CW0 for 1 Layer Tx */
4972 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4973 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4974 /* Eff for CW0 for 2 Layer Tx */
4975 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4976 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4978 /* ITbs for CW1 for 1 Layer Tx */
4979 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4980 [ueDl->mimoInfo.cwInfo[1].cqi];
4981 /* ITbs for CW1 for 2 Layer Tx */
4982 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4983 [ueDl->mimoInfo.cwInfo[1].cqi];
4984 /* Eff for CW1 for 1 Layer Tx */
4985 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4986 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4987 /* Eff for CW1 for 2 Layer Tx */
4988 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4989 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
4993 // ueDl->laCb.cqiBasediTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0] * 100;
4995 /* Assigning noLyrs to each CW assuming optimal Spatial multiplexing
4997 (ueDl->mimoInfo.ri/2 == 0)? (ueDl->mimoInfo.cwInfo[0].noLyr = 1) : \
4998 (ueDl->mimoInfo.cwInfo[0].noLyr = ueDl->mimoInfo.ri/2);
4999 ueDl->mimoInfo.cwInfo[1].noLyr = ueDl->mimoInfo.ri - ueDl->mimoInfo.cwInfo[0].noLyr;
5000 /* rg002.101:ccpu00102106: correcting DL harq softbuffer limitation logic.
5001 * The maxTbSz is the maximum number of PHY bits a harq process can
5002 * hold. Hence we limit our allocation per harq process based on this.
5003 * Earlier implementation we misinterpreted the maxTbSz to be per UE
5004 * per TTI, but in fact it is per Harq per TTI. */
5005 /* rg002.101:ccpu00102106: cannot exceed the harq Tb Size
5006 * and harq Soft Bits limit.*/
5008 /* Considering iTbs corresponding to 2 layer transmission for
5009 * codeword0(approximation) and the maxLayers supported by
5010 * this UE at this point of time. */
5011 RG_SCH_CMN_TBS_TO_MODODR(ueDl->mimoInfo.cwInfo[0].iTbs[1], modOrder);
5013 /* Bits/modOrder gives #REs, #REs/noResPerRb gives #RBs */
5014 /* rg001.301 -MOD- [ccpu00119213] : avoiding wraparound */
5015 maxRb = ((ueDl->maxSbSz)/(cellSch->dl.noResPerRb[cfi] * modOrder *\
5016 ueDl->mimoInfo.ri));
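/* Soft-buffer bound: per the ccpu00102106 note above, the limit is per
 * HARQ process, so maxSbSz (soft-channel bits) divided by
 * (REs per RB for this CFI * bits per RE from modOrder * number of
 * layers) approximates the largest RB count whose coded bits still fit
 * one HARQ process; it is then reconciled with maxDlBwPerUe below. */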
5017 if (cellSch->dl.isDlFreqSel)
5019 /* Rounding off to left nearest multiple of RBG size */
5020 maxRb -= maxRb % cell->rbgSize;
5022 ueDl->maxRb = RGSCH_MIN(maxRb, cellSch->dl.maxDlBwPerUe);
5023 if (cellSch->dl.isDlFreqSel)
5025 /* Rounding off to right nearest multiple of RBG size */
5026 if (ueDl->maxRb % cell->rbgSize)
5028 ueDl->maxRb += (cell->rbgSize -
5029 (ueDl->maxRb % cell->rbgSize));
5033 /* Set the index of the cwInfo, which is better in terms of
5034 * efficiency. If RI<2, only 1 CW, hence btrCwIdx shall be 0 */
5035 if (ueDl->mimoInfo.ri < 2)
5037 ueDl->mimoInfo.btrCwIdx = 0;
5041 if (ueDl->mimoInfo.cwInfo[0].eff[ueDl->mimoInfo.cwInfo[0].noLyr-1] <\
5042 ueDl->mimoInfo.cwInfo[1].eff[ueDl->mimoInfo.cwInfo[1].noLyr-1])
5044 ueDl->mimoInfo.btrCwIdx = 1;
5048 ueDl->mimoInfo.btrCwIdx = 0;
5058 * @brief This function updates TX Scheme.
5062 * Function: rgSCHCheckAndSetTxScheme
5063 * Purpose: Forces transmit diversity for the UE
5064 * (RG_SCH_CMN_TD_TXSCHEME_CHNG) when the iTbs in use lags
5065 * the CQI-based iTbs by more than the configured threshold,
5066 * and clears that forcing once the iTbs in use catches up
5067 * with the maximum-CQI iTbs.
5070 * Invoked by: Scheduler
5072 * @param[in] RgSchCellCb *cell
5073 * @param[in] RgSchUeCb *ue
5078 PRIVATE Void rgSCHCheckAndSetTxScheme
5084 PRIVATE Void rgSCHCheckAndSetTxScheme(cell, ue)
5089 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5090 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue ,cell);
5091 U8 cfi = cellSch->dl.currCfi;
5096 TRC2(rgSCHCheckAndSetTxScheme);
5098 maxiTbs = (*(RgSchCmnCqiToTbs*)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
5099 [RG_SCH_CMN_MAX_CQI - 1];
5100 cqiBasediTbs = (ueDl->laCb[0].cqiBasediTbs)/100;
5101 actualiTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0];
5103 if((actualiTbs < RG_SCH_TXSCHEME_CHNG_ITBS_FACTOR) && (cqiBasediTbs >
5104 actualiTbs) && ((cqiBasediTbs - actualiTbs) > RG_SCH_TXSCHEME_CHNG_THRSHD))
5106 RG_SCH_CMN_SET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
5109 if(actualiTbs >= maxiTbs)
5111 RG_SCH_CMN_UNSET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
5118 * @brief This function determines the allocation limits and
5119 * parameters that aid in DL scheduling.
5123 * Function: rgSCHCmnDlSetUeAllocLmtLa
5124 * Purpose: This function runs the downlink link-adaptation update on
5125 * the reported CQI/iTbs and refreshes the codeword
5126 * specific parameters like iTbs, eff and noLyrs, subject
5127 * to the softbuffer limitation and cell->maxDlBwPerUe.
5128 * This function is called during UE configuration and on
5129 * UeDlCqiInd.
5131 * Invoked by: Scheduler
5133 * @param[in] RgSchCellCb *cell
5134 * @param[in] RgSchUeCb *ue
5139 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa
5145 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa(cell, ue)
5153 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5154 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
5155 U8 cfi = cellSch->dl.currCfi;
5159 TRC2(rgSCHCmnDlSetUeAllocLmtLa);
5161 maxiTbs = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[RG_SCH_CMN_MAX_CQI - 1];
5162 if(ueDl->cqiFlag == TRUE)
5164 for(cwIdx=0; cwIdx < RG_SCH_CMN_MAX_CW_PER_UE; cwIdx++)
5168 /* Calculating the reported iTbs for code word 0 */
5169 reportediTbs = ue->ue5gtfCb.mcs;
5171 iTbsNew = (S32) reportediTbs;
5173 if(!ueDl->laCb[cwIdx].notFirstCqi)
5175 /* This is the first CQI report from UE */
5176 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5177 ueDl->laCb[cwIdx].notFirstCqi = TRUE;
5179 else if ((RG_ITBS_DIFF(reportediTbs, ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0])) > 5)
5181 /* Ignore this iTBS report and mark that last iTBS report was */
5182 /* ignored so that subsequently we reset the LA algorithm */
5183 ueDl->laCb[cwIdx].lastiTbsIgnored = TRUE;
5184 ueDl->laCb[cwIdx].numLastiTbsIgnored++;
5185 if( ueDl->laCb[cwIdx].numLastiTbsIgnored > 10)
5187 /* CQI reported by UE is not catching up. Reset the LA algorithm */
5188 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5189 ueDl->laCb[cwIdx].deltaiTbs = 0;
5190 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
5191 ueDl->laCb[cwIdx].numLastiTbsIgnored = 0;
5196 if (ueDl->laCb[cwIdx].lastiTbsIgnored != TRUE)
5198 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5199 (80 * ueDl->laCb[cwIdx].cqiBasediTbs))/100;
5203 /* Reset the LA as iTbs in use caught up with the value */
5204 /* reported by UE. */
5205 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5206 (80 * ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] * 100))/100;
5207 ueDl->laCb[cwIdx].deltaiTbs = 0;
5208 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
5212 iTbsNew = (ueDl->laCb[cwIdx].cqiBasediTbs + ueDl->laCb[cwIdx].deltaiTbs)/100;
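/* LA bookkeeping: cqiBasediTbs is kept scaled by 100 and tracked
 * as an exponential moving average (20% new report, 80% history),
 * while deltaiTbs holds the correction term maintained elsewhere;
 * their sum, scaled back down, becomes the working iTbs before
 * the range clipping below. */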
5214 RG_SCH_CHK_ITBS_RANGE(iTbsNew, maxiTbs);
5216 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] = RGSCH_MIN(iTbsNew, cell->thresholds.maxDlItbs);
5217 //ueDl->mimoInfo.cwInfo[cwIdx].iTbs[1] = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
5219 ue->ue5gtfCb.mcs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
5221 printf("reportediTbs[%d] cqiBasediTbs[%d] deltaiTbs[%d] iTbsNew[%d] mcs[%d] cwIdx[%d]\n",
5222 reportediTbs, ueDl->laCb[cwIdx].cqiBasediTbs, ueDl->laCb[cwIdx].deltaiTbs,
5223 iTbsNew, ue->ue5gtfCb.mcs, cwIdx);
5227 if((ue->mimoInfo.txMode != RGR_UE_TM_3) && (ue->mimoInfo.txMode != RGR_UE_TM_4))
5232 ueDl->cqiFlag = FALSE;
5239 /***********************************************************
5241 * Func : rgSCHCmnDlHqPResetTemp
5243 * Desc : Reset whatever variables were temporarily used
5244 * during UE scheduling.
5252 **********************************************************/
5254 PUBLIC Void rgSCHCmnDlHqPResetTemp
5256 RgSchDlHqProcCb *hqP
5259 PUBLIC Void rgSCHCmnDlHqPResetTemp(hqP)
5260 RgSchDlHqProcCb *hqP;
5264 TRC2(rgSCHCmnDlHqPResetTemp);
5266 /* Fix: syed having a hqP added to Lists for RB assignment rather than
5267 * a UE, as adding UE was limiting handling some scenarios */
5268 hqP->reqLnk.node = (PTR)NULLP;
5269 hqP->schdLstLnk.node = (PTR)NULLP;
5272 } /* rgSCHCmnDlHqPResetTemp */
5274 /***********************************************************
5276 * Func : rgSCHCmnDlUeResetTemp
5278 * Desc : Reset whatever variables were temporarily used
5279 * during UE scheduling.
5287 **********************************************************/
5289 PUBLIC Void rgSCHCmnDlUeResetTemp
5292 RgSchDlHqProcCb *hqP
5295 PUBLIC Void rgSCHCmnDlUeResetTemp(ue, hqP)
5297 RgSchDlHqProcCb *hqP;
5300 RgSchDlRbAlloc *allocInfo;
5301 RgSchCmnDlUe *cmnUe = RG_SCH_CMN_GET_DL_UE(ue,hqP->hqE->cell);
5306 TRC2(rgSCHCmnDlUeResetTemp);
5308 /* Fix : syed check for UE's existence was useless.
5309 * Instead we need to check that reset is done only for the
5310 * information of a scheduled harq proc, which is cmnUe->proc.
5311 * Reset should not be done for non-scheduled hqP */
5312 if((cmnUe->proc == hqP) || (cmnUe->proc == NULLP))
5314 cmnUe->proc = NULLP;
5315 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, hqP->hqE->cell);
5317 tmpCb = allocInfo->laaCb;
5319 cmMemset((U8 *)allocInfo, (U8)0, sizeof(RgSchDlRbAlloc));
5320 allocInfo->rnti = ue->ueId;
5322 allocInfo->laaCb = tmpCb;
5324 /* Fix: syed moving this to a common function for both scheduled
5325 * and non-scheduled UEs */
5326 cmnUe->outStndAlloc = 0;
5328 rgSCHCmnDlHqPResetTemp(hqP);
5331 } /* rgSCHCmnDlUeResetTemp */
5333 /***********************************************************
5335 * Func : rgSCHCmnUlUeResetTemp
5337 * Desc : Reset whatever variables were temporarily used
5338 * during UE scheduling.
5346 **********************************************************/
5348 PUBLIC Void rgSCHCmnUlUeResetTemp
5354 PUBLIC Void rgSCHCmnUlUeResetTemp(cell, ue)
5359 RgSchCmnUlUe *cmnUlUe = RG_SCH_CMN_GET_UL_UE(ue,cell);
5361 TRC2(rgSCHCmnUlUeResetTemp);
5363 cmMemset((U8 *)&cmnUlUe->alloc, (U8)0, sizeof(cmnUlUe->alloc));
5366 } /* rgSCHCmnUlUeResetTemp */
5371 * @brief This function fills the PDCCH information from dlProc.
5375 * Function: rgSCHCmnFillPdcch
5376 * Purpose: This function fills in the PDCCH information
5377 * obtained from the RgSchDlRbAlloc
5378 * during common channel scheduling(P, SI, RA - RNTI's).
5380 * Invoked by: Downlink Scheduler
5382 * @param[out] RgSchPdcch* pdcch
5383 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5388 PUBLIC Void rgSCHCmnFillPdcch
5392 RgSchDlRbAlloc *rbAllocInfo
5395 PUBLIC Void rgSCHCmnFillPdcch(cell, pdcch, rbAllocInfo)
5398 RgSchDlRbAlloc *rbAllocInfo;
5402 TRC2(rgSCHCmnFillPdcch);
5404 /* common channel pdcch filling,
5405 * only 1A and Local is supported */
5406 pdcch->rnti = rbAllocInfo->rnti;
5407 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
5408 switch(rbAllocInfo->dciFormat)
5410 #ifdef RG_5GTF /* ANOOP: ToDo: DCI format B1/B2 filling */
5411 case TFU_DCI_FORMAT_B1:
5414 pdcch->dci.u.formatB1Info.formatType = 0;
5415 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].cmnGrnt.xPDSCHRange;
5416 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].cmnGrnt.rbAssign;
5417 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = 0;
5418 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5419 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = 0;
5420 //pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].ndi;
5421 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].cmnGrnt.rv;
5422 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5423 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5424 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5425 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5426 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5427 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5428 //TODO_SID: Need to update
5429 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5430 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5431 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5432 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5433 //TODO_SID: Need to check.Currently setting 0(1 layer, ports(8) w/o OCC).
5434 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
5435 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].cmnGrnt.SCID;
5436 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
5437 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
5438 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
5440 break; /* case TFU_DCI_FORMAT_B1: */
5443 case TFU_DCI_FORMAT_B2:
5445 //printf(" RG_5GTF:: Pdcch filling with DCI format B2\n");
5447 break; /* case TFU_DCI_FORMAT_B2: */
5450 case TFU_DCI_FORMAT_1A:
5451 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
5453 /* Nprb indication at PHY for common Ch:
5454 * set the least significant bit of the tpc field to 1 if
5455 * nPrb=3 and to 0 otherwise. */
5456 if (rbAllocInfo->nPrb == 3)
5458 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 1;
5462 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 0;
5464 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
5465 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
5466 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
5467 rbAllocInfo->tbInfo[0].imcs;
5468 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = 0;
5469 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = 0;
5471 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
5473 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
5474 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
5475 rbAllocInfo->allocInfo.raType2.rbStart,
5476 rbAllocInfo->allocInfo.raType2.numRb);
5479 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = \
5482 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
5483 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
5486 break; /* case TFU_DCI_FORMAT_1A: */
5487 case TFU_DCI_FORMAT_1:
5488 pdcch->dci.u.format1Info.tpcCmd = 0;
5489 /* Avoiding this check, as we don't support Type1 RA */
5491 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
5494 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
5495 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
5496 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
5498 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
5499 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
5501 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
5502 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
5504 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
5505 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
5509 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
5510 pdcch->dci.u.format1Info.allocInfo.ndi = 0;
5511 pdcch->dci.u.format1Info.allocInfo.mcs = rbAllocInfo->tbInfo[0].imcs;
5512 pdcch->dci.u.format1Info.allocInfo.rv = 0;
5514 pdcch->dci.u.format1Info.dai = 1;
5518 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Allocator's incorrect "
5519 "dciFormat fill RNTI:%d",rbAllocInfo->rnti);
5527 * @brief This function finds whether the subframe is a special subframe or not.
5531 * Function: rgSCHCmnIsSplSubfrm
5532 * Purpose: This function finds the subframe index of the special subframe
5533 * and finds whether the current DL index matches it or not.
5535 * Invoked by: Scheduler
5537 * @param[in] U8 splfrmCnt
5538 * @param[in] U8 curSubfrmIdx
5539 * @param[in] U8 periodicity
5540 * @param[in] RgSchTddSubfrmInfo *subfrmInfo
5545 PRIVATE Bool rgSCHCmnIsSplSubfrm
5550 RgSchTddSubfrmInfo *subfrmInfo
5553 PRIVATE Bool rgSCHCmnIsSplSubfrm(splfrmCnt, curSubfrmIdx, periodicity, subfrmInfo)
5557 RgSchTddSubfrmInfo *subfrmInfo;
5563 TRC2(rgSCHCmnIsSplSubfrm);
5567 if(periodicity == RG_SCH_CMN_5_MS_PRD)
5571 dlSfCnt = ((splfrmCnt-1)/2) *\
5572 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
5573 dlSfCnt = dlSfCnt + subfrmInfo->numFrmHf1;
5577 dlSfCnt = (splfrmCnt/2) * \
5578 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
5583 dlSfCnt = splfrmCnt * subfrmInfo->numFrmHf1;
5585 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1 +\
5586 (periodicity*splfrmCnt - dlSfCnt);
5590 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1;
5593 if(splfrmIdx == curSubfrmIdx)
5602 * @brief This function updates DAI or UL index.
5606 * Function: rgSCHCmnUpdHqAndDai
5607 * Purpose: Updates the DAI based on UL-DL Configuration
5608 * index and UE. It also updates the HARQ feedback
5609 * time and 'm' index.
5613 * @param[in] RgDlHqProcCb *hqP
5614 * @param[in] RgSchDlSf *subFrm
5615 * @param[in] RgSchDlHqTbCb *tbCb
5616 * @param[in] U8 tbAllocIdx
5621 PRIVATE Void rgSCHCmnUpdHqAndDai
5623 RgSchDlHqProcCb *hqP,
5625 RgSchDlHqTbCb *tbCb,
5629 PRIVATE Void rgSCHCmnUpdHqAndDai(hqP, subFrm, tbCb,tbAllocIdx)
5630 RgSchDlHqProcCb *hqP;
5632 RgSchDlHqTbCb *tbCb;
5636 RgSchUeCb *ue = hqP->hqE->ue;
5638 TRC2(rgSCHCmnUpdHqAndDai);
5642 /* set the time at which UE shall send the feedback
5643 * for this process */
5644 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5645 subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5646 tbCb->fdbkTime.subframe = subFrm->dlFdbkInfo.subframe;
5647 tbCb->m = subFrm->dlFdbkInfo.m;
5651 /* set the time at which UE shall send the feedback
5652 * for this process */
5653 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5654 hqP->subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5655 tbCb->fdbkTime.subframe = hqP->subFrm->dlFdbkInfo.subframe;
5656 tbCb->m = hqP->subFrm->dlFdbkInfo.m;
5659    /* ccpu00132340-MOD- DAI needs to be updated for the first TB only*/
5660 if(ue && !tbAllocIdx)
5662 Bool havePdcch = (tbCb->hqP->pdcch ? TRUE : FALSE);
5665 dlDai = rgSCHCmnUpdDai(ue, &tbCb->fdbkTime, tbCb->m, havePdcch,tbCb->hqP,
5668 {/* Non SPS occasions */
5669 tbCb->hqP->pdcch->dlDai = dlDai;
5670          /* hqP->ulDai is used for N1 resource filling
5671           * when SPS occasions are present in a bundle */
5672 tbCb->hqP->ulDai = tbCb->dai;
5673 tbCb->hqP->dlDai = dlDai;
5677       /* Updating pucchFdbkIdx for both PUCCH and PUSCH
5679 tbCb->pucchFdbkIdx = tbCb->hqP->ulDai;
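/* Illustrative sketch (an assumption drawn from the surrounding logic, not
 * original code): the ulDai/dlDai bookkeeping above amounts to a pair of
 * per-(UE, feedback occasion) counters: ulDai counts every DL assignment
 * whose HARQ feedback falls in this occasion (including SPS occasions
 * without a PDCCH), while dlDai counts only assignments that carry a PDCCH.
 * A toy version with hypothetical names and types: */
#if 0
typedef struct exampleAnInfo
{
   U16 sfn;        /* feedback SFN */
   U8  subframe;   /* feedback subframe */
   U8  dlDai;      /* DL assignments with a PDCCH so far */
   U8  ulDai;      /* all DL assignments, including SPS without PDCCH */
} ExampleAnInfo;

static U8 exampleUpdDai(ExampleAnInfo *anInfo, Bool havePdcch)
{
   anInfo->ulDai += 1;
   if (havePdcch)
   {
      anInfo->dlDai += 1;
   }
   return anInfo->dlDai; /* value later signalled in the DCI DAI field */
}
#endif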
5686 * @brief This function updates DAI or UL index.
5690 * Function: rgSCHCmnUpdDai
5691  *     Purpose:  Updates the DAI in the ACK/NACK info; a valid
5692  *               UE should be passed
5696 * @param[in] RgDlHqProcCb *hqP
5697 * @param[in] RgSchDlSf *subFrm
5698 * @param[in] RgSchDlHqTbCb *tbCb
5703 PUBLIC U8 rgSCHCmnUpdDai
5706 CmLteTimingInfo *fdbkTime,
5709 RgSchDlHqProcCb *hqP,
5713 PUBLIC U8 rgSCHCmnUpdDai(ue, fdbkTime, m, havePdcch,tbCb,servCellId,hqP,ulDai)
5715 CmLteTimingInfo *fdbkTime;
5718 RgSchDlHqProcCb *hqP;
5722 RgSchTddANInfo *anInfo;
5724 U8 ackNackFdbkArrSize;
5727 TRC2(rgSCHCmnUpdDai);
5732 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
5733 hqP->hqE->cell->cellId,
5736 servCellIdx = RGSCH_PCELL_INDEX;
5738 ackNackFdbkArrSize = hqP->hqE->cell->ackNackFdbkArrSize;
5740 {/* SPS on primary cell */
5741 servCellIdx = RGSCH_PCELL_INDEX;
5742 ackNackFdbkArrSize = ue->cell->ackNackFdbkArrSize;
5746 anInfo = rgSCHUtlGetUeANFdbkInfo(ue, fdbkTime,servCellIdx);
5748 /* If no ACK/NACK feedback already present, create a new one */
5751 anInfo = &ue->cellInfo[servCellIdx]->anInfo[ue->cellInfo[servCellIdx]->nextFreeANIdx];
5752 anInfo->sfn = fdbkTime->sfn;
5753 anInfo->subframe = fdbkTime->subframe;
5754 anInfo->latestMIdx = m;
5755 /* Fixing DAI value - ccpu00109162 */
5756 /* Handle TDD case as in MIMO definition of the function */
5762 anInfo->isSpsOccasion = FALSE;
5763 /* set the free Index to store Ack/Nack Information*/
5764 ue->cellInfo[servCellIdx]->nextFreeANIdx = (ue->cellInfo[servCellIdx]->nextFreeANIdx + 1) %
5770 anInfo->latestMIdx = m;
5771 /* Fixing DAI value - ccpu00109162 */
5772 /* Handle TDD case as in MIMO definition of the function */
5773 anInfo->ulDai = anInfo->ulDai + 1;
5776 anInfo->dlDai = anInfo->dlDai + 1;
5780 /* ignoring the Scell check,
5781 * for primary cell this field is unused*/
5784 anInfo->n1ResTpcIdx = hqP->tpc;
5788    {/* As this is not required for a release PDCCH */
5789 *ulDai = anInfo->ulDai;
5792 RETVALUE(anInfo->dlDai);
5795 #endif /* ifdef LTE_TDD */
5797 PUBLIC U32 rgHqRvRetxCnt[4][2];
5798 PUBLIC U32 rgUlrate_grant;
5801 * @brief This function fills the HqP TB with rbAllocInfo.
5805 * Function: rgSCHCmnFillHqPTb
5806 * Purpose: This function fills in the HqP TB with rbAllocInfo.
5808 * Invoked by: rgSCHCmnFillHqPTb
5810 * @param[in] RgSchCellCb* cell
5811 * @param[in] RgSchDlRbAlloc *rbAllocInfo,
5812 * @param[in] U8 tbAllocIdx
5813 * @param[in] RgSchPdcch *pdcch
5819 PUBLIC Void rgSCHCmnFillHqPTb
5822 RgSchDlRbAlloc *rbAllocInfo,
5827 PUBLIC Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5829 RgSchDlRbAlloc *rbAllocInfo;
5835 PRIVATE Void rgSCHCmnFillHqPTb
5838 RgSchDlRbAlloc *rbAllocInfo,
5843 PRIVATE Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5845 RgSchDlRbAlloc *rbAllocInfo;
5849 #endif /* LTEMAC_SPS */
5851 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
5852 RgSchDlTbAllocInfo *tbAllocInfo = &rbAllocInfo->tbInfo[tbAllocIdx];
5853 RgSchDlHqTbCb *tbInfo = tbAllocInfo->tbCb;
5854 RgSchDlHqProcCb *hqP = tbInfo->hqP;
5856 TRC2(rgSCHCmnFillHqPTb);
5858 /*ccpu00120365-ADD-if tb is disabled, set mcs=0,rv=1.
5859 * Relevant for DCI format 2 & 2A as per 36.213-7.1.7.2
5861 if ( tbAllocInfo->isDisabled)
5864 tbInfo->dlGrnt.iMcs = 0;
5865 tbInfo->dlGrnt.rv = 1;
5867 /* Fill for TB retransmission */
5868 else if (tbInfo->txCntr > 0)
5871 tbInfo->timingInfo = cmnCellDl->time;
5873 if ((tbInfo->isAckNackDtx == TFU_HQFDB_DTX))
5875 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5876 rgHqRvRetxCnt[tbInfo->dlGrnt.rv][tbInfo->tbIdx]++;
5880 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[++(tbInfo->ccchSchdInfo.rvIdx) & 0x03];
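/* Illustrative sketch (assumption): rgSchCmnDlRvTbl above is indexed with a
 * modulo-4 wrapping counter; in LTE the redundancy versions are commonly
 * cycled in the order {0, 2, 3, 1}. The table contents and helper name below
 * are assumptions for illustration only. */
#if 0
static const U8 exampleDlRvTbl[4] = {0, 2, 3, 1};

static U8 exampleNextRv(U8 *rvIdx)
{
   /* advance the index with a modulo-4 wrap, then look up the RV */
   *rvIdx = (U8)((*rvIdx + 1) & 0x03);
   return exampleDlRvTbl[*rvIdx];
}
#endif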
5883 /* fill the scheduler information of hqProc */
5884 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5885 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx,hqP->tbInfo,tbInfo->tbIdx );
5886 rgSCHDhmHqTbRetx(hqP->hqE, tbInfo->timingInfo, hqP, tbInfo->tbIdx);
5888 /* Fill for TB transmission */
5891 /* Fill the HqProc */
5892 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5893 tbInfo->tbSz = tbAllocInfo->bytesAlloc;
5894 tbInfo->timingInfo = cmnCellDl->time;
5896 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[0];
5897 /* fill the scheduler information of hqProc */
5898 tbInfo->ccchSchdInfo.rvIdx = 0;
5899 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5900 /* DwPts Scheduling Changes Start */
5901 /* DwPts Scheduling Changes End */
5902 cell->measurements.dlBytesCnt += tbAllocInfo->bytesAlloc;
5905 /*ccpu00120365:-ADD-only add to subFrm list if tb is not disabled */
5906 if ( tbAllocInfo->isDisabled == FALSE )
5908 /* Set the number of transmitting SM layers for this TB */
5909 tbInfo->numLyrs = tbAllocInfo->noLyr;
5910 /* Set the TB state as WAITING to indicate TB has been
5911 * considered for transmission */
5912 tbInfo->state = HQ_TB_WAITING;
5913 hqP->subFrm = rbAllocInfo->dlSf;
5914 tbInfo->hqP->pdcch = pdcch;
5915 //tbInfo->dlGrnt.numRb = rbAllocInfo->rbsAlloc;
5916 rgSCHUtlDlHqPTbAddToTx(hqP->subFrm, hqP, tbInfo->tbIdx);
5922 * @brief This function fills the PDCCH DCI format 2 information from dlProc.
5926 * Function: rgSCHCmnFillHqPPdcchDciFrmt2
5927 * Purpose: This function fills in the PDCCH information
5928 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
5929 * for dedicated service scheduling. It also
5930 * obtains TPC to be filled in from the power module.
5931 * Assign the PDCCH to HQProc.
5933 * Invoked by: Downlink Scheduler
5935 * @param[in] RgSchCellCb* cell
5936 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5937 * @param[in] RgDlHqProc* hqP
5938 * @param[out] RgSchPdcch *pdcch
5944 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2
5947 RgSchDlRbAlloc *rbAllocInfo,
5948 RgSchDlHqProcCb *hqP,
5953 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, pdcch, tpc)
5955 RgSchDlRbAlloc *rbAllocInfo;
5956 RgSchDlHqProcCb *hqP;
5962 TRC2(rgSCHCmnFillHqPPdcchDciFrmtB1B2)
5964 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
5965 //Currently hardcoding values here.
5966 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
5967 switch(rbAllocInfo->dciFormat)
5969 case TFU_DCI_FORMAT_B1:
5971 pdcch->dci.u.formatB1Info.formatType = 0;
5972 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
5973 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
5974 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
5975 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5976 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
5977 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
5978 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5979 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5980 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5981 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5982 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5983 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5984 //TODO_SID: Need to update
5985 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5986 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5987 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5988 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5989             //TODO_SID: Need to check. Currently setting 0 (1 layer, port 8 w/o OCC).
5990 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
5991 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
5992 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
5993 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
5994 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
5997 case TFU_DCI_FORMAT_B2:
5999 pdcch->dci.u.formatB2Info.formatType = 1;
6000 pdcch->dci.u.formatB2Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
6001 pdcch->dci.u.formatB2Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
6002 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
6003 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
6004 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6005 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6006 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
6007 pdcch->dci.u.formatB2Info.CSI_BSI_BRI_Req = 0;
6008 pdcch->dci.u.formatB2Info.CSIRS_BRRS_TxTiming = 0;
6009 pdcch->dci.u.formatB2Info.CSIRS_BRRS_SymbIdx = 0;
6010 pdcch->dci.u.formatB2Info.CSIRS_BRRS_ProcInd = 0;
6011 pdcch->dci.u.formatB2Info.xPUCCH_TxTiming = 0;
6012 //TODO_SID: Need to update
6013 pdcch->dci.u.formatB2Info.freqResIdx_xPUCCH = 0;
6014 pdcch->dci.u.formatB2Info.beamSwitch = 0;
6015 pdcch->dci.u.formatB2Info.SRS_Config = 0;
6016 pdcch->dci.u.formatB2Info.SRS_Symbol = 0;
6017             //TODO_SID: Need to check. Currently setting 4 (2 layers, ports 8,9 w/o OCC).
6018 pdcch->dci.u.formatB2Info.AntPorts_numLayers = 4;
6019 pdcch->dci.u.formatB2Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
6020 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
6021 pdcch->dci.u.formatB2Info.tpcCmd = 1; //tpc;
6022 pdcch->dci.u.formatB2Info.DL_PCRS = 0;
6026          RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId," 5GTF_ERROR Allocator's incorrect "
6027             "dciFormat Fill RNTI:%d",rbAllocInfo->rnti);
6034 extern U32 totPcellSCell;
6035 extern U32 addedForScell;
6036 extern U32 addedForScell1;
6037 extern U32 addedForScell2;
6039 * @brief This function fills the PDCCH information from dlProc.
6043 * Function: rgSCHCmnFillHqPPdcch
6044 * Purpose: This function fills in the PDCCH information
6045 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6046 * for dedicated service scheduling. It also
6047 * obtains TPC to be filled in from the power module.
6048 * Assign the PDCCH to HQProc.
6050 * Invoked by: Downlink Scheduler
6052 * @param[in] RgSchCellCb* cell
6053 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6054 * @param[in] RgDlHqProc* hqP
6059 PUBLIC Void rgSCHCmnFillHqPPdcch
6062 RgSchDlRbAlloc *rbAllocInfo,
6063 RgSchDlHqProcCb *hqP
6066 PUBLIC Void rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP)
6068 RgSchDlRbAlloc *rbAllocInfo;
6069 RgSchDlHqProcCb *hqP;
6072 RgSchCmnDlCell *cmnCell = RG_SCH_CMN_GET_DL_CELL(cell);
6073 RgSchPdcch *pdcch = rbAllocInfo->pdcch;
6076 TRC2(rgSCHCmnFillHqPPdcch);
6081 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6088 tpc = rgSCHPwrPucchTpcForUe(cell, hqP->hqE->ue);
6090 /* Fix: syed moving this to a common function for both scheduled
6091 * and non-scheduled UEs */
6093 pdcch->ue = hqP->hqE->ue;
6094 if (hqP->hqE->ue->csgMmbrSta == FALSE)
6096 cmnCell->ncsgPrbCnt += rbAllocInfo->rbsAlloc;
6098 cmnCell->totPrbCnt += rbAllocInfo->rbsAlloc;
6101 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlPrbUsg +=
6102 rbAllocInfo->rbsAlloc;
6103 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw0iTbs +=
6104 rbAllocInfo->tbInfo[0].iTbs;
6105 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw0iTbs ++;
6106 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6107 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6110 totPcellSCell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6111 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6113 addedForScell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6114 addedForScell1 += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6116 printf (" Hqp %d cell %d addedForScell %lu addedForScell1 %lu sfn:sf %d:%d \n",
6118 hqP->hqE->cell->cellId,
6122 cell->crntTime.subframe);
6126 hqP->hqE->cell->tenbStats->sch.dlPrbUsage[0] +=
6127 rbAllocInfo->rbsAlloc;
6128 hqP->hqE->cell->tenbStats->sch.dlSumCw0iTbs +=
6129 rbAllocInfo->tbInfo[0].iTbs;
6130 hqP->hqE->cell->tenbStats->sch.dlNumCw0iTbs ++;
6131 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6132 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6133 if (rbAllocInfo->tbInfo[1].schdlngForTb)
6135 hqP->hqE->cell->tenbStats->sch.dlSumCw1iTbs +=
6136 rbAllocInfo->tbInfo[1].iTbs;
6137 hqP->hqE->cell->tenbStats->sch.dlNumCw1iTbs ++;
6138 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw1iTbs +=
6139 rbAllocInfo->tbInfo[1].iTbs;
6140 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw1iTbs ++;
6141 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6142 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6146 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6148 addedForScell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6149 addedForScell2 += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6151 printf (" Hqp %d cell %d addedForScell %lu addedForScell2 %lu \n",
6153 hqP->hqE->cell->cellId,
6158 totPcellSCell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6162 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6163 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6166 printf ("add DL TPT is %lu sfn:sf %d:%d \n", hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt ,
6168 cell->crntTime.subframe);
6174 pdcch->rnti = rbAllocInfo->rnti;
6175 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
6176 /* Update subframe and pdcch info in HqTb control block */
6177 switch(rbAllocInfo->dciFormat)
6180 case TFU_DCI_FORMAT_B1:
6181 case TFU_DCI_FORMAT_B2:
6183 // printf(" RG_5GTF:: Pdcch filling with DCI format B1/B2\n");
6184 rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, \
6190 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6191 "Allocator's incorrect dciForamt Fill for RNTI:%d",rbAllocInfo->rnti);
6198 * @brief This function fills the PDCCH DCI format 1 information from dlProc.
6202 * Function: rgSCHCmnFillHqPPdcchDciFrmt1
6203 * Purpose: This function fills in the PDCCH information
6204 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6205 * for dedicated service scheduling. It also
6206 * obtains TPC to be filled in from the power module.
6207 * Assign the PDCCH to HQProc.
6209 * Invoked by: Downlink Scheduler
6211 * @param[in] RgSchCellCb* cell
6212 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6213 * @param[in] RgDlHqProc* hqP
6214 * @param[out] RgSchPdcch *pdcch
6220 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1
6223 RgSchDlRbAlloc *rbAllocInfo,
6224 RgSchDlHqProcCb *hqP,
6229 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1(cell, rbAllocInfo, hqP, pdcch, tpc)
6231 RgSchDlRbAlloc *rbAllocInfo;
6232 RgSchDlHqProcCb *hqP;
6239 RgSchTddANInfo *anInfo;
6243 /* For activation or reactivation,
6244 * Harq ProcId should be 0 */
6245 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6248 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1)
6250 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6251 pdcch->dci.u.format1Info.tpcCmd = tpc;
6252    /* Avoiding this check, as we don't support Type1 RA */
6254 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6257 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
6258 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
6259 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6261 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
6262 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6264 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
6265 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6267 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
6268 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
6273 if ((!(hqP->tbInfo[0].txCntr)) &&
6274 (cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6275 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6276 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV)))
6279 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6283 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6286 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6289 pdcch->dci.u.format1Info.allocInfo.ndi =
6290 rbAllocInfo->tbInfo[0].tbCb->ndi;
6291 pdcch->dci.u.format1Info.allocInfo.mcs =
6292 rbAllocInfo->tbInfo[0].imcs;
6293 pdcch->dci.u.format1Info.allocInfo.rv =
6294 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6296 if(hqP->hqE->ue != NULLP)
6299 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6300 hqP->hqE->cell->cellId,
6303 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6304 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6306 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6307 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6312 pdcch->dci.u.format1Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6316 /* Fixing DAI value - ccpu00109162 */
6317 pdcch->dci.u.format1Info.dai = RG_SCH_MAX_DAI_IDX;
6323 /* always 0 for RACH */
6324 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6326 /* Fixing DAI value - ccpu00109162 */
6327 pdcch->dci.u.format1Info.dai = 1;
6336 * @brief This function fills the PDCCH DCI format 1A information from dlProc.
6340 * Function: rgSCHCmnFillHqPPdcchDciFrmt1A
6341 * Purpose: This function fills in the PDCCH information
6342 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6343 * for dedicated service scheduling. It also
6344 * obtains TPC to be filled in from the power module.
6345 * Assign the PDCCH to HQProc.
6347 * Invoked by: Downlink Scheduler
6349 * @param[in] RgSchCellCb* cell
6350 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6351 * @param[in] RgDlHqProc* hqP
6352 * @param[out] RgSchPdcch *pdcch
6358 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A
6361 RgSchDlRbAlloc *rbAllocInfo,
6362 RgSchDlHqProcCb *hqP,
6367 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A(cell, rbAllocInfo, hqP, pdcch, tpc)
6369 RgSchDlRbAlloc *rbAllocInfo;
6370 RgSchDlHqProcCb *hqP;
6377 RgSchTddANInfo *anInfo;
6381 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6384 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1A)
6386 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6387 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
6388 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = tpc;
6389 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
6390 rbAllocInfo->tbInfo[0].imcs;
6391 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = TRUE;
6393 if ((!(hqP->tbInfo[0].txCntr)) &&
6394 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6395 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6396 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6399 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val = 0;
6403 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val
6407 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val =
6410 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = \
6411 rbAllocInfo->tbInfo[0].tbCb->ndi;
6412 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = \
6413 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6414 /* As of now, we do not support Distributed allocations */
6415 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
6416 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
6417 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
6419 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
6420 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6421 rbAllocInfo->allocInfo.raType2.rbStart,
6422 rbAllocInfo->allocInfo.raType2.numRb);
6424 if(hqP->hqE->ue != NULLP)
6427 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6428 hqP->hqE->cell->cellId,
6430 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6431 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6433 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6434 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6437 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6440 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val =
6441 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6445 /* Fixing DAI value - ccpu00109162 */
6446 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = RG_SCH_MAX_DAI_IDX;
6447 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6448 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6455 /* always 0 for RACH */
6456 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres
6459 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6460 /* Fixing DAI value - ccpu00109162 */
6461 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
6469 * @brief This function fills the PDCCH DCI format 1B information from dlProc.
6473 * Function: rgSCHCmnFillHqPPdcchDciFrmt1B
6474 * Purpose: This function fills in the PDCCH information
6475 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6476 * for dedicated service scheduling. It also
6477 * obtains TPC to be filled in from the power module.
6478 * Assign the PDCCH to HQProc.
6480 * Invoked by: Downlink Scheduler
6482 * @param[in] RgSchCellCb* cell
6483 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6484 * @param[in] RgDlHqProc* hqP
6485 * @param[out] RgSchPdcch *pdcch
6491 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B
6494 RgSchDlRbAlloc *rbAllocInfo,
6495 RgSchDlHqProcCb *hqP,
6500 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B(cell, rbAllocInfo, hqP, pdcch, tpc)
6502 RgSchDlRbAlloc *rbAllocInfo;
6503 RgSchDlHqProcCb *hqP;
6510 RgSchTddANInfo *anInfo;
6514 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6517 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1B)
6519 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6520 pdcch->dci.u.format1bInfo.tpcCmd = tpc;
6521 pdcch->dci.u.format1bInfo.allocInfo.mcs = \
6522 rbAllocInfo->tbInfo[0].imcs;
6524 if ((!(hqP->tbInfo[0].txCntr)) &&
6525 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6526 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6527 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6530 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = 0;
6534 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
6537 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
6539 pdcch->dci.u.format1bInfo.allocInfo.ndi = \
6540 rbAllocInfo->tbInfo[0].tbCb->ndi;
6541 pdcch->dci.u.format1bInfo.allocInfo.rv = \
6542 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6543 /* As of now, we do not support Distributed allocations */
6544 pdcch->dci.u.format1bInfo.allocInfo.isLocal = TRUE;
6545 pdcch->dci.u.format1bInfo.allocInfo.nGap2.pres = NOTPRSNT;
6546 pdcch->dci.u.format1bInfo.allocInfo.alloc.type =
6548 pdcch->dci.u.format1bInfo.allocInfo.alloc.u.riv =
6549 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6550 rbAllocInfo->allocInfo.raType2.rbStart,
6551 rbAllocInfo->allocInfo.raType2.numRb);
6552 /* Fill precoding Info */
6553 pdcch->dci.u.format1bInfo.allocInfo.pmiCfm = \
6554 rbAllocInfo->mimoAllocInfo.precIdxInfo >> 4;
6555 pdcch->dci.u.format1bInfo.allocInfo.tPmi = \
6556 rbAllocInfo->mimoAllocInfo.precIdxInfo & 0x0F;
6558 if(hqP->hqE->ue != NULLP)
6561 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6562 hqP->hqE->cell->cellId,
6564 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6565 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6567 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6568 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6573 pdcch->dci.u.format1bInfo.dai =
6574 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6578 pdcch->dci.u.format1bInfo.dai = RG_SCH_MAX_DAI_IDX;
6579 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6580 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6591 * @brief This function fills the PDCCH DCI format 2 information from dlProc.
6595 * Function: rgSCHCmnFillHqPPdcchDciFrmt2
6596 * Purpose: This function fills in the PDCCH information
6597 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6598 * for dedicated service scheduling. It also
6599 * obtains TPC to be filled in from the power module.
6600 * Assign the PDCCH to HQProc.
6602 * Invoked by: Downlink Scheduler
6604 * @param[in] RgSchCellCb* cell
6605 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6606 * @param[in] RgDlHqProc* hqP
6607 * @param[out] RgSchPdcch *pdcch
6613 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2
6616 RgSchDlRbAlloc *rbAllocInfo,
6617 RgSchDlHqProcCb *hqP,
6622 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2(cell, rbAllocInfo, hqP, pdcch, tpc)
6624 RgSchDlRbAlloc *rbAllocInfo;
6625 RgSchDlHqProcCb *hqP;
6632 RgSchTddANInfo *anInfo;
6636 /* ccpu00119023-ADD-For activation or reactivation,
6637 * Harq ProcId should be 0 */
6638 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6641 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2)
6643 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6644 /*ccpu00120365:-ADD-call also if tb is disabled */
6645 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6646 rbAllocInfo->tbInfo[1].isDisabled)
6648 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6650 pdcch->dci.u.format2Info.tpcCmd = tpc;
6651    /* Avoiding this check, as we don't support Type1 RA */
6653 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6656 pdcch->dci.u.format2Info.allocInfo.isAllocType0 = TRUE;
6657 pdcch->dci.u.format2Info.allocInfo.resAllocMap[0] =
6658 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6660 pdcch->dci.u.format2Info.allocInfo.resAllocMap[1] =
6661 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6663 pdcch->dci.u.format2Info.allocInfo.resAllocMap[2] =
6664 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6666 pdcch->dci.u.format2Info.allocInfo.resAllocMap[3] =
6667 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
6672 if ((!(hqP->tbInfo[0].txCntr)) &&
6673 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6674 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6675 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6678 pdcch->dci.u.format2Info.allocInfo.harqProcId = 0;
6682 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6685 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6687 /* Initialize the TB info for both the TBs */
6688 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].mcs = 0;
6689 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].rv = 1;
6690 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].mcs = 0;
6691 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].rv = 1;
6692 /* Fill tbInfo for scheduled TBs */
6693 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6694 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6695 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6696 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6697 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6698 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6699    /* If we reach this function, it is safely assumed that
6700     * rbAllocInfo->tbInfo[0] always has non-default valid values;
6701     * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6702 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6704 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6705 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6706 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6707 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6708 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6709 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6711 pdcch->dci.u.format2Info.allocInfo.transSwap =
6712 rbAllocInfo->mimoAllocInfo.swpFlg;
6713 pdcch->dci.u.format2Info.allocInfo.precoding =
6714 rbAllocInfo->mimoAllocInfo.precIdxInfo;
6716 if(hqP->hqE->ue != NULLP)
6720 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6721 hqP->hqE->cell->cellId,
6723 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6724 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6726 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6727 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6732 pdcch->dci.u.format2Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6736 pdcch->dci.u.format2Info.dai = RG_SCH_MAX_DAI_IDX;
6737 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6738 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6748 * @brief This function fills the PDCCH DCI format 2A information from dlProc.
6752 * Function: rgSCHCmnFillHqPPdcchDciFrmt2A
6753 * Purpose: This function fills in the PDCCH information
6754 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6755 * for dedicated service scheduling. It also
6756 * obtains TPC to be filled in from the power module.
6757 * Assign the PDCCH to HQProc.
6759 * Invoked by: Downlink Scheduler
6761 * @param[in] RgSchCellCb* cell
6762 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6763 * @param[in] RgDlHqProc* hqP
6764 * @param[out] RgSchPdcch *pdcch
6770 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A
6773 RgSchDlRbAlloc *rbAllocInfo,
6774 RgSchDlHqProcCb *hqP,
6779 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A(cell, rbAllocInfo, hqP, pdcch, tpc)
6781 RgSchDlRbAlloc *rbAllocInfo;
6782 RgSchDlHqProcCb *hqP;
6788 RgSchTddANInfo *anInfo;
6792 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6795 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2A)
6797 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6798 /*ccpu00120365:-ADD-call also if tb is disabled */
6799 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6800 rbAllocInfo->tbInfo[1].isDisabled)
6803 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6806 pdcch->dci.u.format2AInfo.tpcCmd = tpc;
6807    /* Avoiding this check, as we don't support Type1 RA */
6809 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6812 pdcch->dci.u.format2AInfo.allocInfo.isAllocType0 = TRUE;
6813 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[0] =
6814 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6816 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[1] =
6817 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6819 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[2] =
6820 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6822 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[3] =
6823 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
6828 if ((!(hqP->tbInfo[0].txCntr)) &&
6829 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6830 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6831 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6834 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = 0;
6838 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6841 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6843 /* Initialize the TB info for both the TBs */
6844 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].mcs = 0;
6845 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].rv = 1;
6846 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].mcs = 0;
6847 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].rv = 1;
6848 /* Fill tbInfo for scheduled TBs */
6849 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6850 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6851 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6852 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6853 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6854 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6855    /* If we reach this function, it is safely assumed that
6856     * rbAllocInfo->tbInfo[0] always has non-default valid values;
6857     * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6859 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6861 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6862 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6863 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6864 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6865 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6866 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6869 pdcch->dci.u.format2AInfo.allocInfo.transSwap =
6870 rbAllocInfo->mimoAllocInfo.swpFlg;
6871 pdcch->dci.u.format2AInfo.allocInfo.precoding =
6872 rbAllocInfo->mimoAllocInfo.precIdxInfo;
6874 if(hqP->hqE->ue != NULLP)
6877 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6878 hqP->hqE->cell->cellId,
6880 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6881 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6883 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6884 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6889 pdcch->dci.u.format2AInfo.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6893 pdcch->dci.u.format2AInfo.dai = RG_SCH_MAX_DAI_IDX;
6894 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6895 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6907 * @brief init of Sch vars.
6911 * Function: rgSCHCmnInitVars
6912 Purpose: Initialization of various UL subframe indices
6914 * @param[in] RgSchCellCb *cell
6919 PRIVATE Void rgSCHCmnInitVars
6924 PRIVATE Void rgSCHCmnInitVars(cell)
6928 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6930 TRC2(rgSCHCmnInitVars);
6932 cellUl->idx = RGSCH_INVALID_INFO;
6933 cellUl->schdIdx = RGSCH_INVALID_INFO;
6934 cellUl->schdHqProcIdx = RGSCH_INVALID_INFO;
6935 cellUl->msg3SchdIdx = RGSCH_INVALID_INFO;
6937 cellUl->emtcMsg3SchdIdx = RGSCH_INVALID_INFO;
6939 cellUl->msg3SchdHqProcIdx = RGSCH_INVALID_INFO;
6940 cellUl->rcpReqIdx = RGSCH_INVALID_INFO;
6941 cellUl->hqFdbkIdx[0] = RGSCH_INVALID_INFO;
6942 cellUl->hqFdbkIdx[1] = RGSCH_INVALID_INFO;
6943 cellUl->reTxIdx[0] = RGSCH_INVALID_INFO;
6944 cellUl->reTxIdx[1] = RGSCH_INVALID_INFO;
6945 /* Stack Crash problem for TRACE5 Changes. Added the return below */
6952  * @brief Update of scheduler variables per TTI.
6956 * Function: rgSCHCmnUpdVars
6957  *     Purpose:  Update of scheduler variables per TTI.
6959 * @param[in] RgSchCellCb *cell
6964 PUBLIC Void rgSCHCmnUpdVars
6969 PUBLIC Void rgSCHCmnUpdVars(cell)
6973 CmLteTimingInfo timeInfo;
6974 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6977 TRC2(rgSCHCmnUpdVars);
6979 idx = (cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.subframe);
6980 cellUl->idx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6982 printf("idx %d cellUl->idx %d RGSCH_NUM_SUB_FRAMES_5G %d time(%d %d) \n",idx,cellUl->idx ,RGSCH_NUM_SUB_FRAMES_5G,cell->crntTime.sfn,cell->crntTime.subframe);
6984    /* Need to schedule for the subframe after SCHED_DELTA */
6985 /* UL allocation has been advanced by 1 subframe
6986 * so that we do not wrap around and send feedback
6987 * before the data is even received by the PHY */
6988 /* Introduced timing delta for UL control */
6989 idx = (cellUl->idx + TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA);
6990 cellUl->schdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6992 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
6993 TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA)
6994 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
6996 /* ccpu00127193 filling schdTime for logging and enhancement purpose*/
6997 cellUl->schdTime = timeInfo;
6999 /* msg3 scheduling two subframes after general scheduling */
7000 idx = (cellUl->idx + RG_SCH_CMN_DL_DELTA + RGSCH_RARSP_MSG3_DELTA);
7001 cellUl->msg3SchdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
7003 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
7004 RG_SCH_CMN_DL_DELTA+ RGSCH_RARSP_MSG3_DELTA)
7005 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
7007 idx = (cellUl->idx + TFU_RECPREQ_DLDELTA);
7009 cellUl->rcpReqIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
7011    /* Downlink harq feedback is some time after data reception / harq failure */
7012 /* Since feedback happens prior to scheduling being called, we add 1 to */
7013 /* take care of getting the correct subframe for feedback */
7014 idx = (cellUl->idx - TFU_CRCIND_ULDELTA + RG_SCH_CMN_UL_NUM_SF);
7016 printf("Finally setting cellUl->hqFdbkIdx[0] = %d TFU_CRCIND_ULDELTA %d RG_SCH_CMN_UL_NUM_SF %d\n",idx,TFU_CRCIND_ULDELTA,RG_SCH_CMN_UL_NUM_SF);
7018 cellUl->hqFdbkIdx[0] = (idx % (RG_SCH_CMN_UL_NUM_SF));
7020 idx = ((cellUl->schdIdx) % (RG_SCH_CMN_UL_NUM_SF));
7022 cellUl->reTxIdx[0] = (U8) idx;
7024 printf("cellUl->hqFdbkIdx[0] %d cellUl->reTxIdx[0] %d \n",cellUl->hqFdbkIdx[0], cellUl->reTxIdx[0] );
7026 /* RACHO: update cmn sched specific RACH variables,
7027 * mainly the prachMaskIndex */
7028 rgSCHCmnUpdRachParam(cell);
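/* Illustrative sketch (not part of the original code): every index derived
 * above is a slot in a circular array of RG_SCH_CMN_UL_NUM_SF uplink
 * subframe contexts; forward deltas are simply added, while backward deltas
 * (e.g. for the CRC/HARQ feedback index) are first biased by the ring size
 * so the modulo operand never goes negative. Helper names are hypothetical. */
#if 0
static U32 exampleFwdIdx(U32 crntIdx, U32 delta, U32 ringSz)
{
   /* e.g. schdIdx, msg3SchdIdx: current index advanced by a positive delta */
   return ((crntIdx + delta) % ringSz);
}

static U32 exampleBwdIdx(U32 crntIdx, U32 delta, U32 ringSz)
{
   /* e.g. hqFdbkIdx: step back by delta, biased by the ring size */
   return ((crntIdx + ringSz - delta) % ringSz);
}
#endif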
7037 * @brief To get uplink subframe index associated with current PHICH
7042 * Function: rgSCHCmnGetPhichUlSfIdx
7043 * Purpose: Gets uplink subframe index associated with current PHICH
7044 * transmission based on SFN and subframe no
7046 * @param[in] CmLteTimingInfo *timeInfo
7047 * @param[in] RgSchCellCb *cell
7052 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx
7054 CmLteTimingInfo *timeInfo,
7058 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx(timeInfo, cell)
7059 CmLteTimingInfo *timeInfo;
7063 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7065 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7071 TRC2(rgSCHCmnGetPhichUlSfIdx);
7073 dlsf = rgSCHUtlSubFrmGet(cell, *timeInfo);
7075 if(dlsf->phichOffInfo.sfnOffset == RGSCH_INVALID_INFO)
7077 RETVALUE(RGSCH_INVALID_INFO);
7079 subframe = dlsf->phichOffInfo.subframe;
7081 sfn = (RGSCH_MAX_SFN + timeInfo->sfn -
7082 dlsf->phichOffInfo.sfnOffset) % RGSCH_MAX_SFN;
7084    /* ccpu00130980: numUlSf(U16) parameter added to avoid the integer
7085     * wrap-around case so that idx is computed correctly */
7086 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7087 numUlSf = ((numUlSf * sfn) + rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subframe]) - 1;
7088 idx = numUlSf % (cellUl->numUlSubfrms);
7094 * @brief To get uplink subframe index.
7099 * Function: rgSCHCmnGetUlSfIdx
7100 * Purpose: Gets uplink subframe index based on SFN and subframe number.
7102 * @param[in] CmLteTimingInfo *timeInfo
7103 * @param[in] U8 ulDlCfgIdx
7108 PUBLIC U8 rgSCHCmnGetUlSfIdx
7110 CmLteTimingInfo *timeInfo,
7114 PUBLIC U8 rgSCHCmnGetUlSfIdx(timeInfo, cell)
7115 CmLteTimingInfo *timeInfo;
7119 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7120 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7124 TRC2(rgSCHCmnGetUlSfIdx);
7126    /* ccpu00130980: numUlSf(U16) parameter added to avoid the integer
7127     * wrap-around case so that idx is computed correctly */
7128 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7129 numUlSf = ((numUlSf * timeInfo->sfn) + \
7130 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->subframe]) - 1;
7131 idx = numUlSf % (cellUl->numUlSubfrms);
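/* Illustrative sketch (assumption): rgSchTddNumUlSubfrmTbl[cfg][sf] appears
 * to hold the cumulative number of UL subframes up to and including subframe
 * sf for a given UL/DL configuration, so the absolute UL subframe count is
 * frames * (UL subframes per frame) + cumulative-within-frame, minus one to
 * count from zero. The table values below correspond to TDD UL/DL
 * configuration 0 (UL subframes 2,3,4,7,8,9) and, like the helper name, are
 * given for illustration only; a valid UL (sfn, subframe) is assumed. */
#if 0
static const U16 exampleCumUlSfTbl[10] = {0, 0, 1, 2, 3, 3, 3, 4, 5, 6};

static U8 exampleGetUlSfIdx(U16 sfn, U8 subframe, U16 numUlSubfrms)
{
   U16 numUlSf;

   /* total UL subframes elapsed up to (sfn, subframe), counted from 0 */
   numUlSf = (U16)((exampleCumUlSfTbl[9] * sfn) +
                   exampleCumUlSfTbl[subframe] - 1);
   return (U8)(numUlSf % numUlSubfrms);
}
#endif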
7139 * @brief To get uplink hq index.
7144 * Function: rgSCHCmnGetUlHqProcIdx
7145 * Purpose: Gets uplink subframe index based on SFN and subframe number.
7147 * @param[in] CmLteTimingInfo *timeInfo
7148 * @param[in] U8 ulDlCfgIdx
7153 PUBLIC U8 rgSCHCmnGetUlHqProcIdx
7155 CmLteTimingInfo *timeInfo,
7159 PUBLIC U8 rgSCHCmnGetUlHqProcIdx(timeInfo, cell)
7160 CmLteTimingInfo *timeInfo;
7168 numUlSf = (timeInfo->sfn * RGSCH_NUM_SUB_FRAMES_5G + timeInfo->subframe);
7169 procId = numUlSf % RGSCH_NUM_UL_HQ_PROC;
7171 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7172 /*ccpu00130639 - MOD - To get correct UL HARQ Proc IDs for all UL/DL Configs*/
7174 S8 sfnCycle = cell->tddHqSfnCycle;
7175 U8 numUlHarq = rgSchTddUlNumHarqProcTbl[ulDlCfgIdx]
7177 /* TRACE 5 Changes */
7178 TRC2(rgSCHCmnGetUlHqProcIdx);
7180 /* Calculate the number of UL SF in one SFN */
7181 numUlSfInSfn = RGSCH_NUM_SUB_FRAMES -
7182 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7184 /* Check for the SFN wrap around case */
7185 if(cell->crntTime.sfn == 1023 && timeInfo->sfn == 0)
7189 else if(cell->crntTime.sfn == 0 && timeInfo->sfn == 1023)
7191 /* sfnCycle decremented by 1 */
7192 sfnCycle = (sfnCycle + numUlHarq-1) % numUlHarq;
7194 /* Calculate the total number of UL sf */
7195 /* -1 is done since uplink sf are counted from 0 */
7196 numUlSf = numUlSfInSfn * (timeInfo->sfn + (sfnCycle*1024)) +
7197 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->subframe] - 1;
7199 procId = numUlSf % numUlHarq;
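/* Illustrative sketch (not original code): on the non-TDD path above, the UL
 * HARQ process id is simply the absolute subframe count modulo the number of
 * UL HARQ processes. The constants below (10 subframes per frame, 8
 * processes) are placeholders for RGSCH_NUM_SUB_FRAMES_5G and
 * RGSCH_NUM_UL_HQ_PROC, whose actual values are not assumed here. */
#if 0
static U8 exampleUlHqProcId(U16 sfn, U8 subframe)
{
   U32 absSf;

   absSf = ((U32)sfn * 10) + subframe;  /* absolute subframe count */
   return (U8)(absSf % 8);              /* process id cycles 0..7 */
}
#endif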
7205 /* UL_ALLOC_CHANGES */
7206 /***********************************************************
7208 * Func : rgSCHCmnUlFreeAlloc
7210 * Desc : Free an allocation - invokes UHM and releases
7211 * alloc for the scheduler
7212  *        Doesn't need the subframe as an argument
7220 **********************************************************/
7222 PUBLIC Void rgSCHCmnUlFreeAlloc
7228 PUBLIC Void rgSCHCmnUlFreeAlloc(cell, alloc)
7230 RgSchUlAlloc *alloc;
7233 RgSchUlHqProcCb *hqProc;
7234    TRC2(rgSCHCmnUlFreeAlloc);
7238 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7239 if ((alloc->hqProc->remTx == 0) &&
7240 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7243 RgSchRaCb *raCb = alloc->raCb;
7244 rgSCHUhmFreeProc(alloc->hqProc, cell);
7245 rgSCHUtlUlAllocRelease(alloc);
7246 rgSCHRamDelRaCb(cell, raCb, TRUE);
7251 hqProc = alloc->hqProc;
7252 rgSCHUtlUlAllocRelease(alloc);
7253 rgSCHUhmFreeProc(hqProc, cell);
7258 /***********************************************************
7260 * Func : rgSCHCmnUlFreeAllocation
7262 * Desc : Free an allocation - invokes UHM and releases
7263 * alloc for the scheduler
7271 **********************************************************/
7273 PUBLIC Void rgSCHCmnUlFreeAllocation
7280 PUBLIC Void rgSCHCmnUlFreeAllocation(cell, sf, alloc)
7283 RgSchUlAlloc *alloc;
7286 RgSchUlHqProcCb *hqProc;
7288 TRC2(rgSCHCmnUlFreeAllocation);
7292 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7293 if ((alloc->hqProc->remTx == 0) &&
7294 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7297 RgSchRaCb *raCb = alloc->raCb;
7298 rgSCHUhmFreeProc(alloc->hqProc, cell);
7299 rgSCHUtlUlAllocRls(sf, alloc);
7300 rgSCHRamDelRaCb(cell, raCb, TRUE);
7305 hqProc = alloc->hqProc;
7306 rgSCHUhmFreeProc(hqProc, cell);
7308 /* re-setting the PRB count while freeing the allocations */
7311 rgSCHUtlUlAllocRls(sf, alloc);
7317 * @brief This function implements PDCCH allocation for an UE
7318 * in the currently running subframe.
7322 * Function: rgSCHCmnPdcchAllocCrntSf
7323 * Purpose: This function determines current DL subframe
7324 * and UE DL CQI to call the actual pdcch allocator
7326 * Note that this function is called only
7327 * when PDCCH request needs to be made during
7328 * uplink scheduling.
7330 * Invoked by: Scheduler
7332 * @param[in] RgSchCellCb *cell
7333 * @param[in] RgSchUeCb *ue
7334 * @return RgSchPdcch *
7335 * -# NULLP when unsuccessful
7338 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf
7344 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf(cell, ue)
7349 CmLteTimingInfo frm = cell->crntTime;
7350 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7352 RgSchPdcch *pdcch = NULLP;
7354 TRC2(rgSCHCmnPdcchAllocCrntSf);
7355 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7356 sf = rgSCHUtlSubFrmGet(cell, frm);
7359 if (ue->allocCmnUlPdcch)
7361 pdcch = rgSCHCmnCmnPdcchAlloc(cell, sf);
7362 /* Since CRNTI Scrambled */
7365 pdcch->dciNumOfBits = ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
7371 //pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, y, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_0, FALSE);
7372 pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_A1, FALSE);
7377 /***********************************************************
7379 * Func : rgSCHCmnUlAllocFillNdmrs
7381 * Desc : Determines and fills N_dmrs for a UE uplink
7386  *  Notes: N_dmrs determination is straightforward, since
7387  *         it is configured per subband
7391 **********************************************************/
7393 PUBLIC Void rgSCHCmnUlAllocFillNdmrs
7395 RgSchCmnUlCell *cellUl,
7399 PUBLIC Void rgSCHCmnUlAllocFillNdmrs(cellUl, alloc)
7400 RgSchCmnUlCell *cellUl;
7401 RgSchUlAlloc *alloc;
7404 TRC2(rgSCHCmnUlAllocFillNdmrs);
7405 alloc->grnt.nDmrs = cellUl->dmrsArr[alloc->sbStart];
7409 /***********************************************************
7411 * Func : rgSCHCmnUlAllocLnkHqProc
7413 * Desc : Links a new allocation for an UE with the
7414 * appropriate HARQ process of the UE.
7422 **********************************************************/
7424 PUBLIC Void rgSCHCmnUlAllocLnkHqProc
7427 RgSchUlAlloc *alloc,
7428 RgSchUlHqProcCb *proc,
7432 PUBLIC Void rgSCHCmnUlAllocLnkHqProc(ue, alloc, proc, isRetx)
7434 RgSchUlAlloc *alloc;
7435 RgSchUlHqProcCb *proc;
7439 TRC2(rgSCHCmnUlAllocLnkHqProc);
7443 rgSCHCmnUlAdapRetx(alloc, proc);
7447 #ifdef LTE_L2_MEAS /* L2_COUNTERS */
7450 rgSCHUhmNewTx(proc, (((RgUeUlHqCb*)proc->hqEnt)->maxHqRetx), alloc);
7456 * @brief This function releases a PDCCH in the subframe that is
7457 * currently being allocated for.
7461 * Function: rgSCHCmnPdcchRlsCrntSf
7462 * Purpose: This function determines current DL subframe
7463 * which is considered for PDCCH allocation,
7464 * and then calls the actual function that
7465 * releases a PDCCH in a specific subframe.
7466 * Note that this function is called only
7467 * when PDCCH release needs to be made during
7468 * uplink scheduling.
7470 * Invoked by: Scheduler
7472 * @param[in] RgSchCellCb *cell
7473 * @param[in] RgSchPdcch *pdcch
7477 PUBLIC Void rgSCHCmnPdcchRlsCrntSf
7483 PUBLIC Void rgSCHCmnPdcchRlsCrntSf(cell, pdcch)
7488 CmLteTimingInfo frm = cell->crntTime;
7491 TRC2(rgSCHCmnPdcchRlsCrntSf);
7493 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7494 sf = rgSCHUtlSubFrmGet(cell, frm);
7495 rgSCHUtlPdcchPut(cell, &sf->pdcchInfo, pdcch);
7498 /***********************************************************
7500 * Func : rgSCHCmnUlFillPdcchWithAlloc
7502 * Desc : Fills a PDCCH with format 0 information.
7510 **********************************************************/
7512 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc
7515 RgSchUlAlloc *alloc,
7519 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc(pdcch, alloc, ue)
7521 RgSchUlAlloc *alloc;
7526 TRC2(rgSCHCmnUlFillPdcchWithAlloc);
7529 pdcch->rnti = alloc->rnti;
7530 //pdcch->dci.dciFormat = TFU_DCI_FORMAT_A2;
7531 pdcch->dci.dciFormat = alloc->grnt.dciFrmt;
7533 //Currently hardcoding values here.
7534 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
7535 switch(pdcch->dci.dciFormat)
7537 case TFU_DCI_FORMAT_A1:
7539 pdcch->dci.u.formatA1Info.formatType = 0;
7540 pdcch->dci.u.formatA1Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7541 pdcch->dci.u.formatA1Info.xPUSCH_TxTiming = 0;
7542 pdcch->dci.u.formatA1Info.RBAssign = alloc->grnt.rbAssign;
7543 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7544 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7545 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7546 pdcch->dci.u.formatA1Info.CSI_BSI_BRI_Req = 0;
7547 pdcch->dci.u.formatA1Info.CSIRS_BRRS_TxTiming = 0;
7548 pdcch->dci.u.formatA1Info.CSIRS_BRRS_SymbIdx = 0;
7549 pdcch->dci.u.formatA1Info.CSIRS_BRRS_ProcInd = 0;
7550 pdcch->dci.u.formatA1Info.numBSI_Reports = 0;
7551 pdcch->dci.u.formatA1Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7552 pdcch->dci.u.formatA1Info.beamSwitch = 0;
7553 pdcch->dci.u.formatA1Info.SRS_Config = 0;
7554 pdcch->dci.u.formatA1Info.SRS_Symbol = 0;
7555 pdcch->dci.u.formatA1Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7556 pdcch->dci.u.formatA1Info.SCID = alloc->grnt.SCID;
7557 pdcch->dci.u.formatA1Info.PMI = alloc->grnt.PMI;
7558 pdcch->dci.u.formatA1Info.UL_PCRS = 0;
7559 pdcch->dci.u.formatA1Info.tpcCmd = alloc->grnt.tpc;
7562 case TFU_DCI_FORMAT_A2:
7564 pdcch->dci.u.formatA2Info.formatType = 1;
7565 pdcch->dci.u.formatA2Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7566 pdcch->dci.u.formatA2Info.xPUSCH_TxTiming = 0;
7567 pdcch->dci.u.formatA2Info.RBAssign = alloc->grnt.rbAssign;
7568 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7569 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7570 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7571 pdcch->dci.u.formatA2Info.CSI_BSI_BRI_Req = 0;
7572 pdcch->dci.u.formatA2Info.CSIRS_BRRS_TxTiming = 0;
7573 pdcch->dci.u.formatA2Info.CSIRS_BRRS_SymbIdx = 0;
7574 pdcch->dci.u.formatA2Info.CSIRS_BRRS_ProcInd = 0;
7575 pdcch->dci.u.formatA2Info.numBSI_Reports = 0;
7576 pdcch->dci.u.formatA2Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7577 pdcch->dci.u.formatA2Info.beamSwitch = 0;
7578 pdcch->dci.u.formatA2Info.SRS_Config = 0;
7579 pdcch->dci.u.formatA2Info.SRS_Symbol = 0;
7580 pdcch->dci.u.formatA2Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7581 pdcch->dci.u.formatA2Info.SCID = alloc->grnt.SCID;
7582 pdcch->dci.u.formatA2Info.PMI = alloc->grnt.PMI;
7583 pdcch->dci.u.formatA2Info.UL_PCRS = 0;
7584 pdcch->dci.u.formatA2Info.tpcCmd = alloc->grnt.tpc;
7588          RLOG1(L_ERROR," 5GTF_ERROR UL Allocator's incorrect "
7589             "dciFormat Fill RNTI:%d",alloc->rnti);
7597 /***********************************************************
7599 * Func : rgSCHCmnUlAllocFillTpc
7601 * Desc : Determines and fills TPC for an UE allocation.
7609 **********************************************************/
7611 PUBLIC Void rgSCHCmnUlAllocFillTpc
7618 PUBLIC Void rgSCHCmnUlAllocFillTpc(cell, ue, alloc)
7621 RgSchUlAlloc *alloc;
7624 TRC2(rgSCHCmnUlAllocFillTpc);
7625 alloc->grnt.tpc = rgSCHPwrPuschTpcForUe(cell, ue);
7630 /***********************************************************
7632 * Func : rgSCHCmnAddUeToRefreshQ
7634 * Desc : Adds a UE to refresh queue, so that the UE is
7635  *        periodically triggered to refresh its GBR and
7644 **********************************************************/
7646 PRIVATE Void rgSCHCmnAddUeToRefreshQ
7653 PRIVATE Void rgSCHCmnAddUeToRefreshQ(cell, ue, wait)
7659 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
7661 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
7663 TRC2(rgSCHCmnAddUeToRefreshQ);
7666 cmMemset((U8 *)&arg, 0, sizeof(arg));
7667 arg.tqCp = &sched->tmrTqCp;
7668 arg.tq = sched->tmrTq;
7669 arg.timers = &ueSchd->tmr;
7673 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
7680 * @brief Perform UE reset procedure.
7684 * Function : rgSCHCmnUlUeReset
7686  *     This function resets the BSR and triggers
7687  *     the UL-specific scheduler
7688  *     to perform the UE reset procedure.
7690 * @param[in] RgSchCellCb *cell
7691 * @param[in] RgSchUeCb *ue
7695 PRIVATE Void rgSCHCmnUlUeReset
7701 PRIVATE Void rgSCHCmnUlUeReset(cell, ue)
7706 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7707 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7709 RgSchCmnLcg *lcgCmn;
7711 RgSchCmnAllocRecord *allRcd;
7712 TRC2(rgSCHCmnUlUeReset);
7714 ue->ul.minReqBytes = 0;
7715 ue->ul.totalBsr = 0;
7717 ue->ul.nonGbrLcgBs = 0;
7718 ue->ul.effAmbr = ue->ul.cfgdAmbr;
7720 node = ueUl->ulAllocLst.first;
7723 allRcd = (RgSchCmnAllocRecord *)node->node;
7727 for(lcgCnt = 0; lcgCnt < RGSCH_MAX_LCG_PER_UE; lcgCnt++)
7729 lcgCmn = RG_SCH_CMN_GET_UL_LCG(&ue->ul.lcgArr[lcgCnt]);
7731 lcgCmn->reportedBs = 0;
7732 lcgCmn->effGbr = lcgCmn->cfgdGbr;
7733 lcgCmn->effDeltaMbr = lcgCmn->deltaMbr;
7735 rgSCHCmnUlUeDelAllocs(cell, ue);
7737 ue->isSrGrant = FALSE;
7739 cellSchd->apisUl->rgSCHUlUeReset(cell, ue);
7741 /* Stack Crash problem for TRACE5 changes. Added the return below */
7747 * @brief RESET UL CQI and DL CQI&RI to conservative values
7748 * for a reestablishing UE.
7752 * Function : rgSCHCmnResetRiCqi
7754 * RESET UL CQI and DL CQI&RI to conservative values
7755 * for a reestablishing UE
7757 * @param[in] RgSchCellCb *cell
7758 * @param[in] RgSchUeCb *ue
7762 PRIVATE Void rgSCHCmnResetRiCqi
7768 PRIVATE Void rgSCHCmnResetRiCqi(cell, ue)
7773 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7774 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
7775 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7776 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7778 TRC2(rgSCHCmnResetRiCqi);
7780 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
7781 cell->isCpUlExtend);
7783 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
7784 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
7785 ueDl->mimoInfo.ri = 1;
7786 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
7787 (ue->mimoInfo.txMode == RGR_UE_TM_6))
7789 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
7791 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
7793 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
7796 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
7798 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
7802 /* Request for an early Aper CQI in case of reest */
7803 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
7804 if(acqiCb && acqiCb->aCqiCfg.pres)
7806 acqiCb->aCqiTrigWt = 0;
7814 * @brief Perform UE reset procedure.
7818 * Function : rgSCHCmnDlUeReset
7820  *     This function resets the BO and triggers
7821  *     the DL-specific scheduler
7822  *     to perform the UE reset procedure.
7824 * @param[in] RgSchCellCb *cell
7825 * @param[in] RgSchUeCb *ue
7829 PRIVATE Void rgSCHCmnDlUeReset
7835 PRIVATE Void rgSCHCmnDlUeReset(cell, ue)
7840 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7841 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
7842 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7844 TRC2(rgSCHCmnDlUeReset);
7846 if (ueDl->rachInfo.poLnk.node != NULLP)
7848 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
7851 /* Fix: syed Remove from TA List if this UE is there.
7852     * If the TA timer is running, stop it */
7853 if (ue->dlTaLnk.node)
7855 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
7856 ue->dlTaLnk.node = (PTR)NULLP;
7858 else if (ue->taTmr.tmrEvnt != TMR_NONE)
7860 rgSCHTmrStopTmr(cell, ue->taTmr.tmrEvnt, ue);
7863 cellSchd->apisDl->rgSCHDlUeReset(cell, ue);
7867 rgSCHSCellDlUeReset(cell,ue);
7873 * @brief Perform UE reset procedure.
7877 * Function : rgSCHCmnUeReset
7879  *     This function triggers the specific scheduler
7880  *     to perform the UE reset procedure.
7882 * @param[in] RgSchCellCb *cell
7883 * @param[in] RgSchUeCb *ue
7889 PUBLIC Void rgSCHCmnUeReset
7895 PUBLIC Void rgSCHCmnUeReset(cell, ue)
7902 RgInfResetHqEnt hqEntRstInfo;
7904 TRC2(rgSCHCmnUeReset);
7905 /* RACHO: remove UE from pdcch, handover and rapId assoc Qs */
7906 rgSCHCmnDelRachInfo(cell, ue);
7908 rgSCHPwrUeReset(cell, ue);
7910 rgSCHCmnUlUeReset(cell, ue);
7911 rgSCHCmnDlUeReset(cell, ue);
7914    /* Making allocCmnUlPdcch TRUE to allocate DCI0/1A from the common search space,
7915       because when multiple cells are added the CqiReq field is 2 bits.
7916       This flag will be set to FALSE once the SCell is READY */
7917 ue->allocCmnUlPdcch = TRUE;
7920 /* Fix : syed RESET UL CQI and DL CQI&RI to conservative values
7921 * for a reestablishing UE */
7922 /*Reset Cqi Config for all the configured cells*/
7923 for (idx = 0;idx < CM_LTE_MAX_CELLS; idx++)
7925 if (ue->cellInfo[idx] != NULLP)
7927 rgSCHCmnResetRiCqi(ue->cellInfo[idx]->cell, ue);
7930 /*After Reset Trigger APCQI for Pcell*/
7931 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
7932 if(pCellInfo->acqiCb.aCqiCfg.pres)
7934 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
7937 /* sending HqEnt reset to MAC */
7938 hqEntRstInfo.cellId = cell->cellId;
7939 hqEntRstInfo.crnti = ue->ueId;
7941 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
7942 RgSchMacRstHqEnt(&pst,&hqEntRstInfo);
7948 * @brief UE out of MeasGap or AckNackReptn.
7952 * Function : rgSCHCmnActvtUlUe
7954  *     This function triggers the specific scheduler
7955  *     to start considering the UE for scheduling.
7957 * @param[in] RgSchCellCb *cell
7958 * @param[in] RgSchUeCb *ue
7964 PUBLIC Void rgSCHCmnActvtUlUe
7970 PUBLIC Void rgSCHCmnActvtUlUe(cell, ue)
7975 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7976 TRC2(rgSCHCmnActvtUlUe);
7978 /* : take care of this in UL retransmission */
7979 cellSchd->apisUl->rgSCHUlActvtUe(cell, ue);
7984 * @brief UE out of MeasGap or AckNackReptn.
7988 * Function : rgSCHCmnActvtDlUe
7990  *     This function triggers the specific scheduler
7991  *     to start considering the UE for scheduling.
7993 * @param[in] RgSchCellCb *cell
7994 * @param[in] RgSchUeCb *ue
8000 PUBLIC Void rgSCHCmnActvtDlUe
8006 PUBLIC Void rgSCHCmnActvtDlUe(cell, ue)
8011 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8012 TRC2(rgSCHCmnActvtDlUe);
8014 cellSchd->apisDl->rgSCHDlActvtUe(cell, ue);
8019 * @brief This API is invoked to indicate scheduler of a CRC indication.
8023 * Function : rgSCHCmnHdlUlTransInd
8024  *     This API is invoked to inform the scheduler of a CRC indication.
8026 * @param[in] RgSchCellCb *cell
8027 * @param[in] RgSchUeCb *ue
8028 * @param[in] CmLteTimingInfo timingInfo
8033 PUBLIC Void rgSCHCmnHdlUlTransInd
8037 CmLteTimingInfo timingInfo
8040 PUBLIC Void rgSCHCmnHdlUlTransInd(cell, ue, timingInfo)
8043 CmLteTimingInfo timingInfo;
8046 TRC2(rgSCHCmnHdlUlTransInd);
8048 /* Update the latest UL data/signal transmission time */
8049 RGSCHCPYTIMEINFO(timingInfo, ue->ul.ulTransTime);
8050 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
8052 /* Some UL Transmission from this UE.
8053 * Activate this UE if it was inactive */
8054 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8055 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8063 * @brief Compute the minimum Rank based on Codebook subset
8064 * restriction configuration for 4 Tx Ports and Tx Mode 4.
8068 * Function : rgSCHCmnComp4TxMode4
8070 * Depending on the bitmap set via CBSR during configuration,
8071 * - return the least possible rank.
8074 * @param[in] U32 *pmiBitMap
8075 * @return RgSchCmnRank
8078 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4
8083 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4(pmiBitMap)
8087 U32 bitMap0, bitMap1;
8088 TRC2(rgSCHCmnComp4TxMode4);
8089 bitMap0 = pmiBitMap[0];
8090 bitMap1 = pmiBitMap[1];
8091 if((bitMap1) & 0xFFFF)
8093 RETVALUE (RG_SCH_CMN_RANK_1);
8095 else if((bitMap1>>16) & 0xFFFF)
8097 RETVALUE (RG_SCH_CMN_RANK_2);
8099 else if((bitMap0) & 0xFFFF)
8101 RETVALUE (RG_SCH_CMN_RANK_3);
8103 else if((bitMap0>>16) & 0xFFFF)
8105 RETVALUE (RG_SCH_CMN_RANK_4);
8109 RETVALUE (RG_SCH_CMN_RANK_1);
8115 * @brief Compute the minimum Rank based on Codebook subset
8116 * restriction configuration for 2 Tx Ports and Tx Mode 4.
8120 * Function : rgSCHCmnComp2TxMode4
8122 * Depending on the bitmap set via CBSR during configuration,
8123 * - return the least possible rank.
8126 * @param[in] U32 *pmiBitMap
8127 * @return RgSchCmnRank
8130 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4
8135 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4(pmiBitMap)
8140 TRC2(rgSCHCmnComp2TxMode4);
8141 bitMap0 = pmiBitMap[0];
8142 if((bitMap0>>26)& 0x0F)
8144 RETVALUE (RG_SCH_CMN_RANK_1);
8146 else if((bitMap0>>30) & 3)
8148 RETVALUE (RG_SCH_CMN_RANK_2);
8152 RETVALUE (RG_SCH_CMN_RANK_1);
8157 * @brief Compute the minimum Rank based on Codebook subset
8158 * restriction configuration for 4 Tx Ports and Tx Mode 3.
8162 * Function : rgSCHCmnComp4TxMode3
8164 * Depending on the bitmap set via CBSR during configuration,
8165 * - return the least possible rank.
8168 * @param[in] U32 *pmiBitMap
8169 * @return RgSchCmnRank
8172 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3
8177 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3(pmiBitMap)
8182 TRC2(rgSCHCmnComp4TxMode3);
8183 bitMap0 = pmiBitMap[0];
8184 if((bitMap0>>28)& 1)
8186 RETVALUE (RG_SCH_CMN_RANK_1);
8188 else if((bitMap0>>29) &1)
8190 RETVALUE (RG_SCH_CMN_RANK_2);
8192 else if((bitMap0>>30) &1)
8194 RETVALUE (RG_SCH_CMN_RANK_3);
8196 else if((bitMap0>>31) &1)
8198 RETVALUE (RG_SCH_CMN_RANK_4);
8202 RETVALUE (RG_SCH_CMN_RANK_1);
8207 * @brief Compute the minimum Rank based on Codebook subset
8208 * restriction configuration for 2 Tx Ports and Tx Mode 3.
8212 * Function : rgSCHCmnComp2TxMode3
8214 * Depending on the bitmap set via CBSR during configuration,
8215 * - return the least possible rank.
8218 * @param[in] U32 *pmiBitMap
8219 * @return RgSchCmnRank
8222 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3
8227 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3(pmiBitMap)
8232 TRC2(rgSCHCmnComp2TxMode3);
8233 bitMap0 = pmiBitMap[0];
8234 if((bitMap0>>30)& 1)
8236 RETVALUE (RG_SCH_CMN_RANK_1);
8238 else if((bitMap0>>31) &1)
8240 RETVALUE (RG_SCH_CMN_RANK_2);
8244 RETVALUE (RG_SCH_CMN_RANK_1);
8249 * @brief Compute the minimum Rank based on Codebook subset
8250 * restriction configuration.
8254 * Function : rgSCHCmnComputeRank
8256 * Depending on the number of Tx ports and the transmission mode,
8257 * - return the least possible rank.
8260 * @param[in] RgrTxMode txMode
8261 * @param[in] U32 *pmiBitMap
8262 * @param[in] U8 numTxPorts
8263 * @return RgSchCmnRank
8266 PRIVATE RgSchCmnRank rgSCHCmnComputeRank
8273 PRIVATE RgSchCmnRank rgSCHCmnComputeRank(txMode, pmiBitMap, numTxPorts)
8279 TRC2(rgSCHCmnComputeRank);
8281 if (numTxPorts ==2 && txMode == RGR_UE_TM_3)
8283 RETVALUE (rgSCHCmnComp2TxMode3(pmiBitMap));
8285 else if (numTxPorts ==4 && txMode == RGR_UE_TM_3)
8287 RETVALUE (rgSCHCmnComp4TxMode3(pmiBitMap));
8289 else if (numTxPorts ==2 && txMode == RGR_UE_TM_4)
8291 RETVALUE (rgSCHCmnComp2TxMode4(pmiBitMap));
8293 else if (numTxPorts ==4 && txMode == RGR_UE_TM_4)
8295 RETVALUE (rgSCHCmnComp4TxMode4(pmiBitMap));
8299 RETVALUE (RG_SCH_CMN_RANK_1);
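/* Descriptive note on the rgSCHCmnComp*TxMode* helpers above: the codebook
 * subset restriction (CBSR) bitmap is carried in one or two 32-bit words of
 * pmiBitMap, with a contiguous group of bits per candidate rank. Each helper
 * scans these groups starting from rank 1 and returns the first rank whose
 * group has any bit set (e.g. in rgSCHCmnComp4TxMode4, a non-zero
 * (pmiBitMap[1] & 0xFFFF) selects RG_SCH_CMN_RANK_1); if no group matches,
 * RG_SCH_CMN_RANK_1 is returned as a conservative default. */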
8306 * @brief Harq Entity Deinitialization for CMN SCH.
8310 * Function : rgSCHCmnDlDeInitHqEnt
8312 * Harq Entity Deinitialization for CMN SCH
8314 * @param[in] RgSchCellCb *cell
8315 * @param[in] RgSchDlHqEnt *hqE
8318 /*KWORK_FIX:Changed function return type to void */
8320 PUBLIC Void rgSCHCmnDlDeInitHqEnt
8326 PUBLIC Void rgSCHCmnDlDeInitHqEnt(cell, hqE)
8331 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8332 RgSchDlHqProcCb *hqP;
8336 TRC2(rgSCHCmnDlDeInitHqEnt);
8338 ret = cellSchd->apisDl->rgSCHDlUeHqEntDeInit(cell, hqE);
8339 /* Free only if the HARQ procs were created */
8344 for(cnt = 0; cnt < hqE->numHqPrcs; cnt++)
8346 hqP = &hqE->procs[cnt];
8347 if ((RG_SCH_CMN_GET_DL_HQP(hqP)))
8349 rgSCHUtlFreeSBuf(cell->instIdx,
8350 (Data**)(&(hqP->sch)), (sizeof(RgSchCmnDlHqProc)));
8354 rgSCHLaaDeInitDlHqProcCb (cell, hqE);
8361 * @brief Harq Entity initialization for CMN SCH.
8365 * Function : rgSCHCmnDlInitHqEnt
8367 * Harq Entity initialization for CMN SCH
8369 * @param[in] RgSchCellCb *cell
8370 * @param[in] RgSchUeCb *ue
8376 PUBLIC S16 rgSCHCmnDlInitHqEnt
8382 PUBLIC S16 rgSCHCmnDlInitHqEnt(cell, hqEnt)
8384 RgSchDlHqEnt *hqEnt;
8388 RgSchDlHqProcCb *hqP;
8391 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8392 TRC2(rgSCHCmnDlInitHqEnt);
8394 for(cnt = 0; cnt < hqEnt->numHqPrcs; cnt++)
8396 hqP = &hqEnt->procs[cnt];
8397 if (rgSCHUtlAllocSBuf(cell->instIdx,
8398 (Data**)&(hqP->sch), (sizeof(RgSchCmnDlHqProc))) != ROK)
8404 if((cell->emtcEnable) &&(hqEnt->ue->isEmtcUe))
8406 if(ROK != cellSchd->apisEmtcDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8415 if(ROK != cellSchd->apisDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8422 } /* rgSCHCmnDlInitHqEnt */
8425 * @brief This function computes the distribution of the refresh period.
8429 * Function: rgSCHCmnGetRefreshDist
8430 * Purpose: This function computes the distribution of the refresh period.
8431 * This is required to spread the refresh of the set of UEs
8432 * across different consecutive subframes.
8434 * Invoked by: rgSCHCmnGetRefreshPer
8436 * @param[in] RgSchCellCb *cell
8437 * @param[in] RgSchUeCb *ue
8442 PRIVATE U8 rgSCHCmnGetRefreshDist
8448 PRIVATE U8 rgSCHCmnGetRefreshDist(cell, ue)
8455 Inst inst = cell->instIdx;
8457 TRC2(rgSCHCmnGetRefreshDist);
8459 for(refOffst = 0; refOffst < RGSCH_MAX_REFRESH_OFFSET; refOffst++)
8461 if(cell->refreshUeCnt[refOffst] < RGSCH_MAX_REFRESH_GRPSZ)
8463 cell->refreshUeCnt[refOffst]++;
8464 ue->refreshOffset = refOffst;
8465 /* printf("UE[%d] refresh offset[%d]. Cell refresh ue count[%d].\n", ue->ueId, refOffst, cell->refreshUeCnt[refOffst]); */
8470 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Allocation of refresh distribution failed\n"));
8471 /* We should not reach here normally; in case of failure, allocate from the last offset */
8472 cell->refreshUeCnt[refOffst-1]++;
8473 ue->refreshOffset = refOffst-1;
8475 RETVALUE(refOffst-1);
8478 * @brief This function computes the initial refresh wait period.
8482 * Function: rgSCHCmnGetRefreshPer
8483 * Purpose: This function computes the initial refresh wait period.
8484 * This is required to align the refresh of multiple UEs
8485 * around the same time.
8487 * Invoked by: rgSCHCmnRgrUeCfg, rgSCHCmnRgrUeRecfg
8489 * @param[in] RgSchCellCb *cell
8490 * @param[in] RgSchUeCb *ue
8491 * @param[in] U32 *waitPer
8496 PRIVATE Void rgSCHCmnGetRefreshPer
8503 PRIVATE Void rgSCHCmnGetRefreshPer(cell, ue, waitPer)
8512 TRC2(rgSCHCmnGetRefreshPer);
8514 refreshPer = RG_SCH_CMN_REFRESH_TIME * RG_SCH_CMN_REFRESH_TIMERES;
8515 crntSubFrm = cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.subframe;
8516 /* Fix: syed align multiple UEs to refresh at same time */
8517 *waitPer = refreshPer - (crntSubFrm % refreshPer);
8518 *waitPer = RGSCH_CEIL(*waitPer, RG_SCH_CMN_REFRESH_TIMERES);
8519 *waitPer = *waitPer + rgSCHCmnGetRefreshDist(cell, ue);
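/* Worked example of the wait-period computation above (illustrative numbers;
 * the real values of RG_SCH_CMN_REFRESH_TIME/TIMERES are configuration
 * dependent): if refreshPer evaluates to 1000 subframes and the current
 * absolute subframe crntSubFrm is 2345, the raw wait is
 * 1000 - (2345 % 1000) = 655 subframes, which RGSCH_CEIL() converts to timer
 * resolution units before the per-UE stagger from rgSCHCmnGetRefreshDist()
 * is added. */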
8527 * @brief UE initialisation for scheduler.
8531 * Function : rgSCHCmnRgrSCellUeCfg
8533 * This function initialises UE-specific scheduler
8534 * information for the SCell:
8535 * 0. Perform basic validations
8536 * 1. Allocate common sched UE cntrl blk
8537 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
8539 * 4. Perform DLFS cfg
8541 * @param[in] RgSchCellCb *cell
8542 * @param[in] RgSchUeCb *ue
8543 * @param[out] RgSchErrInfo *err
8549 PUBLIC S16 rgSCHCmnRgrSCellUeCfg
8553 RgrUeSecCellCfg *sCellInfoCfg,
8557 PUBLIC S16 rgSCHCmnRgrSCellUeCfg(sCell, ue, sCellInfoCfg, err)
8560 RgrUeSecCellCfg *sCellInfoCfg;
8567 RgSchCmnAllocRecord *allRcd;
8568 RgSchDlRbAlloc *allocInfo;
8569 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8571 RgSchCmnUlUe *ueUlPcell;
8572 RgSchCmnUe *pCellUeSchCmn;
8573 RgSchCmnUe *ueSchCmn;
8575 RgSchCmnDlUe *pCellUeDl;
8577 Inst inst = ue->cell->instIdx;
8579 U32 idx = (U8)((sCell->cellId - rgSchCb[sCell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8580 TRC2(rgSCHCmnRgrSCellUeCfg);
8582 pCellUeSchCmn = RG_SCH_CMN_GET_UE(ue,ue->cell);
8583 pCellUeDl = &pCellUeSchCmn->dl;
8585 /* 1. Allocate Common sched control block */
8586 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8587 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8589 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Memory allocation FAILED\n"));
8590 err->errCause = RGSCHERR_SCH_CFG;
8593 ueSchCmn = RG_SCH_CMN_GET_UE(ue,sCell);
8595 /* 2. Perform the UE's downlink configuration */
8596 ueDl = &ueSchCmn->dl;
8599 ueDl->mimoInfo = pCellUeDl->mimoInfo;
8601 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
8602 (ue->mimoInfo.txMode == RGR_UE_TM_6))
8604 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_NO_PMI);
8606 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
8608 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_RI_1);
8610 RGSCH_ARRAY_BOUND_CHECK(sCell->instIdx, rgUeCatTbl, pCellUeSchCmn->cmn.ueCat);
8611 ueDl->maxTbBits = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlTbBits;
8614 ri = RGSCH_MIN(ri, sCell->numTxAntPorts);
8615 if(((CM_LTE_UE_CAT_6 == pCellUeSchCmn->cmn.ueCat )
8616 ||(CM_LTE_UE_CAT_7 == pCellUeSchCmn->cmn.ueCat))
8619 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[1];
8623 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[0];
8626 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
8628 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8629 rgSchTddDlNumHarqProcTbl[sCell->ulDlCfgIdx]);
8631 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8632 RGSCH_NUM_DL_HQ_PROC);
8635 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, ue->isEmtcUe);
8637 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, FALSE);
8641 /* ambrCfgd config moved to ueCb.dl, as it is not needed on a per-cell basis */
8643 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, sCell);
8644 allocInfo->rnti = ue->ueId;
8646 /* Initializing the lastCfi value to current cfi value */
8647 ueDl->lastCfi = cellSchd->dl.currCfi;
8649 if ((cellSchd->apisDl->rgSCHRgrSCellDlUeCfg(sCell, ue, err)) != ROK)
8651 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Spec Sched DL UE CFG FAILED\n"));
8655 /* TODO: enhance for DLFS RB Allocation for SCELLs in future dev */
8657 /* DLFS UE Config */
8658 if (cellSchd->dl.isDlFreqSel)
8660 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeCfg(sCell, ue, sCellInfoCfg, err)) != ROK)
8662 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS UE config FAILED\n"));
8667 /* TODO: Do UL SCELL CFG during UL CA dev */
8669 ueUl = RG_SCH_CMN_GET_UL_UE(ue, sCell);
8671 /* TODO_ULCA: SRS for SCELL needs to be handled in the below function call */
8672 rgSCHCmnUpdUeUlCqiInfo(sCell, ue, ueUl, ueSchCmn, cellSchd,
8673 sCell->isCpUlExtend);
8675 ret = rgSCHUhmHqEntInit(sCell, ue);
8678 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL UHM HARQ Ent Init "
8679 "Failed for CRNTI:%d", ue->ueId);
8683 ueUlPcell = RG_SCH_CMN_GET_UL_UE(ue, ue->cell);
8684 /* Initialize uplink HARQ related information for UE */
8685 ueUl->hqEnt.maxHqRetx = ueUlPcell->hqEnt.maxHqRetx;
8686 cmLListInit(&ueUl->hqEnt.free);
8687 cmLListInit(&ueUl->hqEnt.inUse);
8688 for(i=0; i < ueUl->hqEnt.numHqPrcs; i++)
8690 ueUl->hqEnt.hqProcCb[i].hqEnt = (void*)(&ueUl->hqEnt);
8691 ueUl->hqEnt.hqProcCb[i].procId = i;
8692 ueUl->hqEnt.hqProcCb[i].ulSfIdx = RGSCH_INVALID_INFO;
8693 ueUl->hqEnt.hqProcCb[i].alloc = NULLP;
8695 /* ccpu00139513- Initializing SPS flags*/
8696 ueUl->hqEnt.hqProcCb[i].isSpsActvnHqP = FALSE;
8697 ueUl->hqEnt.hqProcCb[i].isSpsOccnHqP = FALSE;
8699 cmLListAdd2Tail(&ueUl->hqEnt.free, &ueUl->hqEnt.hqProcCb[i].lnk);
8700 ueUl->hqEnt.hqProcCb[i].lnk.node = (PTR)&ueUl->hqEnt.hqProcCb[i];
8703 /* Allocate UL BSR allocation tracking List */
8704 cmLListInit(&ueUl->ulAllocLst);
8706 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
8708 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8709 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord)) != ROK))
8711 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL Memory allocation FAILED"
8712 "for CRNTI:%d",ue->ueId);
8713 err->errCause = RGSCHERR_SCH_CFG;
8716 allRcd->allocTime = sCell->crntTime;
8717 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
8718 allRcd->lnk.node = (PTR)allRcd;
8721 /* After initialising UL part, do power related init */
8722 ret = rgSCHPwrUeSCellCfg(sCell, ue, sCellInfoCfg);
8725 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Could not do "
8726 "power config for UE CRNTI:%d",ue->ueId);
8731 if(TRUE == ue->isEmtcUe)
8733 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8735 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8736 "for CRNTI:%d",ue->ueId);
8743 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8745 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8746 "for CRNTI:%d",ue->ueId);
8751 ue->ul.isUlCaEnabled = TRUE;
8755 } /* rgSCHCmnRgrSCellUeCfg */
8759 * @brief UE SCell deletion for scheduler.
8763 * Function : rgSCHCmnRgrSCellUeDel
8765 * This function deletes UE-specific scheduler
8766 * information for the SCell.
8768 * @param[in] RgSchCellCb *cell
8769 * @param[in] RgSchUeCb *ue
8775 PUBLIC S16 rgSCHCmnRgrSCellUeDel
8777 RgSchUeCellInfo *sCellInfo,
8781 PUBLIC S16 rgSCHCmnRgrSCellUeDel(sCellInfo, ue)
8782 RgSchUeCellInfo *sCellInfo;
8786 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8787 Inst inst = ue->cell->instIdx;
8789 TRC2(rgSCHCmnRgrSCellUeDel);
8791 cellSchd->apisDl->rgSCHRgrSCellDlUeDel(sCellInfo, ue);
8794 rgSCHCmnUlUeDelAllocs(sCellInfo->cell, ue);
8797 if(TRUE == ue->isEmtcUe)
8799 cellSchd->apisEmtcUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8804 cellSchd->apisUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8807 /* DLFS UE Config */
8808 if (cellSchd->dl.isDlFreqSel)
8810 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeDel(sCellInfo->cell, ue)) != ROK)
8812 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS Scell del FAILED\n"));
8817 rgSCHUtlFreeSBuf(sCellInfo->cell->instIdx,
8818 (Data**)(&(sCellInfo->sch)), (sizeof(RgSchCmnUe)));
8822 } /* rgSCHCmnRgrSCellUeDel */
8828 * @brief Handles 5gtf configuration for a UE
8832 * Function : rgSCHCmn5gtfUeCfg
8838 * @param[in] RgSchCellCb *cell
8839 * @param[in] RgSchUeCb *ue
8840 * @param[in] RgrUeCfg *cfg
8846 PUBLIC S16 rgSCHCmn5gtfUeCfg
8853 PUBLIC S16 rgSCHCmn5gtfUeCfg(cell, ue, cfg)
8859 TRC2(rgSCHCmn5gtfUeCfg);
8861 RgSchUeGrp *ue5gtfGrp;
8862 ue->ue5gtfCb.grpId = cfg->ue5gtfCfg.grpId;
8863 ue->ue5gtfCb.BeamId = cfg->ue5gtfCfg.BeamId;
8864 ue->ue5gtfCb.numCC = cfg->ue5gtfCfg.numCC;
8865 ue->ue5gtfCb.mcs = cfg->ue5gtfCfg.mcs;
8866 ue->ue5gtfCb.maxPrb = cfg->ue5gtfCfg.maxPrb;
8868 ue->ue5gtfCb.cqiRiPer = 100;
8869 /* 5gtf TODO: CQIs to start from (10,0)*/
8870 ue->ue5gtfCb.nxtCqiRiOccn.sfn = 10;
8871 ue->ue5gtfCb.nxtCqiRiOccn.subframe = 0;
8872 ue->ue5gtfCb.rank = 1;
8874 printf("\nschd cfg at mac,%u,%u,%u,%u,%u\n",ue->ue5gtfCb.grpId,ue->ue5gtfCb.BeamId,ue->ue5gtfCb.numCC,
8875 ue->ue5gtfCb.mcs,ue->ue5gtfCb.maxPrb);
8877 ue5gtfGrp = &(cell->cell5gtfCb.ueGrp5gConf[ue->ue5gtfCb.BeamId]);
8879 /* TODO_5GTF: Currently handling 1 group only. Need to update when multi group
8880 scheduling comes into picture */
8881 if(ue5gtfGrp->beamBitMask & (1 << ue->ue5gtfCb.BeamId))
8883 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8884 "5GTF_ERROR Invalid beam id CRNTI:%d",cfg->crnti);
8887 ue5gtfGrp->beamBitMask |= (1 << ue->ue5gtfCb.BeamId);
8894 * @brief UE initialisation for scheduler.
8898 * Function : rgSCHCmnRgrUeCfg
8900 * This function initialises UE-specific scheduler information:
8902 * 0. Perform basic validations
8903 * 1. Allocate common sched UE cntrl blk
8904 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
8906 * 4. Perform DLFS cfg
8908 * @param[in] RgSchCellCb *cell
8909 * @param[in] RgSchUeCb *ue
8910 * @param[int] RgrUeCfg *ueCfg
8911 * @param[out] RgSchErrInfo *err
8917 PUBLIC S16 rgSCHCmnRgrUeCfg
8925 PUBLIC S16 rgSCHCmnRgrUeCfg(cell, ue, ueCfg, err)
8932 RgSchDlRbAlloc *allocInfo;
8934 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8935 RgSchCmnUe *ueSchCmn;
8939 RgSchCmnAllocRecord *allRcd;
8941 U32 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8942 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
8943 TRC2(rgSCHCmnRgrUeCfg);
8946 /* 1. Allocate Common sched control block */
8947 if((rgSCHUtlAllocSBuf(cell->instIdx,
8948 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8950 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8951 "Memory allocation FAILED for CRNTI:%d",ueCfg->crnti);
8952 err->errCause = RGSCHERR_SCH_CFG;
8955 ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
8956 ue->dl.ueDlCqiCfg = ueCfg->ueDlCqiCfg;
8957 pCellInfo->acqiCb.aCqiCfg = ueCfg->ueDlCqiCfg.aprdCqiCfg;
8958 if(ueCfg->ueCatEnum > 0 )
8960 /*KWORK_FIX removed NULL chk for ueSchCmn*/
8961 ueSchCmn->cmn.ueCat = ueCfg->ueCatEnum - 1;
8965 ueSchCmn->cmn.ueCat = 0; /* Assuming enum values correctly set */
8967 cmInitTimers(&ueSchCmn->cmn.tmr, 1);
8969 /* 2. Perform the UE's downlink configuration */
8970 ueDl = &ueSchCmn->dl;
8971 /* RACHO : store the rapId assigned for HandOver UE.
8972 * Append UE to handover list of cmnCell */
8973 if (ueCfg->dedPreambleId.pres == PRSNT_NODEF)
8975 rgSCHCmnDelDedPreamble(cell, ueCfg->dedPreambleId.val);
8976 ueDl->rachInfo.hoRapId = ueCfg->dedPreambleId.val;
8977 cmLListAdd2Tail(&cellSchd->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
8978 ueDl->rachInfo.hoLnk.node = (PTR)ue;
8981 rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd);
8983 if (ueCfg->txMode.pres == TRUE)
8985 if ((ueCfg->txMode.txModeEnum == RGR_UE_TM_4) ||
8986 (ueCfg->txMode.txModeEnum == RGR_UE_TM_6))
8988 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
8990 if (ueCfg->txMode.txModeEnum == RGR_UE_TM_3)
8992 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
8995 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
8996 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
8999 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
9000 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
9001 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
9004 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
9008 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
9011 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
9013 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9014 rgSchTddDlNumHarqProcTbl[cell->ulDlCfgIdx]);
9016 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9017 RGSCH_NUM_DL_HQ_PROC);
9020 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
9022 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
9024 /* If neither the DL AMBR nor the UL AMBR is configured, fail the configuration */
9026 if((ueCfg->ueQosCfg.dlAmbr == 0) && (ueCfg->ueQosCfg.ueBr == 0))
9028 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"UL Ambr and DL Ambr are"
9029 "configured as 0 for CRNTI:%d",ueCfg->crnti);
9030 err->errCause = RGSCHERR_SCH_CFG;
9034 ue->dl.ambrCfgd = (ueCfg->ueQosCfg.dlAmbr * RG_SCH_CMN_REFRESH_TIME)/100;
9036 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
9037 allocInfo->rnti = ue->ueId;
9039 /* Initializing the lastCfi value to current cfi value */
9040 ueDl->lastCfi = cellSchd->dl.currCfi;
9042 if(cell->emtcEnable && ue->isEmtcUe)
9044 if ((cellSchd->apisEmtcDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9046 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9047 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9055 if ((cellSchd->apisDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9057 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9058 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9065 /* 3. Initialize ul part */
9066 ueUl = &ueSchCmn->ul;
9068 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
9069 cell->isCpUlExtend);
9071 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9072 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
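/* The expression above converts the UE category's maximum UL bits per TTI
 * into a per-TTI byte budget: it is scaled by the ratio
 * RG_SCH_CMN_MAX_BITS_RATIO / RG_SCH_CMN_UL_COM_DENOM and divided by 8 to go
 * from bits to bytes (the concrete ratio depends on those macro values). */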
9074 ue->ul.cfgdAmbr = (ueCfg->ueQosCfg.ueBr * RG_SCH_CMN_REFRESH_TIME)/100;
9075 ue->ul.effAmbr = ue->ul.cfgdAmbr;
9076 RGSCHCPYTIMEINFO(cell->crntTime, ue->ul.ulTransTime);
9078 /* Allocate UL BSR allocation tracking List */
9079 cmLListInit(&ueUl->ulAllocLst);
9081 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
9083 if((rgSCHUtlAllocSBuf(cell->instIdx,
9084 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord)) != ROK))
9086 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation FAILED"
9087 "for CRNTI:%d",ueCfg->crnti);
9088 err->errCause = RGSCHERR_SCH_CFG;
9091 allRcd->allocTime = cell->crntTime;
9092 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
9093 allRcd->lnk.node = (PTR)allRcd;
9095 /* Allocate common sch cntrl blocks for LCGs */
9096 for (cnt=0; cnt<RGSCH_MAX_LCG_PER_UE; cnt++)
9098 ret = rgSCHUtlAllocSBuf(cell->instIdx,
9099 (Data**)&(ue->ul.lcgArr[cnt].sch), (sizeof(RgSchCmnLcg)));
9102 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9103 "SCH struct alloc failed for CRNTI:%d",ueCfg->crnti);
9104 err->errCause = RGSCHERR_SCH_CFG;
9108 /* After initialising UL part, do power related init */
9109 ret = rgSCHPwrUeCfg(cell, ue, ueCfg);
9112 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9113 "power config for UE CRNTI:%d",ueCfg->crnti);
9117 ret = rgSCHCmnSpsUeCfg(cell, ue, ueCfg, err);
9120 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9121 "SPS config for CRNTI:%d",ueCfg->crnti);
9124 #endif /* LTEMAC_SPS */
9127 if(TRUE == ue->isEmtcUe)
9129 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9131 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9132 "for CRNTI:%d",ueCfg->crnti);
9139 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9141 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9142 "for CRNTI:%d",ueCfg->crnti);
9147 /* DLFS UE Config */
9148 if (cellSchd->dl.isDlFreqSel)
9150 if ((cellSchd->apisDlfs->rgSCHDlfsUeCfg(cell, ue, ueCfg, err)) != ROK)
9152 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "DLFS UE config FAILED"
9153 "for CRNTI:%d",ueCfg->crnti);
9158 /* Fix: syed align multiple UEs to refresh at same time */
9159 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9160 /* Start UE Qos Refresh Timer */
9161 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
9163 rgSCHCmn5gtfUeCfg(cell, ue, ueCfg);
9167 } /* rgSCHCmnRgrUeCfg */
9170 * @brief UE TX mode reconfiguration handler.
9174 * Function : rgSCHCmnDlHdlTxModeRecfg
9176 * This function updates UE-specific scheduler
9177 * information upon UE reconfiguration.
9179 * @param[in] RgSchUeCb *ue
9180 * @param[in] RgrUeRecfg *ueRecfg
9185 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9189 RgrUeRecfg *ueRecfg,
9193 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, numTxPorts)
9196 RgrUeRecfg *ueRecfg;
9201 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9208 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg)
9211 RgrUeRecfg *ueRecfg;
9215 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9216 TRC2(rgSCHCmnDlHdlTxModeRecfg);
9218 if (ueRecfg->txMode.pres != PRSNT_NODEF)
9222 /* ccpu00140894- Starting Timer for TxMode Transition Completion*/
9223 ue->txModeTransCmplt =FALSE;
9224 rgSCHTmrStartTmr (ue->cell, ue, RG_SCH_TMR_TXMODE_TRNSTN, RG_SCH_TXMODE_TRANS_TIMER);
9225 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_CMPLT)
9227 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell,
9228 RG_SCH_CMN_TD_TXMODE_RECFG);
9229 /* MS_WORKAROUND for ccpu00123186 MIMO Fix Start: need to set FORCE TD bitmap based on TX mode */
9230 ueDl->mimoInfo.ri = 1;
9231 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9232 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9234 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9236 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9238 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9240 /* MIMO Fix End: need to set FORCE TD bitmap based on TX mode */
9243 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_START)
9245 /* start afresh forceTD masking */
9246 RG_SCH_CMN_INIT_FORCE_TD(ue, cell, 0);
9247 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_TXMODE_RECFG);
9248 /* Initialize MIMO-related parameters of the UE */
9251 if(ueRecfg->txMode.pres)
9253 if((ueRecfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9254 (ueRecfg->txMode.txModeEnum ==RGR_UE_TM_4))
9256 if(ueRecfg->ueCodeBookRstRecfg.pres)
9259 rgSCHCmnComputeRank(ueRecfg->txMode.txModeEnum,
9260 ueRecfg->ueCodeBookRstRecfg.pmiBitMap, numTxPorts);
9264 ueDl->mimoInfo.ri = 1;
9269 ueDl->mimoInfo.ri = 1;
9274 ueDl->mimoInfo.ri = 1;
9277 ueDl->mimoInfo.ri = 1;
9278 #endif /* TFU_UPGRADE */
9279 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9280 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9282 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9284 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9286 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9291 /***********************************************************
9293 * Func : rgSCHCmnUpdUeMimoInfo
9295 * Desc : Updates the UE's DL MIMO information (rank and per-codeword CQI)
9303 **********************************************************/
9305 PRIVATE Void rgSCHCmnUpdUeMimoInfo
9310 RgSchCmnCell *cellSchd
9313 PRIVATE Void rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd)
9317 RgSchCmnCell *cellSchd;
9320 TRC2(rgSCHCmnUpdUeMimoInfo)
9322 if(ueCfg->txMode.pres)
9324 if((ueCfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9325 (ueCfg->txMode.txModeEnum ==RGR_UE_TM_4))
9327 if(ueCfg->ueCodeBookRstCfg.pres)
9330 rgSCHCmnComputeRank(ueCfg->txMode.txModeEnum,
9331 ueCfg->ueCodeBookRstCfg.pmiBitMap, cell->numTxAntPorts);
9335 ueDl->mimoInfo.ri = 1;
9340 ueDl->mimoInfo.ri = 1;
9345 ueDl->mimoInfo.ri = 1;
9349 ueDl->mimoInfo.ri = 1;
9350 #endif /*TFU_UPGRADE */
9351 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
9352 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
9356 /***********************************************************
9358 * Func : rgSCHCmnUpdUeUlCqiInfo
9360 * Desc : Updates the UE's UL CQI related information
9368 **********************************************************/
9370 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo
9375 RgSchCmnUe *ueSchCmn,
9376 RgSchCmnCell *cellSchd,
9380 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd, isEcp)
9384 RgSchCmnUe *ueSchCmn;
9385 RgSchCmnCell *cellSchd;
9390 TRC2(rgSCHCmnUpdUeUlCqiInfo)
9393 if(ue->srsCb.srsCfg.type == RGR_SCH_SRS_SETUP)
9395 if(ue->ul.ulTxAntSel.pres)
9397 ueUl->crntUlCqi[ue->srsCb.selectedAnt] = cellSchd->ul.dfltUlCqi;
9398 ueUl->validUlCqi = ueUl->crntUlCqi[ue->srsCb.selectedAnt];
9402 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9403 ueUl->validUlCqi = ueUl->crntUlCqi[0];
9405 ue->validTxAnt = ue->srsCb.selectedAnt;
9409 ueUl->validUlCqi = cellSchd->ul.dfltUlCqi;
9413 ueUl->ulLaCb.cqiBasediTbs = rgSchCmnUlCqiToTbsTbl[isEcp]
9414 [ueUl->validUlCqi] * 100;
9415 ueUl->ulLaCb.deltaiTbs = 0;
9419 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9420 #endif /*TFU_UPGRADE */
9421 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
9422 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9424 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9428 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
9433 /***********************************************************
9435 * Func : rgSCHCmnUpdUeCatCfg
9437 * Desc : Updates UL and DL limits derived from the UE category configuration
9445 **********************************************************/
9447 PRIVATE Void rgSCHCmnUpdUeCatCfg
9453 PRIVATE Void rgSCHCmnUpdUeCatCfg(ue, cell)
9458 RgSchDlHqEnt *hqE = NULLP;
9459 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9460 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9461 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
9462 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
9464 TRC2(rgSCHCmnUpdUeCatCfg)
9466 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
9468 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
9471 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
9472 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
9473 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
9474 && (RG_SCH_MAX_TX_LYRS_4 == ri))
9476 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
9480 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
9483 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9485 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9487 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9491 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
9493 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9494 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
9499 * @brief UE reconfiguration for scheduler.
9503 * Function : rgSCHCmnRgrUeRecfg
9505 * This function updates UE-specific scheduler
9506 * information upon UE reconfiguration.
9508 * @param[in] RgSchCellCb *cell
9509 * @param[in] RgSchUeCb *ue
9510 * @param[int] RgrUeRecfg *ueRecfg
9511 * @param[out] RgSchErrInfo *err
9517 PUBLIC S16 rgSCHCmnRgrUeRecfg
9521 RgrUeRecfg *ueRecfg,
9525 PUBLIC S16 rgSCHCmnRgrUeRecfg(cell, ue, ueRecfg, err)
9528 RgrUeRecfg *ueRecfg;
9532 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9535 TRC2(rgSCHCmnRgrUeRecfg);
9536 /* Basic validations */
9537 if (ueRecfg->ueRecfgTypes & RGR_UE_TXMODE_RECFG)
9540 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, cell->numTxAntPorts);
9542 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg);
9543 #endif /* TFU_UPGRADE */
9545 if(ueRecfg->ueRecfgTypes & RGR_UE_CSG_PARAM_RECFG)
9547 ue->csgMmbrSta = ueRecfg->csgMmbrSta;
9549 /* Changes for UE Category reconfiguration feature */
9550 if(ueRecfg->ueRecfgTypes & RGR_UE_UECAT_RECFG)
9552 rgSCHCmnUpdUeCatCfg(ue, cell);
9554 if (ueRecfg->ueRecfgTypes & RGR_UE_APRD_DLCQI_RECFG)
9556 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
9557 pCellInfo->acqiCb.aCqiCfg = ueRecfg->aprdDlCqiRecfg;
9560 if (ueRecfg->ueRecfgTypes & RGR_UE_PRD_DLCQI_RECFG)
9562 if ((ueRecfg->prdDlCqiRecfg.pres == TRUE)
9563 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD10)
9564 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD20))
9566 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unsupported periodic CQI "
9567 "reporting mode %d for old CRNIT:%d",
9568 (int)ueRecfg->prdDlCqiRecfg.prdModeEnum,ueRecfg->oldCrnti);
9569 err->errCause = RGSCHERR_SCH_CFG;
9572 ue->dl.ueDlCqiCfg.prdCqiCfg = ueRecfg->prdDlCqiRecfg;
9576 if (ueRecfg->ueRecfgTypes & RGR_UE_ULPWR_RECFG)
9578 if (rgSCHPwrUeRecfg(cell, ue, ueRecfg) != ROK)
9580 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9581 "Power Reconfiguration Failed for OLD CRNTI:%d",ueRecfg->oldCrnti);
9586 if (ueRecfg->ueRecfgTypes & RGR_UE_QOS_RECFG)
9588 /* Uplink Sched related Initialization */
9589 if ((ueRecfg->ueQosRecfg.dlAmbr == 0) && (ueRecfg->ueQosRecfg.ueBr == 0))
9591 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Ul Ambr and DL Ambr "
9592 "configured as 0 for OLD CRNTI:%d",ueRecfg->oldCrnti);
9593 err->errCause = RGSCHERR_SCH_CFG;
9596 ue->ul.cfgdAmbr = (ueRecfg->ueQosRecfg.ueBr * \
9597 RG_SCH_CMN_REFRESH_TIME)/100;
9598 /* Downlink Sched related Initialization */
9599 ue->dl.ambrCfgd = (ueRecfg->ueQosRecfg.dlAmbr * \
9600 RG_SCH_CMN_REFRESH_TIME)/100;
9601 /* Fix: syed Update the effAmbr and effUeBR fields w.r.t the
9602 * new QOS configuration */
9603 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9604 /* Fix: syed align multiple UEs to refresh at same time */
9605 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9606 rgSCHCmnApplyUeRefresh(cell, ue);
9607 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
9610 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9612 if ((cellSchCmn->apisEmtcUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9614 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9615 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9618 if ((cellSchCmn->apisEmtcDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9620 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9621 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9628 if ((cellSchCmn->apisUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9630 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9631 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9634 if ((cellSchCmn->apisDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9636 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9637 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9641 /* DLFS UE Config */
9642 if (cellSchCmn->dl.isDlFreqSel)
9644 if ((cellSchCmn->apisDlfs->rgSCHDlfsUeRecfg(cell, ue, \
9645 ueRecfg, err)) != ROK)
9647 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9648 "DLFS UE re-config FAILED for CRNTI:%d",ue->ueId);
9654 /* Invoke re-configuration on SPS module */
9655 if (rgSCHCmnSpsUeRecfg(cell, ue, ueRecfg, err) != ROK)
9657 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9658 "DL SPS ReCFG FAILED for UE CRNTI:%d", ue->ueId);
9664 } /* rgSCHCmnRgrUeRecfg*/
9666 /***********************************************************
9668 * Func : rgSCHCmnUlUeDelAllocs
9670 * Desc : Deletion of all UE allocations.
9678 **********************************************************/
9680 PRIVATE Void rgSCHCmnUlUeDelAllocs
9686 PRIVATE Void rgSCHCmnUlUeDelAllocs(cell, ue)
9691 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
9692 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
9695 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
9697 TRC2(rgSCHCmnUlUeDelAllocs);
9699 for (i = 0; i < ueUl->hqEnt.numHqPrcs; ++i)
9701 RgSchUlHqProcCb *proc = rgSCHUhmGetUlHqProc(cell, ue, i);
9704 /* proc can't be NULL here */
9712 /* Insure fix for reading dangling memory: NULLed crntAlloc */
9714 if(proc->alloc == ulSpsUe->ulSpsSchdInfo.crntAlloc)
9716 ulSpsUe->ulSpsSchdInfo.crntAlloc = NULLP;
9717 ulSpsUe->ulSpsSchdInfo.crntAllocSf = NULLP;
9721 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9722 proc->alloc,ue->isEmtcUe);
9724 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9727 /* PHY probably need not be intimated, since
9728 * whatever intimation it needs happens at the last minute. */
9731 /* Fix: syed Adaptive Msg3 Retx crash. Remove the harqProc
9732 * from adaptive retx List. */
9733 if (proc->reTxLnk.node)
9736 //TODO_SID: Need to take care
9737 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
9738 proc->reTxLnk.node = (PTR)NULLP;
9746 /***********************************************************
9748 * Func : rgSCHCmnDelUeFrmRefreshQ
9750 * Desc : Removes a UE from the refresh queue, so that the UE is
9751 * no longer periodically triggered to refresh its GBR and
9760 **********************************************************/
9762 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ
9768 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ(cell, ue)
9773 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
9775 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
9777 TRC2(rgSCHCmnDelUeFrmRefreshQ);
9779 #ifdef RGL_SPECIFIC_CHANGES
9780 if(ue->refreshOffset < RGSCH_MAX_REFRESH_GRPSZ)
9782 if(cell->refreshUeCnt[ue->refreshOffset])
9784 cell->refreshUeCnt[ue->refreshOffset]--;
9790 cmMemset((U8 *)&arg, 0, sizeof(arg));
9791 arg.tqCp = &sched->tmrTqCp;
9792 arg.tq = sched->tmrTq;
9793 arg.timers = &ueSchd->tmr;
9797 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
9803 /***********************************************************
9805 * Func : rgSCHCmnUeCcchSduDel
9807 * Desc : Clear CCCH SDU scheduling context.
9815 **********************************************************/
9817 PRIVATE Void rgSCHCmnUeCcchSduDel
9823 PRIVATE Void rgSCHCmnUeCcchSduDel(cell, ueCb)
9828 RgSchDlHqEnt *hqE = NULLP;
9829 RgSchDlHqProcCb *ccchSduHqP = NULLP;
9830 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
9832 TRC2(rgSCHCmnUeCcchSduDel);
9834 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
9839 ccchSduHqP = hqE->ccchSduProc;
9840 if(ueCb->ccchSduLnk.node != NULLP)
9842 /* Remove the ccchSduProc if it is in the Tx list */
9843 cmLListDelFrm(&(cell->ccchSduUeLst), &(ueCb->ccchSduLnk));
9844 ueCb->ccchSduLnk.node = NULLP;
9846 else if(ccchSduHqP != NULLP)
9848 /* Fix for crash due to stale pdcch. Release ccch pdcch*/
9849 if(ccchSduHqP->pdcch)
9851 cmLListDelFrm(&ccchSduHqP->subFrm->pdcchInfo.pdcchs,
9852 &ccchSduHqP->pdcch->lnk);
9853 cmLListAdd2Tail(&cell->pdcchLst, &ccchSduHqP->pdcch->lnk);
9854 ccchSduHqP->pdcch = NULLP;
9856 if(ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node != NULLP)
9858 /* Remove the ccchSduProc if it is in the retx list */
9859 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
9860 &ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk);
9861 /* ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node = NULLP; */
9862 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
9864 else if ((ccchSduHqP->subFrm != NULLP) &&
9865 (ccchSduHqP->hqPSfLnk.node != NULLP))
9867 rgSCHUtlDlHqPTbRmvFrmTx(ccchSduHqP->subFrm,
9868 ccchSduHqP, 0, FALSE);
9869 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
9879 * @brief UE deletion for scheduler.
9883 * Function : rgSCHCmnUeDel
9885 * This function deletes all scheduler information
9886 * pertaining to a UE.
9888 * @param[in] RgSchCellCb *cell
9889 * @param[in] RgSchUeCb *ue
9893 PUBLIC Void rgSCHCmnUeDel
9899 PUBLIC Void rgSCHCmnUeDel(cell, ue)
9904 RgSchDlHqEnt *hqE = NULLP;
9905 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9907 RgSchCmnAllocRecord *allRcd;
9909 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9911 TRC2(rgSCHCmnUeDel);
9913 if (RG_SCH_CMN_GET_UE(ue,cell) == NULLP)
9915 /* Common scheduler config has not happened yet */
9918 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
9921 /* UE Free can be triggered before MSG4 done when dlHqE is not updated */
9925 rgSCHEmtcCmnUeCcchSduDel(cell, ue);
9930 rgSCHCmnUeCcchSduDel(cell, ue);
9933 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9935 rgSCHCmnUlUeDelAllocs(cell, ue);
9937 rgSCHCmnDelRachInfo(cell, ue);
9940 if(TRUE == ue->isEmtcUe)
9942 cellSchCmn->apisEmtcUl->rgSCHFreeUlUe(cell, ue);
9947 cellSchCmn->apisUl->rgSCHFreeUlUe(cell, ue);
9952 for(idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
9954 if(ue->cellInfo[idx] != NULLP)
9956 rgSCHSCellDelUeSCell(cell,ue,idx);
9963 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9965 cellSchCmn->apisEmtcDl->rgSCHFreeDlUe(cell, ue);
9970 cellSchCmn->apisDl->rgSCHFreeDlUe(cell, ue);
9972 rgSCHPwrUeDel(cell, ue);
9975 rgSCHCmnSpsUeDel(cell, ue);
9976 #endif /* LTEMAC_SPS*/
9979 rgSchCmnDlSfHqDel(ue, cell);
9981 /* DLFS UE delete */
9982 if (cellSchCmn->dl.isDlFreqSel)
9984 cellSchCmn->apisDlfs->rgSCHDlfsUeDel(cell, ue);
9986 node = ueUl->ulAllocLst.first;
9988 /* ccpu00117052 - MOD - Passing double pointer in all the places of
9989 rgSCHUtlFreeSBuf function call for proper NULLP assignment*/
9992 allRcd = (RgSchCmnAllocRecord *)node->node;
9994 cmLListDelFrm(&ueUl->ulAllocLst, &allRcd->lnk);
9995 rgSCHUtlFreeSBuf(cell->instIdx,
9996 (Data**)(&allRcd), (sizeof(RgSchCmnAllocRecord)));
9999 for(cnt = 0; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
10001 if (ue->ul.lcgArr[cnt].sch != NULLP)
10003 rgSCHUtlFreeSBuf(cell->instIdx,
10004 (Data**)(&(ue->ul.lcgArr[cnt].sch)), (sizeof(RgSchCmnLcg)));
10008 /* Fix : syed Moved hqEnt deinit to rgSCHCmnDlDeInitHqEnt */
10009 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId) & (CM_LTE_MAX_CELLS - 1));
10010 rgSCHUtlFreeSBuf(cell->instIdx,
10011 (Data**)(&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch))), (sizeof(RgSchCmnUe)));
10013 } /* rgSCHCmnUeDel */
10017 * @brief This function handles the common code rate configurations
10018 * done as part of RgrCellCfg/RgrCellRecfg.
10022 * Function: rgSCHCmnDlCnsdrCmnRt
10023 * Purpose: This function handles the common code rate configurations
10024 * done as part of RgrCellCfg/RgrCellRecfg.
10026 * Invoked by: Scheduler
10028 * @param[in] RgSchCellCb *cell
10029 * @param[in] RgrDlCmnCodeRateCfg *dlCmnCodeRate
10034 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt
10037 RgrDlCmnCodeRateCfg *dlCmnCodeRate
10040 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt(cell, dlCmnCodeRate)
10042 RgrDlCmnCodeRateCfg *dlCmnCodeRate;
10045 RgSchCmnCell *cellDl = RG_SCH_CMN_GET_CELL(cell);
10052 TRC2(rgSCHCmnDlCnsdrCmnRt);
10054 /* Code rate is data bits per 1024 physical-layer bits; since the modulation order is 2 (QPSK),
10055 * this is equivalent to bits per 1024/2 REs */
10056 if (dlCmnCodeRate->bcchPchRaCodeRate != 0)
10058 bitsPerRb = ((dlCmnCodeRate->bcchPchRaCodeRate * 2) *
10059 cellDl->dl.noResPerRb[3])/1024;
10063 bitsPerRb = ((RG_SCH_CMN_DEF_BCCHPCCH_CODERATE * 2) *
10064 cellDl->dl.noResPerRb[3])/1024;
10066 /* Store bitsPerRb in cellDl->dl to use later to determine
10067 * Number of RBs for UEs with SI-RNTI, P-RNTI and RA-RNTI */
10068 cellDl->dl.bitsPerRb = bitsPerRb;
10069 /* ccpu00115595 end*/
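/* Worked example for the bitsPerRb computation above (illustrative values):
 * with bcchPchRaCodeRate = 256 (i.e. 256 data bits per 1024 coded bits) and
 * noResPerRb[3] = 120 REs, bitsPerRb = (256 * 2 * 120) / 1024 = 60 bits per
 * RB, the factor of 2 being the QPSK bits carried per RE. */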
10070 /* calculate the ITbs for 2 RBs. Initialize ITbs to MAX value */
10073 bitsPer2Rb = bitsPerRb * rbNum;
10074 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer2Rb))
10077 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs2Rbs = 0) :
10078 (cellDl->dl.cmnChITbs.iTbs2Rbs = i-1);
10080 /* calculate the ITbs for 3 RBs. Initialize ITbs to MAX value */
10083 bitsPer3Rb = bitsPerRb * rbNum;
10084 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer3Rb))
10087 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs3Rbs = 0) :
10088 (cellDl->dl.cmnChITbs.iTbs3Rbs = i-1);
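/* The two loops above walk rgTbSzTbl (single-layer table) to find the
 * largest ITBS index whose transport block size still fits within
 * bitsPerRb * rbNum for 2 and 3 RBs respectively; the result, floored at 0
 * and capped by the i < 9 bound, is cached in cmnChITbs and presumably used
 * later when sizing common-channel (SI/Paging/RAR) allocations. */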
10091 pdcchBits = 1 + /* Flag for format0/format1a differentiation */
10092 1 + /* Localized/distributed VRB assignment flag */
10095 3 + /* Harq process Id */
10097 4 + /* Harq process Id */
10098 2 + /* UL Index or DAI */
10100 1 + /* New Data Indicator */
10103 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
10104 (cell->bwCfg.dlTotalBw + 1))/2);
10105 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
10106 Since VRB is local */
10107 /* For TDD consider DAI */
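/* Illustrative sizing of the resource-block-assignment field above: for a
 * 50 RB downlink, bw(bw+1)/2 = 1275 and ceil(log2(1275)) = 11, so 11 bits
 * are budgeted for the RIV of a localized VRB allocation. */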
10109 /* Convert the pdcchBits to actual pdcchBits required for transmission */
10110 if (dlCmnCodeRate->pdcchCodeRate != 0)
10112 pdcchBits = (pdcchBits * 1024)/dlCmnCodeRate->pdcchCodeRate;
10113 if (pdcchBits <= 288) /* 288 : Num of pdcch bits for aggrLvl=4 */
10115 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
10117 else /* 576 : Num of pdcch bits for aggrLvl=8 */
10119 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL8;
10124 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
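/* Note on the 288/576 thresholds above: one CCE spans 9 REGs = 36 REs, which
 * at QPSK carries 72 coded bits, so aggregation level 4 offers 4 * 72 = 288
 * PDCCH bits and aggregation level 8 offers 576. */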
10126 if (dlCmnCodeRate->ccchCqi == 0)
10132 cellDl->dl.ccchCqi = dlCmnCodeRate->ccchCqi;
10139 * @brief This function handles the configuration of cell for the first
10140 * time by the scheduler.
10144 * Function: rgSCHCmnDlRgrCellCfg
10145 * Purpose: Configuration received is stored into the data structures
10146 * Also, update the scheduler with the number of frames of
10147 * RACH preamble transmission.
10149 * Invoked by: BO and Scheduler
10151 * @param[in] RgSchCellCb* cell
10152 * @param[in] RgrCellCfg* cfg
10157 PRIVATE S16 rgSCHCmnDlRgrCellCfg
10164 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10170 RgSchCmnCell *cellSch;
10175 U8 maxDlSubfrms = cell->numDlSubfrms;
10176 U8 splSubfrmIdx = cfg->spclSfCfgIdx;
10179 RgSchTddSubfrmInfo subfrmInfo = rgSchTddMaxUlSubfrmTbl[cell->ulDlCfgIdx];
10190 TRC2(rgSCHCmnDlRgrCellCfg);
10193 cellSch = RG_SCH_CMN_GET_CELL(cell);
10194 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->\
10195 rachCfg.preambleFormat];
10196 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10197 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10199 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10200 3 TTI (MAX L1+L2 processing delay at the UE) */
10201 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10202 rgSchCmnHarqRtt[cell->ulDlCfgIdx] + 3;
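/* Illustrative evaluation of the Msg4 Tx delay formula above: with
 * maxMsg4HqTx = 5 and a HARQ RTT of 8 subframes for the configured UL/DL
 * configuration, msg4TxDelay = (5 - 1) * 8 + 3 = 35 subframes, the trailing
 * 3 TTIs covering the maximum L1+L2 processing delay at the UE. */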
10203 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10204 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10205 if (cfg->maxUePerDlSf == 0)
10207 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10209 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10215 if (cell->bwCfg.dlTotalBw <= 10)
10225 /* DwPTS Scheduling Changes Start */
10226 cellSch->dl.splSfCfg = splSubfrmIdx;
10228 if (cfg->isCpDlExtend == TRUE)
10230 if((0 == splSubfrmIdx) || (4 == splSubfrmIdx) ||
10231 (7 == splSubfrmIdx) || (8 == splSubfrmIdx)
10234 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10238 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10243 /* Refer to 36.213 Section 7.1.7 */
10244 if((0 == splSubfrmIdx) || (5 == splSubfrmIdx))
10246 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10250 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10253 /* DwPTS Scheduling Changes End */
10255 splSfCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10256 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
10258 for (sfCount = 0; sfCount < maxDlSubfrms; sfCount++)
10260 sf = cell->subFrms[sfCount];
10261 /* sfCount matches either the first special subframe, which occurs at index 0,
10262 * or one of the subsequent special subframes */
10263 if(subfrmInfo.switchPoints == 1)
10265 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10266 RG_SCH_CMN_10_MS_PRD, &subfrmInfo);
10270 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10271 RG_SCH_CMN_5_MS_PRD, &subfrmInfo);
10273 if(isSplfrm == TRUE)
10276 /* DwPTS Scheduling Changes Start */
10277 if (cell->splSubfrmCfg.isDlDataAllowed == TRUE)
10279 sf->sfType = RG_SCH_SPL_SF_DATA;
10283 sf->sfType = RG_SCH_SPL_SF_NO_DATA;
10285 /* DwPTS Scheduling Changes End */
10289 /* DwPTS Scheduling Changes Start */
10290 if (sf->sfNum != 0)
10292 sf->sfType = RG_SCH_DL_SF;
10296 sf->sfType = RG_SCH_DL_SF_0;
10298 /* DwPTS Scheduling Changes End */
10301 /* Calculate the number of CCEs per subframe in the cell */
10302 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][sf->sfNum];
10303 if(cell->dynCfiCb.isDynCfiEnb == TRUE)
10305 /* If the dynamic CFI feature is enabled, the default CFI
10306 * value of 1 is used */
10307 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][1];
10311 if (sf->sfType == RG_SCH_SPL_SF_DATA)
10313 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
10317 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi)];
10322 /* Initialize the RACH response scheduling related information */
10323 if(rgSCHCmnDlRachInfoInit(cell) != ROK)
10328 /* Allocate PRACH preamble list */
10329 rgSCHCmnDlCreateRachPrmLst(cell);
10331 /* Initialize PHICH offset information */
10332 rgSCHCmnDlPhichOffsetInit(cell);
10334 /* Update the size of HARQ ACK/NACK feedback table */
10335 /* The array size is increased by 2 to have enough free indices while the other
10336 * indices are busy waiting for HARQ feedback */
10337 cell->ackNackFdbkArrSize = rgSchTddANFdbkMapTbl[cell->ulDlCfgIdx] + 2;
10339 /* Initialize expected HARQ ACK/NACK feedback time */
10340 rgSCHCmnDlANFdbkInit(cell);
10342 /* Initialize UL association set index */
10343 if(cell->ulDlCfgIdx != 0)
10345 rgSCHCmnDlKdashUlAscInit(cell);
10348 if (cfg->isCpDlExtend == TRUE)
10350 cp = RG_SCH_CMN_EXT_CP;
10352 cell->splSubfrmCfg.dwPts =
10353 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlDwPts;
10355 if ( cell->splSubfrmCfg.dwPts == 0 )
10357 cell->isDwPtsCnted = FALSE;
10361 cell->isDwPtsCnted = TRUE;
10364 if(cfg->isCpUlExtend == TRUE)
10366 cell->splSubfrmCfg.upPts =
10367 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlExtUpPts;
10371 cell->splSubfrmCfg.upPts =
10372 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlNorUpPts;
10377 cp = RG_SCH_CMN_NOR_CP;
10379 cell->splSubfrmCfg.dwPts =
10380 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlDwPts;
10381 cell->isDwPtsCnted = TRUE;
10383 if(cfg->isCpUlExtend == TRUE)
10385 cell->splSubfrmCfg.upPts =
10386 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlExtUpPts;
10390 cell->splSubfrmCfg.upPts =
10391 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlNorUpPts;
10395 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10396 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++,cfiIdx++)
10398 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
10399 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10400 [cell->numTxAntPorts]][cfiIdx];
10401 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10402 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10403 [cell->numTxAntPorts]][cfiIdx];
10406 /* Initializing the values of CFI parameters */
10407 if(cell->dynCfiCb.isDynCfiEnb)
10409 /* If DCFI is enabled, current CFI value will start from 1 */
10410 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10414 /* If DCFI is disabled, current CFI value is set as default max allowed CFI value */
10415 cellSch->dl.currCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10416 cellSch->dl.newCfi = cellSch->dl.currCfi;
10419 /* Include CRS REs while calculating Efficiency
10420 * The number of Resource Elements occupied by CRS depends on Number of
10421 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10422 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10423 * details of the same. Please note that PDCCH overlap symbols would not
10424 * considered in CRS REs deduction */
10425 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10427 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10428 - numPdcchSym) *RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
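/* Illustrative evaluation of the expression above: for normal CP
 * (7 symbols/slot * 2 slots = 14 symbols) and one PDCCH symbol, an RB offers
 * (14 - 1) * 12 = 156 REs per subframe, from which the CRS REs for the
 * configured number of antenna ports are further deducted. */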
10431 /* DwPTS Scheduling Changes Start */
10432 antPortIdx = (cell->numTxAntPorts == 1)? 0:
10433 ((cell->numTxAntPorts == 2)? 1: 2);
10435 if (cp == RG_SCH_CMN_NOR_CP)
10437 splSfIdx = (splSubfrmIdx == 4)? 1: 0;
10441 splSfIdx = (splSubfrmIdx == 3)? 1: 0;
10444 numCrs = rgSchCmnDwptsCrs[splSfIdx][antPortIdx];
10446 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI-1; cfi++)
10448 /* If CFI is 2 and Ant Port is 4, don't consider the sym 1 CRS REs */
10449 if (antPortIdx == 2 && cfi == 2)
10453 cellSch->dl.numReDwPts[cfi] = ((cell->splSubfrmCfg.dwPts - cfi)*
10454 RB_SCH_CMN_NUM_SCS_PER_RB) - numCrs;
10456 /* DwPTS Scheduling Changes End */
10458 if (cfg->maxDlBwPerUe == 0)
10460 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10464 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10466 if (cfg->maxDlRetxBw == 0)
10468 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10472 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10474 /* Fix: MUE_PERTTI_DL*/
10475 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10476 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10477 if (cfg->maxUePerDlSf == 0)
10479 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10481 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10482 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10483 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10485 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10486 "Invalid configuration !: "
10487 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10488 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10492 else if (!cfg->maxCcchPerDlSf)
10494 /* ccpu00143032: maxCcchPerDlSf 0 means it is not configured by the application,
10495 * hence setting it to maxUePerDlSf. If maxCcchPerDlSf is 0, the scheduler
10496 * doesn't count CCCH allocations against the MaxUePerTti cap. Hence more than
10497 * 4 UEs get scheduled and SCH expects >16 HARQ PDUs in a TTI, which causes
10498 * an FLE crash in PHY, as PHY has a limit of 16 max */
10499 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10503 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
10505 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10510 /*ccpu00118273 - ADD - start */
10511 cmLListInit(&cellSch->dl.msg4RetxLst);
10513 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10516 #ifdef RG_PHASE2_SCHED
10517 if (cellSch->apisDlfs == NULLP) /* DFLS specific initialization */
10519 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10521 if (cfg->dlfsCfg.isDlFreqSel)
10523 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10529 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10532 /* Power related configuration */
10533 ret = rgSCHPwrCellCfg(cell, cfg);
10539 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10540 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10541 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10542 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10543 cellSch->dl.msg4pAVal = cfg->msg4pAVal;
10546 #else /* LTE_TDD */
10548 * @brief This function handles the configuration of cell for the first
10549 * time by the scheduler.
10553 * Function: rgSCHCmnDlRgrCellCfg
10554 * Purpose: Configuration received is stored into the data structures
10555 * Also, update the scheduler with the number of frames of
10556 * RACH preamble transmission.
10558 * Invoked by: BO and Scheduler
10560 * @param[in] RgSchCellCb* cell
10561 * @param[in] RgrCellCfg* cfg
10562 * @param[in] RgSchErrInfo* err
10567 PRIVATE S16 rgSCHCmnDlRgrCellCfg
10574 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10581 RgSchCmnCell *cellSch;
10588 TRC2(rgSCHCmnDlRgrCellCfg);
10590 cellSch = RG_SCH_CMN_GET_CELL(cell);
10592 /* Initialize the parameters with the ones received in the */
10593 /* configuration. */
10595 /* Added matrix 'rgRaPrmblToRaFrmTbl' for computation of RA
10596 * sub-frames from preamble format */
10597 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->rachCfg.preambleFormat];
10599 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10600 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10602 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10603 3 TTI (MAX L1+L2 processing delay at the UE) */
10604 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10605 rgSchCmnHarqRtt[7] + 3;
10607 if (cell->bwCfg.dlTotalBw <= 10)
10618 if (cell->isCpDlExtend == TRUE)
10620 cp = RG_SCH_CMN_EXT_CP;
10625 cp = RG_SCH_CMN_NOR_CP;
10629 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10630 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, cfiIdx++)
10632 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
10634 cellSch->dl.emtcCqiToTbsTbl[0][cfi] = rgSchEmtcCmnCqiToTbs[0][cp][cfiIdx];
10636 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10637 [cell->numTxAntPorts]][cfiIdx];
10638 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10640 cellSch->dl.emtcCqiToTbsTbl[1][cfi] = rgSchEmtcCmnCqiToTbs[1][cp][cfiIdx];
10642 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10643 [cell->numTxAntPorts]][cfiIdx];
10646 /* Initializing the values of CFI parameters */
10647 if(cell->dynCfiCb.isDynCfiEnb)
10649 /* If DCFI is enabled, current CFI value will start from 1 */
10650 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10654 /* If DCFI is disabled, current CFI value is set as default CFI value */
10655 cellSch->dl.currCfi = cellSch->cfiCfg.cfi;
10656 cellSch->dl.newCfi = cellSch->dl.currCfi;
10659 /* Include CRS REs while calculating Efficiency
10660 * The number of Resource Elements occupied by CRS depends on Number of
10661 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10662 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10663 * details of the same. Please note that PDCCH overlap symbols would not
10664 * considered in CRS REs deduction */
10665 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10667 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10668 - numPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
10671 if (cfg->maxDlBwPerUe == 0)
10673 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10677 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10679 if (cfg->maxDlRetxBw == 0)
10681 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10685 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10688 /* Fix: MUE_PERTTI_DL*/
10689 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10690 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10691 if (cfg->maxUePerDlSf == 0)
10693 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10695 /* Fix: MUE_PERTTI_DL syed validating Cell Configuration */
10696 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10698 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10699 "FAILED MaxUePerDlSf(%u) < MaxDlUeNewTxPerTti(%u)",
10700 cellSch->dl.maxUePerDlSf,
10701 cellSch->dl.maxUeNewTxPerTti);
10704 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10705 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10707 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid configuration !: "
10708 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10709 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10713 else if (!cfg->maxCcchPerDlSf)
10715     /* ccpu00143032: maxCcchPerDlSf 0 means it was not configured by the application,
10716      * hence setting it to maxUePerDlSf. If maxCcchPerDlSf is 0, the scheduler
10717      * doesn't consider CCCH allocation in the MaxUePerTti cap. Hence more than
10718      * 4 UEs get scheduled and SCH expects >16 HARQ PDUs in a TTI, which causes
10719      * an FLE crash in PHY, as PHY has a limit of 16 max. */
10720 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10724 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
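   /* Example of the CCCH cap handling above (configuration values assumed for
    * illustration): with maxUePerDlSf = 8, a configured maxCcchPerDlSf of 10 is
    * flagged as an invalid configuration, maxCcchPerDlSf = 0 defaults to 8, and
    * any value from 1 to 8 is taken as is. */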
10728 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10732 cmLListInit(&cellSch->dl.msg4RetxLst);
10734 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10737 #ifdef RG_PHASE2_SCHED
10738 if (cellSch->apisDlfs == NULLP) /* DFLS specific initialization */
10740 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10742 if (cfg->dlfsCfg.isDlFreqSel)
10744 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10750 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10753 /* Power related configuration */
10754 ret = rgSCHPwrCellCfg(cell, cfg);
10760 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10761 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10762 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10763 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10764 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10767 #endif /* LTE_TDD */
10769 /***********************************************************
10771 * Func : rgSCHCmnUlCalcReqRbCeil
10773 * Desc : Calculate RB required to satisfy 'bytes' for
10775 * Returns number of RBs such that requirement
10776 * is necessarily satisfied (does a 'ceiling'
10779 * Ret : Required RBs (U8)
10785 **********************************************************/
10787 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil
10791 RgSchCmnUlCell *cellUl
10794 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil(bytes, cqi, cellUl)
10797 RgSchCmnUlCell *cellUl;
10800 U32 numRe = RGSCH_CEIL((bytes * 8) * 1024, rgSchCmnUlCqiTbl[cqi].eff);
10801 TRC2(rgSCHCmnUlCalcReqRbCeil);
10802 RETVALUE((U8)RGSCH_CEIL(numRe, RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl)));
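   /* Worked example (assumed numbers, for illustration only): for bytes = 9,
    * a CQI whose rgSchCmnUlCqiTbl[].eff entry is 2048 (i.e. 2 bits per RE,
    * since eff is scaled by 1024) and 132 REs per RB,
    * numRe = ceil((9 * 8 * 1024) / 2048) = 36, and the function returns
    * ceil(36 / 132) = 1 RB. */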
10805 /***********************************************************
10807 * Func : rgSCHCmnPrecompMsg3Vars
10809 * Desc : Precomputes the following for msg3 allocation:
10810 * 1. numSb and Imcs for msg size A
10811 * 2. numSb and Imcs otherwise
10815  * Notes: The corresponding vars in the cellUl struct are filled
10820 **********************************************************/
10822 PRIVATE S16 rgSCHCmnPrecompMsg3Vars
10824 RgSchCmnUlCell *cellUl,
10831 PRIVATE S16 rgSCHCmnPrecompMsg3Vars(cellUl, ccchCqi, msgSzA, sbSize, isEcp)
10832 RgSchCmnUlCell *cellUl;
10844 U16 msg3GrntSz = 0;
10846 TRC2(rgSCHCmnPrecompMsg3Vars);
10848 if (ccchCqi > cellUl->max16qamCqi)
10850 ccchCqi = cellUl->max16qamCqi;
10852 /* #ifndef RG_SCH_CMN_EXP_CP_SUP For ECP Pick the index 1 */
10854 ccchTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10855 ccchMcs = rgSCHCmnUlGetIMcsFrmITbs(ccchTbs, CM_LTE_UE_CAT_1);
10857 /* MCS should fit in 4 bits in RAR */
10863 /* Limit the ccchMcs to 15 as it
10864 * can be inferred from 36.213, section 6.2 that msg3 imcs
10866    * Since the UE doesn't exist yet, we use CAT_1 for the UE
10868 while((ccchMcs = (rgSCHCmnUlGetIMcsFrmITbs(
10869 rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi],CM_LTE_UE_CAT_1))
10871 RG_SCH_CMN_MAX_MSG3_IMCS)
10876 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10878 if (msgSzA < RGSCH_MIN_MSG3_GRNT_SZ)
10882 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(msgSzA, ccchCqi, cellUl), sbSize);
10884 numRb = numSb * sbSize;
10885 msg3GrntSz = 8 * msgSzA;
10887 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10890 numRb = numSb * sbSize;
10892 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10896    /* Reversed (corrected) the assignment for preamble-GrpA.
10897     * Refer to TS 36.321, section 5.1.2 */
10898 cellUl->ra.prmblBNumSb = numSb;
10899 cellUl->ra.prmblBIMcs = ccchMcs;
10900 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(RGSCH_MIN_MSG3_GRNT_SZ, \
10904 numRb = numSb * sbSize;
10905 msg3GrntSz = 8 * RGSCH_MIN_MSG3_GRNT_SZ;
10906 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10909 numRb = numSb * sbSize;
10911 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10915    /* Reversed (corrected) the assignment for preamble-GrpA.
10916     * Refer to TS 36.321, section 5.1.2 */
10917 cellUl->ra.prmblANumSb = numSb;
10918 cellUl->ra.prmblAIMcs = ccchMcs;
10922 PUBLIC U32 gPrntPucchDet=0;
10925 /***********************************************************
10927 * Func : rgSCHCmnUlCalcAvailBw
10929 * Desc : Calculates bandwidth available for PUSCH scheduling.
10931 * Ret : S16 (ROK/RFAILED)
10937 **********************************************************/
10939 PRIVATE S16 rgSCHCmnUlCalcAvailBw
10942 RgrCellCfg *cellCfg,
10948 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
10950 RgrCellCfg *cellCfg;
10957 U8 ulBw = cell->bwCfg.ulTotalBw;
10958 U8 n2Rb = cell->pucchCfg.resourceSize;
10959 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
10960 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
10961 U8 n1Cs = cell->pucchCfg.cyclicShift;
10968 U8 exclRb; /* RBs to exclude */
10971 /* To avoid PUCCH and PUSCH collision issue */
10975 /* Maximum value of M as per Table 10.1-1 */
10976 U8 M[RGSCH_MAX_TDD_UL_DL_CFG] = {1, 2, 4, 3, 4, 9, 1};
10978 TRC2(rgSCHCmnUlCalcAvailBw);
10980 if (cell->isCpUlExtend)
10985 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
10987 /* Considering the max no. of CCEs for PUSCH BW calculation
10988 * based on min mi value */
10989 if (cell->ulDlCfgIdx == 0 || cell->ulDlCfgIdx == 6)
10998 totalCce = cell->dynCfiCb.cfi2NCceTbl[mi][cfi];
11000 P = rgSCHCmnGetPValFrmCCE(cell, totalCce-1);
11001 n1PlusOne = cell->rgSchTddNpValTbl[P + 1];
11002 n1Max = (M[cell->ulDlCfgIdx] - 1)*n1PlusOne + (totalCce-1) + n1Pucch;
11004 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
11006 n1RbPart = (c*n1Cs)/pucchDeltaShft;
11007 n1Rb = (n1Max - n1RbPart)/ n1PerRb;
11008 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
11010 /* get the total Number of RB's to be excluded for PUSCH */
11012 if(n1Pucch < n1RbPart)
11018 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
11020 puschRbStart = exclRb/2 + 1;
11022 /* Num of PUCCH RBs = puschRbStart*2 */
11023 if (puschRbStart * 2 >= ulBw)
11025 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11029 *rbStartRef = puschRbStart;
11030 *bwAvailRef = ulBw - puschRbStart * 2;
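   /* Illustrative example of the exclusion arithmetic above (all PUCCH values
    * assumed): with n2Rb = 2, mixedRb = 1 and n1Rb = 3, exclRb = 6, so
    * puschRbStart = 6/2 + 1 = 4; for ulBw = 50 RBs this leaves
    * bwAvail = 50 - (4 * 2) = 42 RBs for PUSCH. */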
11032 if(cell->pucchCfg.maxPucchRb !=0 &&
11033 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11035 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11042 /***********************************************************
11044 * Func : rgSCHCmnUlCalcAvailBw
11046 * Desc : Calculates bandwidth available for PUSCH scheduling.
11048 * Ret : S16 (ROK/RFAILED)
11054 **********************************************************/
11056 PRIVATE S16 rgSCHCmnUlCalcAvailBw
11059 RgrCellCfg *cellCfg,
11065 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
11067 RgrCellCfg *cellCfg;
11074 U8 ulBw = cell->bwCfg.ulTotalBw;
11075 U8 n2Rb = cell->pucchCfg.resourceSize;
11076 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
11077 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
11078 U8 n1Cs = cell->pucchCfg.cyclicShift;
11084 U8 exclRb; /* RBs to exclude */
11088 U16 numOfN3PucchRb;
11089 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11092 TRC2(rgSCHCmnUlCalcAvailBw);
11094 if (cell->isCpUlExtend)
11099 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
11101 totalCce = cell->dynCfiCb.cfi2NCceTbl[0][cfi];
11103 n1Max = n1Pucch + totalCce-1;
11105 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
11107 n1RbPart = (c*n1Cs)/pucchDeltaShft;
11108 n1Rb = (U8)((n1Max - n1RbPart) / n1PerRb);
11109 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
11111 /* get the total Number of RB's to be excluded for PUSCH */
11113 if(n1Pucch < n1RbPart)
11119 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
11121 /*Support for PUCCH Format 3*/
11123 if (cell->isPucchFormat3Sptd)
11125 numOfN3PucchRb = RGSCH_CEIL(cellSch->dl.maxUePerDlSf,5);
11126 exclRb = exclRb + numOfN3PucchRb;
11129 puschRbStart = exclRb/2 + 1;
11133 #ifndef ALIGN_64BIT
11134 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%ld:%d:%d:%d:%d:%d]\n",
11135 cell->crntTime.sfn, cell->crntTime.subframe, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11137 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%d:%d:%d:%d:%d:%d]\n",
11138 cell->crntTime.sfn, cell->crntTime.subframe, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11142 if (puschRbStart*2 >= ulBw)
11144 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11148 *rbStartRef = puschRbStart;
11149 *bwAvailRef = ulBw - puschRbStart * 2;
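   /* Illustrative example for this variant (values assumed), including the
    * PUCCH format 3 reservation applied when cell->isPucchFormat3Sptd is TRUE:
    * with cellSch->dl.maxUePerDlSf = 16, numOfN3PucchRb = ceil(16/5) = 4 extra
    * RBs; if n2Rb + mixedRb + n1Rb = 6, then exclRb = 6 + 4 = 10,
    * puschRbStart = 10/2 + 1 = 6, and for ulBw = 50 RBs the available PUSCH
    * bandwidth is 50 - 12 = 38 RBs. */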
11151 if(cell->pucchCfg.maxPucchRb !=0 &&
11152 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11154 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11163 /***********************************************************
11165 * Func : rgSCHCmnUlCellInit
11167 * Desc : Uplink scheduler initialisation for cell.
11175 **********************************************************/
11177 PRIVATE S16 rgSCHCmnUlCellInit
11180 RgrCellCfg *cellCfg
11183 PRIVATE S16 rgSCHCmnUlCellInit(cell, cellCfg)
11185 RgrCellCfg *cellCfg;
11189 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11190 U8 maxUePerUlSf = cellCfg->maxUePerUlSf;
11192 /* Added configuration for maximum number of MSG3s */
11193 U8 maxMsg3PerUlSf = cellCfg->maxMsg3PerUlSf;
11195 U8 maxUlBwPerUe = cellCfg->maxUlBwPerUe;
11196 U8 sbSize = cellCfg->puschSubBand.size;
11204 U16 ulDlCfgIdx = cell->ulDlCfgIdx;
11205 /* [ccpu00127294]-MOD-Change the max Ul subfrms size in TDD */
11206 U8 maxSubfrms = 2 * rgSchTddNumUlSf[ulDlCfgIdx];
11207 U8 ulToDlMap[12] = {0}; /* maximum 6 Subframes in UL * 2 */
11208 U8 maxUlsubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
11209 [RGSCH_NUM_SUB_FRAMES-1];
11213 U8 maxSubfrms = RG_SCH_CMN_UL_NUM_SF;
11219 #if (defined(LTE_L2_MEAS) )
11220 Inst inst = cell->instIdx;
11221 #endif /* LTE_L2_MEAS */
11222 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
11224 TRC2(rgSCHCmnUlCellInit);
11226 cellUl->maxUeNewTxPerTti = cellCfg->maxUlUeNewTxPerTti;
11227 if (maxUePerUlSf == 0)
11229 maxUePerUlSf = RG_SCH_CMN_MAX_UE_PER_UL_SF;
11232 if (maxMsg3PerUlSf == 0)
11234 maxMsg3PerUlSf = RG_SCH_CMN_MAX_MSG3_PER_UL_SF;
11236 /* fixed the problem while sending raRsp
11237 * if maxMsg3PerUlSf is greater than
11238 * RGSCH_MAX_RNTI_PER_RARNTI
11240 if(maxMsg3PerUlSf > RGSCH_MAX_RNTI_PER_RARNTI)
11242 maxMsg3PerUlSf = RGSCH_MAX_RNTI_PER_RARNTI;
11245 if(maxMsg3PerUlSf > maxUePerUlSf)
11247 maxMsg3PerUlSf = maxUePerUlSf;
11250 /*cellUl->maxAllocPerUlSf = maxUePerUlSf + maxMsg3PerUlSf;*/
11251 /*Max MSG3 should be a subset of Max UEs*/
11252 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11253 cellUl->maxMsg3PerUlSf = maxMsg3PerUlSf;
11255 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11257 /* Fix: MUE_PERTTI_UL syed validating Cell Configuration */
11258 if (cellUl->maxAllocPerUlSf < cellUl->maxUeNewTxPerTti)
11260 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
11261 "FAILED: MaxUePerUlSf(%u) < MaxUlUeNewTxPerTti(%u)",
11262 cellUl->maxAllocPerUlSf,
11263 cellUl->maxUeNewTxPerTti);
11269 for(idx = 0; idx < RGSCH_SF_ALLOC_SIZE; idx++)
11271 for(idx = 0; idx < RGSCH_NUM_SUB_FRAMES; idx++)
11275 ret = rgSCHUtlAllocSBuf(inst, (Data **)&(cell->sfAllocArr[idx].
11276 ulUeInfo.ulAllocInfo), (cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc)));
11279 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation failed ");
11284 if (maxUlBwPerUe == 0)
11286 /* ccpu00139362- Setting to configured UL BW instead of MAX BW(100)*/
11287 maxUlBwPerUe = cell->bwCfg.ulTotalBw;
11289 cellUl->maxUlBwPerUe = maxUlBwPerUe;
11291 /* FOR RG_SCH_CMN_EXT_CP_SUP */
11292 if (!cellCfg->isCpUlExtend)
11294 cellUl->ulNumRePerRb = 12 * (14 - RGSCH_UL_SYM_DMRS_SRS);
11298 cellUl->ulNumRePerRb = 12 * (12 - RGSCH_UL_SYM_DMRS_SRS);
11301 if (sbSize != rgSchCmnMult235Tbl[sbSize].match)
11303 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Invalid subband size %d", sbSize);
11306    // Setting the subband size to 4, which is the size of a VRBG in 5GTF
11308 sbSize = MAX_5GTF_VRBG_SIZE;
11311 maxSbPerUe = maxUlBwPerUe / sbSize;
11312 if (maxSbPerUe == 0)
11314 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnUlCellInit(): "
11315 "maxUlBwPerUe/sbSize is zero");
11318 cellUl->maxSbPerUe = rgSchCmnMult235Tbl[maxSbPerUe].prvMatch;
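   /* Example of the subband cap above (values assumed for illustration): with
    * maxUlBwPerUe = 30 RBs and sbSize = 4, maxSbPerUe = 30/4 = 7, which the
    * rgSchCmnMult235Tbl lookup maps to the previous value that is a product of
    * powers of 2, 3 and 5, i.e. 6 subbands per UE. */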
11320 /* CQI related updations */
11321 if ((!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->ulCmnCodeRate.ccchCqi))
11322 || (!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->trgUlCqi.trgCqi)))
11324 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnUlCellInit(): "
11328 cellUl->dfltUlCqi = cellCfg->ulCmnCodeRate.ccchCqi;
11330 /* Changed the logic to determine maxUlCqi.
11331 * For a 16qam UE, maxUlCqi is the CQI Index at which
11332 * efficiency is as close as possible to RG_SCH_MAX_CODE_RATE_16QAM
11333 * Refer to 36.213-8.6.1 */
11334 for (i = RG_SCH_CMN_UL_NUM_CQI - 1;i > 0; --i)
11336 RLOG_ARG2(L_INFO,DBG_CELLID,cell->cellId,
11339 rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i]);
11340 #ifdef MAC_SCH_STATS
11341 /* ccpu00128489 ADD Update mcs in hqFailStats here instead of at CRC
11342 * since CQI to MCS mapping does not change. The only exception is for
11343 * ITBS = 19 where the MCS can be 20 or 21 based on the UE cat. We
11344 * choose 20, instead of 21, ie UE_CAT_3 */
11345 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11346 RG_SCH_CMN_UL_TBS_TO_MCS(iTbs, hqFailStats.ulCqiStat[i - 1].mcs);
11349 for (i = RG_SCH_CMN_UL_NUM_CQI - 1; i != 0; --i)
11351 /* Fix for ccpu00123912*/
11352 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11353 if (iTbs <= RGSCH_UL_16QAM_MAX_ITBS) /* corresponds to 16QAM */
11355 RLOG_ARG1(L_INFO,DBG_CELLID,cell->cellId,
11356 "16 QAM CQI %u", i);
11357 cellUl->max16qamCqi = i;
11363 /* Precompute useful values for RA msg3 */
11364 ret = rgSCHCmnPrecompEmtcMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11365 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11372 /* Precompute useful values for RA msg3 */
11373 ret = rgSCHCmnPrecompMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11374 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11380 cellUl->sbSize = sbSize;
11383 cellUl->numUlSubfrms = maxSubfrms;
11385 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->ulSfArr,
11386 cellUl->numUlSubfrms * sizeof(RgSchUlSf));
11390 cellUl->numUlSubfrms = 0;
11394 /* store the DL subframe corresponding to the PUSCH offset
11395 * in their respective UL subframe */
11396 for(i=0; i < RGSCH_NUM_SUB_FRAMES; i++)
11398 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][i] != 0)
11400 subfrm = (i + rgSchTddPuschTxKTbl[ulDlCfgIdx][i]) % \
11401 RGSCH_NUM_SUB_FRAMES;
11402 subfrm = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subfrm]-1;
11403 dlIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][i]-1;
11404 RGSCH_ARRAY_BOUND_CHECK( cell->instIdx, ulToDlMap, subfrm);
11405 ulToDlMap[subfrm] = dlIdx;
11408 /* Copy the information in the remaining UL subframes based
11409 * on number of HARQ processes */
11410 for(i=maxUlsubfrms; i < maxSubfrms; i++)
11412 subfrm = i-maxUlsubfrms;
11413 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, i);
11414 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, subfrm)
11415 ulToDlMap[i] = ulToDlMap[subfrm];
11419 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++)
11422 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11424 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11433 cell->ulAvailBw = bwAvail;
11436 numSb = bwAvail/sbSize;
11438 cell->dynCfiCb.bwInfo[cfi].startRb = rbStart;
11439 cell->dynCfiCb.bwInfo[cfi].numSb = numSb;
11442 if(0 == cell->dynCfiCb.maxCfi)
11444 RLOG_ARG3(L_ERROR,DBG_CELLID,cell->cellId,
11445 "Incorrect Default CFI(%u), maxCfi(%u), maxPucchRb(%d)",
11446 cellSch->cfiCfg.cfi, cell->dynCfiCb.maxCfi,
11447 cell->pucchCfg.maxPucchRb);
11453 cellUl->dmrsArrSize = cell->dynCfiCb.bwInfo[1].numSb;
11454 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->dmrsArr,
11455 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11460 for (i = 0; i < cellUl->dmrsArrSize; ++i)
11462 cellUl->dmrsArr[i] = cellCfg->puschSubBand.dmrs[i];
11465 /* Init subframes */
11466 for (i = 0; i < maxSubfrms; ++i)
11468 ret = rgSCHUtlUlSfInit(cell, &cellUl->ulSfArr[i], i,
11469 cellUl->maxAllocPerUlSf);
11472 for (; i != 0; --i)
11474 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[i-1]);
11476 /* ccpu00117052 - MOD - Passing double pointer
11477 for proper NULLP assignment*/
11478 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)(&(cellUl->dmrsArr)),
11479 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11481 /* ccpu00117052 - MOD - Passing double pointer
11482 for proper NULLP assignment*/
11483 rgSCHUtlFreeSBuf(cell->instIdx,
11484 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11489 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cellUl);
11494 * @brief Scheduler processing on cell configuration.
11498 * Function : rgSCHCmnRgrCellCfg
11500 * This function does requisite initialisation
11501 * and setup for scheduler1 when a cell is
11504 * @param[in] RgSchCellCb *cell
11505 * @param[in] RgrCellCfg *cellCfg
11506 * @param[out] RgSchErrInfo *err
11512 PUBLIC S16 rgSCHCmnRgrCellCfg
11515 RgrCellCfg *cellCfg,
11519 PUBLIC S16 rgSCHCmnRgrCellCfg(cell, cellCfg, err)
11521 RgrCellCfg *cellCfg;
11526 RgSchCmnCell *cellSch;
11527 TRC2(rgSCHCmnRgrCellCfg);
11529 /* As part of RGR cell configuration, validate the CRGCellCfg
11530 * There is no trigger for crgCellCfg from SC1 */
11531 /* Removed failure check for Extended CP */
11533 if (((ret = rgSCHUtlAllocSBuf(cell->instIdx,
11534 (Data**)&(cell->sc.sch), (sizeof(RgSchCmnCell)))) != ROK))
11536 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
11537 "Memory allocation FAILED");
11538 err->errCause = RGSCHERR_SCH_CFG;
11541 cellSch = (RgSchCmnCell *)(cell->sc.sch);
11542 cellSch->cfiCfg = cellCfg->cfiCfg;
11543 cellSch->trgUlCqi.trgCqi = cellCfg->trgUlCqi.trgCqi;
11544 /* Initialize the scheduler refresh timer queues */
11545 cellSch->tmrTqCp.nxtEnt = 0;
11546 cellSch->tmrTqCp.tmrLen = RG_SCH_CMN_NUM_REFRESH_Q;
11548    /* RACHO: Initialize the RACH dedicated preamble information */
11549 rgSCHCmnCfgRachDedPrm(cell);
11551 /* Initialize 'Np' value for each 'p' used for
11552 * HARQ ACK/NACK reception */
11553 rgSCHCmnDlNpValInit(cell);
11556 /* Initialize 'Np' value for each 'p' used for
11557 * HARQ ACK/NACK reception */
11559 rgSCHCmnDlNpValInit(cell);
11562 /* Now perform uplink related initializations */
11563 ret = rgSCHCmnUlCellInit(cell, cellCfg);
11566 /* There is no downlink deinit to be performed */
11567 err->errCause = RGSCHERR_SCH_CFG;
11570 ret = rgSCHCmnDlRgrCellCfg(cell, cellCfg, err);
11573 err->errCause = RGSCHERR_SCH_CFG;
11576 /* DL scheduler has no initializations to make */
11577 /* As of now DL scheduler always returns ROK */
11579 rgSCHCmnGetDciFrmtSizes(cell);
11580 rgSCHCmnGetCqiDciFrmt2AggrLvl(cell);
11582 rgSCHCmnGetEmtcDciFrmtSizes(cell);
11583 rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl(cell);
11584 #endif /* EMTC_ENABLE */
11587 if(TRUE == cellCfg->emtcEnable)
11589 cellSch->apisEmtcUl = &rgSchEmtcUlSchdTbl[0];
11590 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
11597 cellSch->apisUl = &rgSchUlSchdTbl[RG_SCH_CMN_GET_UL_SCHED_TYPE(cell)];
11598 ret = cellSch->apisUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
11604 if(TRUE == cellCfg->emtcEnable)
11606 cellSch->apisEmtcDl = &rgSchEmtcDlSchdTbl[0];
11607 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11614 cellSch->apisDl = &rgSchDlSchdTbl[RG_SCH_CMN_GET_DL_SCHED_TYPE(cell)];
11616 /* Perform SPS specific initialization for the cell */
11617 ret = rgSCHCmnSpsCellCfg(cell, cellCfg, err);
11623 ret = cellSch->apisDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11628 rgSCHCmnInitVars(cell);
11631 } /* rgSCHCmnRgrCellCfg*/
11635 * @brief This function handles the reconfiguration of cell.
11639 * Function: rgSCHCmnRgrCellRecfg
11640 * Purpose: Update the reconfiguration parameters.
11642 * Invoked by: Scheduler
11644 * @param[in] RgSchCellCb* cell
11649 PUBLIC S16 rgSCHCmnRgrCellRecfg
11652 RgrCellRecfg *recfg,
11656 PUBLIC S16 rgSCHCmnRgrCellRecfg(cell, recfg, err)
11658 RgrCellRecfg *recfg;
11663 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11664 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11666 TRC2(rgSCHCmnRgrCellRecfg);
11668 if (recfg->recfgTypes & RGR_CELL_UL_CMNRATE_RECFG)
11670 U8 oldCqi = cellUl->dfltUlCqi;
11671 if (!RG_SCH_CMN_UL_IS_CQI_VALID(recfg->ulCmnCodeRate.ccchCqi))
11673 err->errCause = RGSCHERR_SCH_CFG;
11674 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnRgrCellRecfg(): "
11678 cellUl->dfltUlCqi = recfg->ulCmnCodeRate.ccchCqi;
11679 ret = rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11680 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
11683 cellUl->dfltUlCqi = oldCqi;
11684 rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11685 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
11690 if (recfg->recfgTypes & RGR_CELL_DL_CMNRATE_RECFG)
11692 if (rgSCHCmnDlCnsdrCmnRt(cell, &recfg->dlCmnCodeRate) != ROK)
11694 err->errCause = RGSCHERR_SCH_CFG;
11700 if(TRUE == cell->emtcEnable)
11702 /* Invoke UL sched for cell Recfg */
11703 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11709 /* Invoke DL sched for cell Recfg */
11710 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
11719 /* Invoke UL sched for cell Recfg */
11720 ret = cellSch->apisUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11726 /* Invoke DL sched for cell Recfg */
11727 ret = cellSch->apisDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
11734 if (recfg->recfgTypes & RGR_CELL_DLFS_RECFG)
11736 ret = cellSch->apisDlfs->rgSCHDlfsCellRecfg(cell, recfg, err);
11741 cellSch->dl.isDlFreqSel = recfg->dlfsRecfg.isDlFreqSel;
11744 if (recfg->recfgTypes & RGR_CELL_PWR_RECFG)
11746 ret = rgSCHPwrCellRecfg(cell, recfg);
11756 /***********************************************************
11758 * Func : rgSCHCmnUlCellDeinit
11760 * Desc : Uplink scheduler de-initialisation for cell.
11768 **********************************************************/
11770 PRIVATE Void rgSCHCmnUlCellDeinit
11775 PRIVATE Void rgSCHCmnUlCellDeinit(cell)
11779 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11782 U8 maxSubfrms = cellUl->numUlSubfrms;
11785 CmLList *lnk = NULLP;
11786 RgSchL2MeasCb *measCb;
11788 TRC2(rgSCHCmnUlCellDeinit);
11791 for(ulSfIdx = 0; ulSfIdx < RGSCH_SF_ALLOC_SIZE; ulSfIdx++)
11793 for(ulSfIdx = 0; ulSfIdx < RGSCH_NUM_SUB_FRAMES; ulSfIdx++)
11796 if(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo != NULLP)
11798 /* ccpu00117052 - MOD - Passing double pointer
11799 for proper NULLP assignment*/
11800 rgSCHUtlFreeSBuf(cell->instIdx,
11801 (Data **)(&(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo)),
11802 cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc));
11804 /* ccpu00117052 - DEL - removed explicit NULLP assignment
11805 as it is done in above utility function */
11808 /* Free the memory allocated to measCb */
11809 lnk = cell->l2mList.first;
11810 while(lnk != NULLP)
11812 measCb = (RgSchL2MeasCb *)lnk->node;
11813 cmLListDelFrm(&cell->l2mList, lnk);
11815 /* ccpu00117052 - MOD - Passing double pointer
11816 for proper NULLP assignment*/
11817 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&measCb,\
11818 sizeof(RgSchL2MeasCb));
11821 if (cellUl->dmrsArr != NULLP)
11823 /* ccpu00117052 - MOD - Passing double pointer
11824 for proper NULLP assignment*/
11825 rgSCHUtlFreeSBuf(cell->instIdx,(Data **)(&(cellUl->dmrsArr)),
11826 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11828 /* De-init subframes */
11830 for (ulSfIdx = 0; ulSfIdx < maxSubfrms; ++ulSfIdx)
11832 for (ulSfIdx = 0; ulSfIdx < RG_SCH_CMN_UL_NUM_SF; ++ulSfIdx)
11835 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[ulSfIdx]);
11839 if (cellUl->ulSfArr != NULLP)
11841 /* ccpu00117052 - MOD - Passing double pointer
11842 for proper NULLP assignment*/
11843 rgSCHUtlFreeSBuf(cell->instIdx,
11844 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11852 * @brief Scheduler processing for cell delete.
11856 * Function : rgSCHCmnCellDel
11858 * This functions de-initialises and frees memory
11859 * taken up by scheduler1 for the entire cell.
11861 * @param[in] RgSchCellCb *cell
11865 PUBLIC Void rgSCHCmnCellDel
11870 PUBLIC Void rgSCHCmnCellDel(cell)
11874 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11875 TRC2(rgSCHCmnCellDel);
11880 if (cellSch == NULLP)
11884 /* Perform the deinit for the UL scheduler */
11885 rgSCHCmnUlCellDeinit(cell);
11887 if(TRUE == cell->emtcEnable)
11889 if (cellSch->apisEmtcUl)
11891 cellSch->apisEmtcUl->rgSCHFreeUlCell(cell);
11895 if (cellSch->apisUl)
11897       /* API pointer checks added (here and below in
11898        * this function). Please check. - antriksh */
11899 cellSch->apisUl->rgSCHFreeUlCell(cell);
11902 /* Perform the deinit for the DL scheduler */
11903 cmLListInit(&cellSch->dl.taLst);
11904 if (cellSch->apisDl)
11906 cellSch->apisDl->rgSCHFreeDlCell(cell);
11909 if (cellSch->apisEmtcDl)
11911 rgSCHEmtcInitTaLst(&cellSch->dl);
11913 cellSch->apisEmtcDl->rgSCHFreeDlCell(cell);
11917 /* DLFS de-initialization */
11918 if (cellSch->dl.isDlFreqSel && cellSch->apisDlfs)
11920 cellSch->apisDlfs->rgSCHDlfsCellDel(cell);
11923 rgSCHPwrCellDel(cell);
11925 rgSCHCmnSpsCellDel(cell);
11928 /* ccpu00117052 - MOD - Passing double pointer
11929 for proper NULLP assignment*/
11930 rgSCHUtlFreeSBuf(cell->instIdx,
11931 (Data**)(&(cell->sc.sch)), (sizeof(RgSchCmnCell)));
11933 } /* rgSCHCmnCellDel */
11937 * @brief This function validates QOS parameters for DL.
11941 * Function: rgSCHCmnValidateDlQos
11942 * Purpose: This function validates QOS parameters for DL.
11944 * Invoked by: Scheduler
11946 * @param[in] CrgLchQosCfg *dlQos
11951 PRIVATE S16 rgSCHCmnValidateDlQos
11953 RgrLchQosCfg *dlQos
11956 PRIVATE S16 rgSCHCmnValidateDlQos(dlQos)
11957 RgrLchQosCfg *dlQos;
11960 U8 qci = dlQos->qci;
11962 TRC2(rgSCHCmnValidateDlQos);
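   /* Example of the validation below (values assumed for illustration): for a
    * QCI in the [RG_SCH_CMN_GBR_QCI_START, RG_SCH_CMN_GBR_QCI_END] range, a
    * configuration with gbr = 500 and mbr = 400 is rejected since mbr < gbr;
    * mbr = 0 is likewise rejected for GBR QCIs. */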
11964 if ( qci < RG_SCH_CMN_MIN_QCI || qci > RG_SCH_CMN_MAX_QCI )
11969 if ((qci >= RG_SCH_CMN_GBR_QCI_START) &&
11970 (qci <= RG_SCH_CMN_GBR_QCI_END))
11972 if ((dlQos->mbr == 0) || (dlQos->mbr < dlQos->gbr))
11981 * @brief Scheduler invocation on logical channel addition.
11985 * Function : rgSCHCmnRgrLchCfg
11987  * This function does the required processing when a new
11988 * (dedicated) logical channel is added. Assumes lcg
11989 * pointer in ulLc is set.
11991 * @param[in] RgSchCellCb *cell
11992 * @param[in] RgSchUeCb *ue
11993 * @param[in] RgSchDlLcCb *dlLc
11994 * @param[int] RgrLchCfg *lcCfg
11995 * @param[out] RgSchErrInfo *err
12001 PUBLIC S16 rgSCHCmnRgrLchCfg
12010 PUBLIC S16 rgSCHCmnRgrLchCfg(cell, ue, dlLc, lcCfg, err)
12020 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12022 TRC2(rgSCHCmnRgrLchCfg);
12024 ret = rgSCHUtlAllocSBuf(cell->instIdx,
12025 (Data**)&((dlLc)->sch), (sizeof(RgSchCmnDlSvc)));
12028 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRgrLchCfg(): "
12029 "SCH struct alloc failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12030 err->errCause = RGSCHERR_SCH_CFG;
12033 if(lcCfg->lcType != CM_LTE_LCH_DCCH)
12035 ret = rgSCHCmnValidateDlQos(&lcCfg->dlInfo.dlQos);
12038 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSchCmnCrgLcCfg(): "
12039 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12040 err->errCause = RGSCHERR_SCH_CFG;
12043 /* Perform DL service activation in the scheduler */
12044 ((RgSchCmnDlSvc *)(dlLc->sch))->qci = lcCfg->dlInfo.dlQos.qci;
12045 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = rgSchCmnDlQciPrio[lcCfg->dlInfo.dlQos.qci - 1];
12046 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcCfg->dlInfo.dlQos.gbr * \
12047 RG_SCH_CMN_REFRESH_TIME)/100;
12048 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcCfg->dlInfo.dlQos.mbr * \
12049 RG_SCH_CMN_REFRESH_TIME)/100;
12053 /*assigning highest priority to DCCH */
12054 ((RgSchCmnDlSvc *)(dlLc->sch))->prio=RG_SCH_CMN_DCCH_PRIO;
12057 dlLc->lcType=lcCfg->lcType;
12060 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12062 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcCfg(cell, ue,dlLc ,lcCfg, err);
12071 ret = cellSch->apisDl->rgSCHRgrDlLcCfg(cell, ue, dlLc, lcCfg, err);
12079 if(TRUE == ue->isEmtcUe)
12081 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12090 ret = cellSch->apisUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12100 rgSCHSCellDlLcCfg(cell, ue, dlLc);
12106 if(lcCfg->dlInfo.dlSpsCfg.isSpsEnabled)
12108 /* Invoke SPS module if SPS is enabled for the service */
12109 ret = rgSCHCmnSpsDlLcCfg(cell, ue, dlLc, lcCfg, err);
12112 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "rgSchCmnRgrLchCfg(): "
12113 "SPS configuration failed for DL LC for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12114 err->errCause = RGSCHERR_SCH_CFG;
12124  * @brief Scheduler invocation on logical channel reconfiguration.
12128 * Function : rgSCHCmnRgrLchRecfg
12130  * This function does the required processing when an existing
12131 * (dedicated) logical channel is reconfigured. Assumes lcg
12132 * pointer in ulLc is set to the old value.
12133 * Independent of whether new LCG is meant to be configured,
12134 * the new LCG scheduler information is accessed and possibly modified.
12136 * @param[in] RgSchCellCb *cell
12137 * @param[in] RgSchUeCb *ue
12138 * @param[in] RgSchDlLcCb *dlLc
12139 * @param[int] RgrLchRecfg *lcRecfg
12140 * @param[out] RgSchErrInfo *err
12146 PUBLIC S16 rgSCHCmnRgrLchRecfg
12151 RgrLchRecfg *lcRecfg,
12155 PUBLIC S16 rgSCHCmnRgrLchRecfg(cell, ue, dlLc, lcRecfg, err)
12159 RgrLchRecfg *lcRecfg;
12164 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12166 TRC2(rgSCHCmnRgrLchRecfg)
12168 if(dlLc->lcType != CM_LTE_LCH_DCCH)
12170 ret = rgSCHCmnValidateDlQos(&lcRecfg->dlRecfg.dlQos);
12174 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
12175 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12176 err->errCause = RGSCHERR_SCH_CFG;
12179 if (((RgSchCmnDlSvc *)(dlLc->sch))->qci != lcRecfg->dlRecfg.dlQos.qci)
12181 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Qci, hence lc Priority change "
12182 "not supported for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12183 err->errCause = RGSCHERR_SCH_CFG;
12186 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcRecfg->dlRecfg.dlQos.gbr * \
12187 RG_SCH_CMN_REFRESH_TIME)/100;
12188 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcRecfg->dlRecfg.dlQos.mbr * \
12189 RG_SCH_CMN_REFRESH_TIME)/100;
12193 /*assigning highest priority to DCCH */
12194 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = RG_SCH_CMN_DCCH_PRIO;
12198 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12200 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12205 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
12214 ret = cellSch->apisDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12219 ret = cellSch->apisUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
12227 if (lcRecfg->recfgTypes & RGR_DL_LC_SPS_RECFG)
12229 /* Invoke SPS module if SPS is enabled for the service */
12230 if(lcRecfg->dlRecfg.dlSpsRecfg.isSpsEnabled)
12232 ret = rgSCHCmnSpsDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12235 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"SPS re-configuration not "
12236 "supported for dlLC Ignore this CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12247  * @brief Scheduler invocation on logical channel group configuration.
12251 * Function : rgSCHCmnRgrLcgCfg
12253  * This function does the required processing when a new
12254  * (dedicated) logical channel group is added. Assumes the lcg
12255  * pointer in ulLc is set.
12257 * @param[in] RgSchCellCb *cell,
12258 * @param[in] RgSchUeCb *ue,
12259 * @param[in] RgSchLcgCb *lcg,
12260 * @param[in] RgrLcgCfg *lcgCfg,
12261 * @param[out] RgSchErrInfo *err
12267 PUBLIC S16 rgSCHCmnRgrLcgCfg
12276 PUBLIC S16 rgSCHCmnRgrLcgCfg(cell, ue, lcg, lcgCfg, err)
12285 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12286 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCfg->ulInfo.lcgId].sch));
12288 TRC2(rgSCHCmnRgrLcgCfg);
12290 ulLcg->cfgdGbr = (lcgCfg->ulInfo.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12291 ulLcg->effGbr = ulLcg->cfgdGbr;
12292 ulLcg->deltaMbr = ((lcgCfg->ulInfo.mbr - lcgCfg->ulInfo.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12293 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12296 if(TRUE == ue->isEmtcUe)
12298 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12307 ret = cellSch->apisUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12313 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12315 /* Indicate MAC that this LCG is GBR LCG */
12316 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcgCfg->ulInfo.lcgId, TRUE);
12322  * @brief Scheduler invocation on logical channel group reconfiguration.
12326 * Function : rgSCHCmnRgrLcgRecfg
12328  * This function does the required processing when an existing
12329  * (dedicated) logical channel group is reconfigured. Assumes the lcg
12330  * pointer in ulLc is set.
12332 * @param[in] RgSchCellCb *cell,
12333 * @param[in] RgSchUeCb *ue,
12334 * @param[in] RgSchLcgCb *lcg,
12335 * @param[in] RgrLcgRecfg *reCfg,
12336 * @param[out] RgSchErrInfo *err
12342 PUBLIC S16 rgSCHCmnRgrLcgRecfg
12347 RgrLcgRecfg *reCfg,
12351 PUBLIC S16 rgSCHCmnRgrLcgRecfg(cell, ue, lcg, reCfg, err)
12355 RgrLcgRecfg *reCfg;
12360 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12361 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[reCfg->ulRecfg.lcgId].sch));
12363 TRC2(rgSCHCmnRgrLcgRecfg);
12365 ulLcg->cfgdGbr = (reCfg->ulRecfg.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12366 ulLcg->effGbr = ulLcg->cfgdGbr;
12367 ulLcg->deltaMbr = ((reCfg->ulRecfg.mbr - reCfg->ulRecfg.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12368 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12371 if(TRUE == ue->isEmtcUe)
12373 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12382 ret = cellSch->apisUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12388 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12390 /* Indicate MAC that this LCG is GBR LCG */
12391 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, TRUE);
12395 /* In case of RAB modification */
12396 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, FALSE);
12401 /***********************************************************
12403 * Func : rgSCHCmnRgrLchDel
12405 * Desc : Scheduler handling for a (dedicated)
12406 * uplink logical channel being deleted.
12413 **********************************************************/
12415 PUBLIC S16 rgSCHCmnRgrLchDel
12423 PUBLIC S16 rgSCHCmnRgrLchDel(cell, ue, lcId, lcgId)
12430 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12431 TRC2(rgSCHCmnRgrLchDel);
12433 if(TRUE == ue->isEmtcUe)
12435 cellSch->apisEmtcUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12440 cellSch->apisUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12445 /***********************************************************
12447 * Func : rgSCHCmnLcgDel
12449  * Desc : Scheduler handling for a (dedicated)
12450  * uplink logical channel group being deleted.
12458 **********************************************************/
12460 PUBLIC Void rgSCHCmnLcgDel
12467 PUBLIC Void rgSCHCmnLcgDel(cell, ue, lcg)
12473 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12474 RgSchCmnLcg *lcgCmn = RG_SCH_CMN_GET_UL_LCG(lcg);
12475 TRC2(rgSCHCmnLcgDel);
12477 if (lcgCmn == NULLP)
12482 if (RGSCH_IS_GBR_BEARER(lcgCmn->cfgdGbr))
12484 /* Indicate MAC that this LCG is GBR LCG */
12485 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcg->lcgId, FALSE);
12489 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
12491 rgSCHCmnSpsUlLcgDel(cell, ue, lcg);
12493 #endif /* LTEMAC_SPS */
12495 lcgCmn->effGbr = 0;
12496 lcgCmn->reportedBs = 0;
12497 lcgCmn->cfgdGbr = 0;
12498 /* set lcg bs to 0. Deletion of control block happens
12499 * at the time of UE deletion. */
12502 if(TRUE == ue->isEmtcUe)
12504 cellSch->apisEmtcUl->rgSCHFreeUlLcg(cell, ue, lcg);
12509 cellSch->apisUl->rgSCHFreeUlLcg(cell, ue, lcg);
12516 * @brief This function deletes a service from scheduler.
12520 * Function: rgSCHCmnFreeDlLc
12521  * Purpose: This function is made available through a function pointer for
12522  * making the scheduler aware of a service being deleted from the UE.
12524 * Invoked by: BO and Scheduler
12526 * @param[in] RgSchCellCb* cell
12527 * @param[in] RgSchUeCb* ue
12528 * @param[in] RgSchDlLcCb* svc
12533 PUBLIC Void rgSCHCmnFreeDlLc
12540 PUBLIC Void rgSCHCmnFreeDlLc(cell, ue, svc)
12546 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12547 TRC2(rgSCHCmnFreeDlLc);
12548 if (svc->sch == NULLP)
12553 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12555 cellSch->apisEmtcDl->rgSCHFreeDlLc(cell, ue, svc);
12560 cellSch->apisDl->rgSCHFreeDlLc(cell, ue, svc);
12566 rgSCHSCellDlLcDel(cell, ue, svc);
12571 /* If SPS service, invoke SPS module */
12572 if (svc->dlLcSpsCfg.isSpsEnabled)
12574 rgSCHCmnSpsDlLcDel(cell, ue, svc);
12578 /* ccpu00117052 - MOD - Passing double pointer
12579 for proper NULLP assignment*/
12580 rgSCHUtlFreeSBuf(cell->instIdx,
12581 (Data**)(&(svc->sch)), (sizeof(RgSchCmnDlSvc)));
12584 rgSCHLaaDeInitDlLchCb(cell, svc);
12593 * @brief This function Processes the Final Allocations
12594 * made by the RB Allocator against the requested
12595 * CCCH SDURetx Allocations.
12599 * Function: rgSCHCmnDlCcchSduRetxFnlz
12600 * Purpose: This function Processes the Final Allocations
12601 * made by the RB Allocator against the requested
12602 * CCCH Retx Allocations.
12603 * Scans through the scheduled list of ccchSdu retrans
12604 * fills the corresponding pdcch, adds the hqProc to
12605 * the corresponding SubFrm and removes the hqP from
12608 * Invoked by: Common Scheduler
12610 * @param[in] RgSchCellCb *cell
12611 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12616 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz
12619 RgSchCmnDlRbAllocInfo *allocInfo
12622 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo)
12624 RgSchCmnDlRbAllocInfo *allocInfo;
12628 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12629 RgSchDlRbAlloc *rbAllocInfo;
12630 RgSchDlHqProcCb *hqP;
12632 TRC2(rgSCHCmnDlCcchSduRetxFnlz);
12634 /* Traverse through the Scheduled Retx List */
12635 node = allocInfo->ccchSduAlloc.schdCcchSduRetxLst.first;
12638 hqP = (RgSchDlHqProcCb *)(node->node);
12640 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
12642 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12644 /* Remove the HqP from cell's ccchSduRetxLst */
12645 cmLListDelFrm(&cmnCellDl->ccchSduRetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12646 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12648 /* Fix: syed dlAllocCb reset should be performed.
12649 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12650 rgSCHCmnDlUeResetTemp(ue, hqP);
12652 /* Fix: syed dlAllocCb reset should be performed.
12653 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12654 node = allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst.first;
12657 hqP = (RgSchDlHqProcCb *)(node->node);
12660 /* reset the UE allocation Information */
12661 rgSCHCmnDlUeResetTemp(ue, hqP);
12667 * @brief This function Processes the Final Allocations
12668 * made by the RB Allocator against the requested
12669 * CCCH Retx Allocations.
12673 * Function: rgSCHCmnDlCcchRetxFnlz
12674 * Purpose: This function Processes the Final Allocations
12675 * made by the RB Allocator against the requested
12676 * CCCH Retx Allocations.
12677 * Scans through the scheduled list of msg4 retrans
12678 * fills the corresponding pdcch, adds the hqProc to
12679 * the corresponding SubFrm and removes the hqP from
12682 * Invoked by: Common Scheduler
12684 * @param[in] RgSchCellCb *cell
12685 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12690 PRIVATE Void rgSCHCmnDlCcchRetxFnlz
12693 RgSchCmnDlRbAllocInfo *allocInfo
12696 PRIVATE Void rgSCHCmnDlCcchRetxFnlz(cell, allocInfo)
12698 RgSchCmnDlRbAllocInfo *allocInfo;
12702 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12703 RgSchDlRbAlloc *rbAllocInfo;
12704 RgSchDlHqProcCb *hqP;
12706 TRC2(rgSCHCmnDlCcchRetxFnlz);
12708 /* Traverse through the Scheduled Retx List */
12709 node = allocInfo->msg4Alloc.schdMsg4RetxLst.first;
12712 hqP = (RgSchDlHqProcCb *)(node->node);
12713 raCb = hqP->hqE->raCb;
12714 rbAllocInfo = &raCb->rbAllocInfo;
12716 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12718 /* Remove the HqP from cell's msg4RetxLst */
12719 cmLListDelFrm(&cmnCellDl->msg4RetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12720 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12721 /* Fix: syed dlAllocCb reset should be performed.
12722 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12723 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12724 rgSCHCmnDlHqPResetTemp(hqP);
12726 /* Fix: syed dlAllocCb reset should be performed.
12727 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12728 node = allocInfo->msg4Alloc.nonSchdMsg4RetxLst.first;
12731 hqP = (RgSchDlHqProcCb *)(node->node);
12732 raCb = hqP->hqE->raCb;
12734 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12735 rgSCHCmnDlHqPResetTemp(hqP);
12742 * @brief This function Processes the Final Allocations
12743 * made by the RB Allocator against the requested
12744 * CCCH SDU tx Allocations.
12748 * Function: rgSCHCmnDlCcchSduTxFnlz
12749 * Purpose: This function Processes the Final Allocations
12750 * made by the RB Allocator against the requested
12751 * CCCH tx Allocations.
12752 * Scans through the scheduled list of CCCH SDU trans
12753 * fills the corresponding pdcch, adds the hqProc to
12754 * the corresponding SubFrm and removes the hqP from
12757 * Invoked by: Common Scheduler
12759 * @param[in] RgSchCellCb *cell
12760 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12765 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz
12768 RgSchCmnDlRbAllocInfo *allocInfo
12771 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo)
12773 RgSchCmnDlRbAllocInfo *allocInfo;
12778 RgSchDlRbAlloc *rbAllocInfo;
12779 RgSchDlHqProcCb *hqP;
12780 RgSchLchAllocInfo lchSchdData;
12781 TRC2(rgSCHCmnDlCcchSduTxFnlz);
12783 /* Traverse through the Scheduled Retx List */
12784 node = allocInfo->ccchSduAlloc.schdCcchSduTxLst.first;
12787 hqP = (RgSchDlHqProcCb *)(node->node);
12788 ueCb = hqP->hqE->ue;
12790 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
12792 /* fill the pdcch and HqProc */
12793 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12795 /* Remove the raCb from cell's toBeSchdLst */
12796 cmLListDelFrm(&cell->ccchSduUeLst, &ueCb->ccchSduLnk);
12797 ueCb->ccchSduLnk.node = (PTR)NULLP;
12799       /* Fix: Resetting this is required to avoid complications
12800        * in the re-establishment case */
12801 ueCb->dlCcchInfo.bo = 0;
12803 /* Indicate DHM of the CCCH LC scheduling */
12804 hqP->tbInfo[0].contResCe = NOTPRSNT;
12805 lchSchdData.lcId = 0;
12806 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12807 (RGSCH_MSG4_HDRSIZE);
12808 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12810 /* Fix: syed dlAllocCb reset should be performed.
12811 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12812 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12814 /* Fix: syed dlAllocCb reset should be performed.
12815 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12816 node = allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst.first;
12819 hqP = (RgSchDlHqProcCb *)(node->node);
12820 ueCb = hqP->hqE->ue;
12822 /* Release HqProc */
12823 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12824 /*Fix: Removing releasing of TB1 as it will not exist for CCCH SDU and hence caused a crash*/
12825 /*rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12826 /* reset the UE allocation Information */
12827 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12834 * @brief This function Processes the Final Allocations
12835 * made by the RB Allocator against the requested
12836 * CCCH tx Allocations.
12840 * Function: rgSCHCmnDlCcchTxFnlz
12841 * Purpose: This function Processes the Final Allocations
12842 * made by the RB Allocator against the requested
12843 * CCCH tx Allocations.
12844 * Scans through the scheduled list of msg4 trans
12845 * fills the corresponding pdcch, adds the hqProc to
12846 * the corresponding SubFrm and removes the hqP from
12849 * Invoked by: Common Scheduler
12851 * @param[in] RgSchCellCb *cell
12852 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12857 PRIVATE Void rgSCHCmnDlCcchTxFnlz
12860 RgSchCmnDlRbAllocInfo *allocInfo
12863 PRIVATE Void rgSCHCmnDlCcchTxFnlz(cell, allocInfo)
12865 RgSchCmnDlRbAllocInfo *allocInfo;
12870 RgSchDlRbAlloc *rbAllocInfo;
12871 RgSchDlHqProcCb *hqP;
12872 RgSchLchAllocInfo lchSchdData;
12873 TRC2(rgSCHCmnDlCcchTxFnlz);
12875 /* Traverse through the Scheduled Retx List */
12876 node = allocInfo->msg4Alloc.schdMsg4TxLst.first;
12879 hqP = (RgSchDlHqProcCb *)(node->node);
12880 raCb = hqP->hqE->raCb;
12882 rbAllocInfo = &raCb->rbAllocInfo;
12884 /* fill the pdcch and HqProc */
12885 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12886 /* MSG4 Fix Start */
12888 rgSCHRamRmvFrmRaInfoSchdLst(cell, raCb);
12891 /* Indicate DHM of the CCCH LC scheduling */
12892 lchSchdData.lcId = 0;
12893 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12894 (RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE);
12895       /* Transmitting presence of cont res CE across the MAC-SCH interface to
12896 * identify CCCH SDU transmissions which need to be done
12898 * contention resolution CE*/
12899 hqP->tbInfo[0].contResCe = PRSNT_NODEF;
12900       /* Don't add the LC if only the cont res CE is being transmitted */
12901 if(raCb->dlCcchInfo.bo)
12903 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12908 /* Fix: syed dlAllocCb reset should be performed.
12909 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12910 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12911 rgSCHCmnDlHqPResetTemp(hqP);
12913 node = allocInfo->msg4Alloc.nonSchdMsg4TxLst.first;
12916 hqP = (RgSchDlHqProcCb *)(node->node);
12917 raCb = hqP->hqE->raCb;
12919 rbAllocInfo = &raCb->rbAllocInfo;
12920 /* Release HqProc */
12921 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12922 /*Fix: Removing releasing of TB1 as it will not exist for MSG4 and hence caused a crash*/
12923 /* rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12924 /* reset the UE allocation Information */
12925 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12926 rgSCHCmnDlHqPResetTemp(hqP);
12933 * @brief This function calculates the BI Index to be sent in the Bi header
12937 * Function: rgSCHCmnGetBiIndex
12938  * Purpose: This function uses the previously calculated BI time value
12939  * and the difference between the time the last BI was sent and the
12940  * current time to calculate the latest BI index. It also considers
12941  * how many UEs are left unserved in this subframe.
12943 * Invoked by: Common Scheduler
12945 * @param[in] RgSchCellCb *cell
12946 * @param[in] U32 ueCount
12951 PUBLIC U8 rgSCHCmnGetBiIndex
12957 PUBLIC U8 rgSCHCmnGetBiIndex(cell, ueCount)
12962 S16 prevVal = 0; /* To Store Intermediate Value */
12963 U16 newBiVal = 0; /* To store Bi Value in millisecond */
12967 TRC2(rgSCHCmnGetBiIndex)
12969 if (cell->biInfo.prevBiTime != 0)
12972 if(cell->emtcEnable == TRUE)
12974 timeDiff =(RGSCH_CALC_SF_DIFF_EMTC(cell->crntTime, cell->biInfo.biTime));
12979 timeDiff =(RGSCH_CALC_SF_DIFF(cell->crntTime, cell->biInfo.biTime));
12982 prevVal = cell->biInfo.prevBiTime - timeDiff;
12988 newBiVal = RG_SCH_CMN_GET_BI_VAL(prevVal,ueCount);
12989 /* To be used next time when BI is calculated */
12991 if(cell->emtcEnable == TRUE)
12993 RGSCHCPYTIMEINFO_EMTC(cell->crntTime, cell->biInfo.biTime)
12998 RGSCHCPYTIMEINFO(cell->crntTime, cell->biInfo.biTime)
13001 /* Search the actual BI Index from table Backoff Parameters Value and
13002 * return that Index */
13005 if (rgSchCmnBiTbl[idx] > newBiVal)
13010 }while(idx < RG_SCH_CMN_NUM_BI_VAL-1);
13011 cell->biInfo.prevBiTime = rgSchCmnBiTbl[idx];
13012    /* Table 7.2.1 of 36.321 (v8.8.0) has 16 entries; 3 are reserved, so 13 usable entries */
13013    RETVALUE(idx); /* If a reserved value is returned from the table, the UE treats it as 960 ms */
13014 } /* rgSCHCmnGetBiIndex */
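/* Note on the BI computation above (a sketch of the flow only; the exact
 * scaling is hidden in RG_SCH_CMN_GET_BI_VAL and rgSchCmnBiTbl): the residual
 * backoff prevVal = prevBiTime - (time elapsed since the last BI was sent) is
 * combined with the count of unserved UEs to obtain newBiVal in milliseconds,
 * and the first rgSchCmnBiTbl entry larger than newBiVal (or the last entry)
 * is then returned as the backoff index placed in the RAR BI subheader. */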
13018 * @brief This function Processes the Final Allocations
13019 * made by the RB Allocator against the requested
13020  * RAR allocations. Assumption: The requested
13021 * allocations are always satisfied completely.
13022 * Hence no roll back.
13026 * Function: rgSCHCmnDlRaRspFnlz
13027 * Purpose: This function Processes the Final Allocations
13028  * made by the RB Allocator against the requested RAR allocations.
13029 * Takes care of PDCCH filling.
13031 * Invoked by: Common Scheduler
13033 * @param[in] RgSchCellCb *cell
13034 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
13039 PRIVATE Void rgSCHCmnDlRaRspFnlz
13042 RgSchCmnDlRbAllocInfo *allocInfo
13045 PRIVATE Void rgSCHCmnDlRaRspFnlz(cell, allocInfo)
13047 RgSchCmnDlRbAllocInfo *allocInfo;
13051 RgSchDlRbAlloc *raRspAlloc;
13052 RgSchDlSf *subFrm = NULLP;
13056 RgSchRaReqInfo *raReq;
13058 RgSchUlAlloc *ulAllocRef=NULLP;
13059 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13060 U8 allocRapidCnt = 0;
13062 U32 msg3SchdIdx = 0;
13063 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
13067 TRC2(rgSCHCmnDlRaRspFnlz);
13069 for (rarCnt=0; rarCnt<RG_SCH_CMN_MAX_CMN_PDCCH; rarCnt++)
13071 raRspAlloc = &allocInfo->raRspAlloc[rarCnt];
13072 /* Having likely condition first for optimization */
13073 if (!raRspAlloc->pdcch)
13079 subFrm = raRspAlloc->dlSf;
13080 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
13081 /* Corrected RACH handling for multiple RAPIDs per RARNTI */
13082 allocRapidCnt = raRspAlloc->numRapids;
13083 while (allocRapidCnt)
13085 raReq = (RgSchRaReqInfo *)(reqLst->first->node);
13086 /* RACHO: If dedicated preamble, then allocate UL Grant
13087 * (consequence of handover/pdcchOrder) and continue */
13088 if (RGSCH_IS_DEDPRM(cell, raReq->raReq.rapId))
13090 rgSCHCmnHdlHoPo(cell, &subFrm->raRsp[rarCnt].contFreeUeLst,
13092 cmLListDelFrm(reqLst, reqLst->first);
13094 /* ccpu00117052 - MOD - Passing double pointer
13095 for proper NULLP assignment*/
13096 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13097 sizeof(RgSchRaReqInfo));
13101 if(cell->overLoadBackOffEnab)
13102       {/* RACH overload control is triggered, skipping this RACH request */
13103 cmLListDelFrm(reqLst, reqLst->first);
13105 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13106 sizeof(RgSchRaReqInfo));
13109 /* Attempt to include each RA request into the RSP */
13110 /* Any failure in the procedure is considered to */
13111       /* affect further allocations in the same TTI. When */
13112 /* a failure happens, we break out and complete */
13113 /* the processing for random access */
13114 if (rgSCHRamCreateRaCb(cell, &raCb, &err) != ROK)
13118 /* Msg3 allocation request to USM */
13119 if (raReq->raReq.rapId < cell->rachCfg.sizeRaPreambleGrpA)
13123 /*ccpu00128820 - MOD - Msg3 alloc double delete issue*/
13124 rgSCHCmnMsg3GrntReq(cell, raCb->tmpCrnti, preamGrpA, \
13125 &(raCb->msg3HqProc), &ulAllocRef, &raCb->msg3HqProcId);
13126 if (ulAllocRef == NULLP)
13128 rgSCHRamDelRaCb(cell, raCb, TRUE);
13131 if (raReq->raReq.cqiPres)
13133 raCb->ccchCqi = raReq->raReq.cqiIdx;
13137 raCb->ccchCqi = cellDl->ccchCqi;
13139 raCb->rapId = raReq->raReq.rapId;
13140 raCb->ta.pres = TRUE;
13141 raCb->ta.val = raReq->raReq.ta;
13142 raCb->msg3Grnt = ulAllocRef->grnt;
13143 /* Populating the tpc value received */
13144 raCb->msg3Grnt.tpc = raReq->raReq.tpc;
13145 /* PHR handling for MSG3 */
13146 ulAllocRef->raCb = raCb;
13148 /* To the crntTime, add the MIN time at which UE will
13149 * actually send MSG3 i.e DL_DELTA+6 */
13150 raCb->msg3AllocTime = cell->crntTime;
13151 RGSCH_INCR_SUB_FRAME(raCb->msg3AllocTime, RG_SCH_CMN_MIN_MSG3_RECP_INTRVL);
13153 msg3SchdIdx = (cell->crntTime.subframe+RG_SCH_CMN_DL_DELTA) %
13154 RGSCH_NUM_SUB_FRAMES;
13155 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
13156 special subframe */
13157 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][msg3SchdIdx] !=
13158 RG_SCH_TDD_UL_SUBFRAME)
13160 RGSCHCMNADDTOCRNTTIME(cell->crntTime,raCb->msg3AllocTime,
13161 RG_SCH_CMN_DL_DELTA)
13162 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][
13163 raCb->msg3AllocTime.subframe];
13164 RGSCHCMNADDTOCRNTTIME(raCb->msg3AllocTime, raCb->msg3AllocTime,
13168 cmLListAdd2Tail(&subFrm->raRsp[rarCnt].raRspLst, &raCb->rspLnk);
13169 raCb->rspLnk.node = (PTR)raCb;
13170 cmLListDelFrm(reqLst, reqLst->first);
13172 /* ccpu00117052 - MOD - Passing double pointer
13173 for proper NULLP assignment*/
13174 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13175 sizeof(RgSchRaReqInfo));
13177 /* SR_RACH_STATS : RAR scheduled */
13182 /* Fill subframe data members */
13183 subFrm->raRsp[rarCnt].raRnti = raRspAlloc->rnti;
13184 subFrm->raRsp[rarCnt].pdcch = raRspAlloc->pdcch;
13185 subFrm->raRsp[rarCnt].tbSz = raRspAlloc->tbInfo[0].bytesAlloc;
13186 /* Fill PDCCH data members */
13187 rgSCHCmnFillPdcch(cell, subFrm->raRsp[rarCnt].pdcch, raRspAlloc);
13190 if(cell->overLoadBackOffEnab)
13191       {/* RACH overload control is triggered, skipping this RACH request */
13192 subFrm->raRsp[rarCnt].backOffInd.pres = PRSNT_NODEF;
13193 subFrm->raRsp[rarCnt].backOffInd.val = cell->overLoadBackOffval;
13198 subFrm->raRsp[rarCnt].backOffInd.pres = NOTPRSNT;
13201       /*[ccpu00125212] Avoid sending an empty RAR in case the RAR window
13202        is short and the UE is sending an unauthorised preamble.*/
13203 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
13204 if ((raRspAlloc->biEstmt) && (reqLst->count))
13206 subFrm->raRsp[0].backOffInd.pres = PRSNT_NODEF;
13207 /* Added as part of Upgrade */
13208 subFrm->raRsp[0].backOffInd.val =
13209 rgSCHCmnGetBiIndex(cell, reqLst->count);
13211 /* SR_RACH_STATS : Back Off Inds */
13215 else if ((subFrm->raRsp[rarCnt].raRspLst.first == NULLP) &&
13216 (subFrm->raRsp[rarCnt].contFreeUeLst.first == NULLP))
13218 /* Return the grabbed PDCCH */
13219 rgSCHUtlPdcchPut(cell, &subFrm->pdcchInfo, raRspAlloc->pdcch);
13220 subFrm->raRsp[rarCnt].pdcch = NULLP;
13221 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRaRspAlloc(): "
13222 "Not even one RaReq.");
13226 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId,
13227 "RNTI:%d Scheduled RAR @ (%u,%u) ",
13229 cell->crntTime.sfn,
13230 cell->crntTime.subframe);
13236 * @brief This function computes rv.
13240 * Function: rgSCHCmnDlCalcRvForBcch
13241 * Purpose: This function computes rv.
13243 * Invoked by: Common Scheduler
13245 * @param[in] RgSchCellCb *cell
13246 * @param[in] Bool si
13252 PRIVATE U8 rgSCHCmnDlCalcRvForBcch
13259 PRIVATE U8 rgSCHCmnDlCalcRvForBcch(cell, si, i)
13266 CmLteTimingInfo frm;
13267 TRC2(rgSCHCmnDlCalcRvForBcch);
13269 frm = cell->crntTime;
13270 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
13278 k = (frm.sfn/2) % 4;
13280 rv = RGSCH_CEIL(3*k, 2) % 4;
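   /* Worked example of the RV pattern produced by this formula, when k is
    * derived from the SFN as shown above (the SIB1 case): k = 0,1,2,3 gives
    * rv = ceil(3k/2) % 4 = 0, 2, 3, 1 respectively, i.e. the RV cycles
    * 0,2,3,1 across consecutive transmission occasions. */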
13285 * @brief This function Processes the Final Allocations
13286 * made by the RB Allocator against the requested
13287  * BCCH/PCCH allocations. Assumption: The requested
13288 * allocations are always satisfied completely.
13289 * Hence no roll back.
13293 * Function: rgSCHCmnDlBcchPcchFnlz
13294 * Purpose: This function Processes the Final Allocations
13295  * made by the RB Allocator against the requested BCCH/PCCH allocations.
13296 * Takes care of PDCCH filling.
13298 * Invoked by: Common Scheduler
13300 * @param[in] RgSchCellCb *cell
13301 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
13306 PRIVATE Void rgSCHCmnDlBcchPcchFnlz
13309 RgSchCmnDlRbAllocInfo *allocInfo
13312 PRIVATE Void rgSCHCmnDlBcchPcchFnlz(cell, allocInfo)
13314 RgSchCmnDlRbAllocInfo *allocInfo;
13317 RgSchDlRbAlloc *rbAllocInfo;
13321 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
13323 #ifdef LTEMAC_HDFDD
13324 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13326 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13330 /* Moving variables to available scope for optimization */
13331 RgSchClcDlLcCb *pcch;
13334 RgSchClcDlLcCb *bcch;
13337 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13339 TRC2(rgSCHCmnDlBcchPcchFnlz);
13342 rbAllocInfo = &allocInfo->pcchAlloc;
13343 if (rbAllocInfo->pdcch)
13345 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13347 /* Added sfIdx calculation for TDD as well */
13349 #ifdef LTEMAC_HDFDD
13350 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13352 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13355 subFrm = rbAllocInfo->dlSf;
13356 pcch = rgSCHDbmGetPcch(cell);
13359 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnDlBcchPcchFnlz( ): "
13360 "No Pcch Present");
13364 /* Added Dl TB count for paging message transmission*/
13366 cell->dlUlTbCnt.tbTransDlTotalCnt++;
13368 bo = (RgSchClcBoRpt *)pcch->boLst.first->node;
13369 cmLListDelFrm(&pcch->boLst, &bo->boLstEnt);
13370 /* ccpu00117052 - MOD - Passing double pointer
13371 for proper NULLP assignment*/
13372 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13373 /* Fill subframe data members */
13374 subFrm->pcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13375 subFrm->pcch.pdcch = rbAllocInfo->pdcch;
13376 /* Fill PDCCH data members */
13377 rgSCHCmnFillPdcch(cell, subFrm->pcch.pdcch, rbAllocInfo);
13378 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, pcch->lcId, TRUE);
13379 /* ccpu00132314-ADD-Update the tx power allocation info
13380 TODO-Need to add a check for max tx power per symbol */
13381 subfrmAlloc->cmnLcInfo.pcchInfo.txPwrOffset = cellDl->pcchTxPwrOffset;
13385 rbAllocInfo = &allocInfo->bcchAlloc;
13386 if (rbAllocInfo->pdcch)
13388 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13390 #ifdef LTEMAC_HDFDD
13391 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13393 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13396 subFrm = rbAllocInfo->dlSf;
13398 /* Fill subframe data members */
13399 subFrm->bcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13400 subFrm->bcch.pdcch = rbAllocInfo->pdcch;
13401 /* Fill PDCCH data members */
13402 rgSCHCmnFillPdcch(cell, subFrm->bcch.pdcch, rbAllocInfo);
13404 if(rbAllocInfo->schdFirst)
13407 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
13408 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
13410 /*Copy the SIB1 msg buff into interface buffer */
13411 SCpyMsgMsg(cell->siCb.crntSiInfo.sib1Info.sib1,
13412 rgSchCb[cell->instIdx].rgSchInit.region,
13413 rgSchCb[cell->instIdx].rgSchInit.pool,
13414 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13415 #endif/*RGR_SI_SCH*/
13416 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13417 rgSCHCmnDlCalcRvForBcch(cell, FALSE, 0);
13425 i = cell->siCb.siCtx.i;
13426 /*Decrement the retransmission count */
13427 cell->siCb.siCtx.retxCntRem--;
13429 /*Copy the SI msg buff into interface buffer */
13430 if(cell->siCb.siCtx.warningSiFlag == FALSE)
13432 SCpyMsgMsg(cell->siCb.siArray[cell->siCb.siCtx.siId-1].si,
13433 rgSchCb[cell->instIdx].rgSchInit.region,
13434 rgSchCb[cell->instIdx].rgSchInit.pool,
13435 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13439 pdu = rgSCHUtlGetWarningSiPdu(cell);
13440 RGSCH_NULL_CHECK(cell->instIdx, pdu);
13442 rgSchCb[cell->instIdx].rgSchInit.region,
13443 rgSchCb[cell->instIdx].rgSchInit.pool,
13444 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13445 if(cell->siCb.siCtx.retxCntRem == 0)
13447 rgSCHUtlFreeWarningSiPdu(cell);
13448 cell->siCb.siCtx.warningSiFlag = FALSE;
13453 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
13454 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
13456 if(bo->retxCnt != cell->siCfg.retxCnt-1)
13461 #endif/*RGR_SI_SCH*/
13462 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13463 rgSCHCmnDlCalcRvForBcch(cell, TRUE, i);
13466 /* Added Dl TB count for SIB1 and SI messages transmission.
13467 * This counter will be incremented only for the first transmission
13468 * (with RV 0) of these messages*/
13470 if(subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv == 0)
13472 cell->dlUlTbCnt.tbTransDlTotalCnt++;
13476 if(bo->retxCnt == 0)
13478 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
13479 /* ccpu00117052 - MOD - Passing double pointer
13480 for proper NULLP assignment*/
13481 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13483 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, bcch->lcId, sendInd);
13485 /*Fill the interface info */
13486 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, NULLD, NULLD);
13488 /* ccpu00132314-ADD-Update the tx power allocation info
13489 TODO-Need to add a check for max tx power per symbol */
13490 subfrmAlloc->cmnLcInfo.bcchInfo.txPwrOffset = cellDl->bcchTxPwrOffset;
13492    /*mBuf has already been copied above */
13493 #endif/*RGR_SI_SCH*/
13506 * Function: rgSCHCmnUlSetAllUnSched
13509 * Invoked by: Common Scheduler
13511 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13516 PRIVATE Void rgSCHCmnUlSetAllUnSched
13518 RgSchCmnUlRbAllocInfo *allocInfo
13521 PRIVATE Void rgSCHCmnUlSetAllUnSched(allocInfo)
13522 RgSchCmnUlRbAllocInfo *allocInfo;
13527 TRC2(rgSCHCmnUlSetAllUnSched);
13529 node = allocInfo->contResLst.first;
13532 rgSCHCmnUlMov2NonSchdCntResLst(allocInfo, (RgSchUeCb *)node->node);
13533 node = allocInfo->contResLst.first;
13536 node = allocInfo->retxUeLst.first;
13539 rgSCHCmnUlMov2NonSchdRetxUeLst(allocInfo, (RgSchUeCb *)node->node);
13540 node = allocInfo->retxUeLst.first;
13543 node = allocInfo->ueLst.first;
13546 rgSCHCmnUlMov2NonSchdUeLst(allocInfo, (RgSchUeCb *)node->node);
13547 node = allocInfo->ueLst.first;
13559 * Function: rgSCHCmnUlAdd2CntResLst
13562 * Invoked by: Common Scheduler
13564 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13565 * @param[in] RgSchUeCb *ue
13570 PUBLIC Void rgSCHCmnUlAdd2CntResLst
13572 RgSchCmnUlRbAllocInfo *allocInfo,
13576 PUBLIC Void rgSCHCmnUlAdd2CntResLst(allocInfo, ue)
13577 RgSchCmnUlRbAllocInfo *allocInfo;
13581 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,ue->cell))->alloc);
13582 TRC2(rgSCHCmnUlAdd2CntResLst);
13583 cmLListAdd2Tail(&allocInfo->contResLst, &ulAllocInfo->reqLnk);
13584 ulAllocInfo->reqLnk.node = (PTR)ue;
13593 * Function: rgSCHCmnUlAdd2UeLst
13596 * Invoked by: Common Scheduler
13598 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13599 * @param[in] RgSchUeCb *ue
13604 PUBLIC Void rgSCHCmnUlAdd2UeLst
13607 RgSchCmnUlRbAllocInfo *allocInfo,
13611 PUBLIC Void rgSCHCmnUlAdd2UeLst(cell, allocInfo, ue)
13613 RgSchCmnUlRbAllocInfo *allocInfo;
13617 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,cell))->alloc);
13618 TRC2(rgSCHCmnUlAdd2UeLst);
13619 if (ulAllocInfo->reqLnk.node == NULLP)
13621 cmLListAdd2Tail(&allocInfo->ueLst, &ulAllocInfo->reqLnk);
13622 ulAllocInfo->reqLnk.node = (PTR)ue;
13632 * Function: rgSCHCmnAllocUlRb
13633 * Purpose: To do RB allocations for uplink
13635 * Invoked by: Common Scheduler
13637 * @param[in] RgSchCellCb *cell
13638 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
13642 PUBLIC Void rgSCHCmnAllocUlRb
13645 RgSchCmnUlRbAllocInfo *allocInfo
13648 PUBLIC Void rgSCHCmnAllocUlRb(cell, allocInfo)
13650 RgSchCmnUlRbAllocInfo *allocInfo;
13653 RgSchUlSf *sf = allocInfo->sf;
13654 TRC2(rgSCHCmnAllocUlRb);
13656 /* Schedule for new transmissions */
13657 rgSCHCmnUlRbAllocForLst(cell, sf, allocInfo->ueLst.count,
13658 &allocInfo->ueLst, &allocInfo->schdUeLst,
13659 &allocInfo->nonSchdUeLst, (Bool)TRUE);
13663 /***********************************************************
13665 * Func : rgSCHCmnUlRbAllocForLst
13667  *     Desc : Allocate RBs for the UE list carried in the common RB allocation information passed
13676 **********************************************************/
13678 PRIVATE Void rgSCHCmnUlRbAllocForLst
13684 CmLListCp *schdLst,
13685 CmLListCp *nonSchdLst,
13689 PRIVATE Void rgSCHCmnUlRbAllocForLst(cell, sf, count, reqLst, schdLst,
13690 nonSchdLst, isNewTx)
13695 CmLListCp *schdLst;
13696 CmLListCp *nonSchdLst;
13705 CmLteTimingInfo timeInfo;
13708 TRC2(rgSCHCmnUlRbAllocForLst);
13710 if(schdLst->count == 0)
13712 cmLListInit(schdLst);
13715 cmLListInit(nonSchdLst);
13717 if(isNewTx == TRUE)
13719 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.numUes = (U8) count;
13721 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime, timeInfo, TFU_ULCNTRL_DLDELTA);
13722 k = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][timeInfo.subframe];
13723 RG_SCH_ADD_TO_CRNT_TIME(timeInfo,
13724 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo, k);
13726 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime,cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo,
13727 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
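   /* Note on the two timing branches above: in the TDD branch the UL grant
    * issued for DL subframe n applies to PUSCH at n + k, with k read from
    * rgSchTddPuschTxKTbl for the configured UL/DL pattern; in the other
    * branch the offset is the fixed sum TFU_ULCNTRL_DLDELTA +
    * RGSCH_PDCCH_PUSCH_DELTA (standard FDD PUSCH timing is n + 4). */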
13732 for (lnk = reqLst->first; count; lnk = lnk->next, --count)
13734 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13735 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
13740 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
13745 ueUl->subbandShare = ueUl->subbandRequired;
13746 if(isNewTx == TRUE)
13748 maxRb = RGSCH_MIN((ueUl->subbandRequired * MAX_5GTF_VRBG_SIZE), ue->ue5gtfCb.maxPrb);
13750 ret = rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole);
13753 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, schdLst);
13754 rgSCHCmnUlUeFillAllocInfo(cell, ue);
13758 gUl5gtfRbAllocFail++;
13759 #if defined (TENB_STATS) && defined (RG_5GTF)
13760 cell->tenbStats->sch.ul5gtfRbAllocFail++;
13762 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13763 ue->isMsg4PdcchWithCrnti = FALSE;
13764 ue->isSrGrant = FALSE;
13767 if(isNewTx == TRUE)
13769 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13770 ulAllocInfo[count - 1].rnti = ue->ueId;
13771 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13772 ulAllocInfo[count - 1].numPrb = ue->ul.nPrb;
13775 ueUl->subbandShare = 0; /* This reset will take care of
13776 * all scheduler types */
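      /* UEs still left in the request list once UL holes or PDCCH resources
       * are exhausted fall through to the loop below, where they are simply
       * moved to the non-scheduled list with their PDCCH-with-CRNTI flag
       * cleared. */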
13778 for (; count; lnk = lnk->next, --count)
13780 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13781 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13782 ue->isMsg4PdcchWithCrnti = FALSE;
13788 /***********************************************************
13790 * Func : rgSCHCmnUlMdfyGrntForCqi
13792 * Desc : Modify UL Grant to consider presence of
13793 * CQI along with PUSCH Data.
13798  *        - Scale down iTbs based on betaOffset and
13799  *          the ACQI size.
13800  *        - Optionally attempt to increase numSb by 1
13801  *          if the input payload size does not fit due
13802  *          to the reduced tbSz resulting from the new iTbs.
13806 **********************************************************/
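/* Worked example for the RE budgeting done below (numbers are illustrative
 * only, not taken from configuration): with totREs = 1728, tbSz = 1736 bits,
 * betaOffVal = 2000 (i.e. 2.0 in parts per 1000) and cqiRiRptSz = 64 bits,
 * the CQI/RI report consumes roughly 1728*2000*64/(1000*1736) ~ 127 REs.
 * Whatever remains after also deducting the HARQ-ACK REs must still carry
 * tbSz bits within the target efficiency, otherwise numSb is increased or
 * iTbs is stepped down. */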
13808 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi
13820 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi(cell, ue, maxRb, numSb, iTbs, hqSz, stepDownItbs, effTgt)
13831 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(ue->cell);
13836 U32 remREsForPusch;
13839 U32 betaOffVal = ue->ul.betaOffstVal;
13840 U32 cqiRiRptSz = ue->ul.cqiRiSz;
13841 U32 betaOffHqVal = rgSchCmnBetaHqOffstTbl[ue->ul.betaHqOffst];
13842 U32 resNumSb = *numSb;
13843 U32 puschEff = 1000;
13846 Bool mdfyiTbsFlg = FALSE;
13847 U8 resiTbs = *iTbs;
13849 TRC2(rgSCHCmnUlMdfyGrntForCqi)
13854 iMcs = rgSCHCmnUlGetIMcsFrmITbs(resiTbs, RG_SCH_CMN_GET_UE_CTGY(ue));
13855 RG_SCH_UL_MCS_TO_MODODR(iMcs, modOdr);
13856 if (RG_SCH_CMN_GET_UE_CTGY(ue) != CM_LTE_UE_CAT_5)
13858 modOdr = RGSCH_MIN(RGSCH_QM_QPSK, modOdr);
13862 modOdr = RGSCH_MIN(RGSCH_QM_64QAM, modOdr);
13864 nPrb = resNumSb * cellUl->sbSize;
13865    /* Restricting the minimum iTbs required for modification to 10 */
13866 if ((nPrb >= maxRb) && (resiTbs <= 10))
13868       /* Could not accommodate ACQI */
13871 totREs = nPrb * RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
13872 tbSz = rgTbSzTbl[0][resiTbs][nPrb-1];
13873       /* totREs/tbSz gives the REs consumed per data bit. */
13874 cqiRiREs = (totREs * betaOffVal * cqiRiRptSz)/(1000 * tbSz); /* betaOffVal is represented
13875 as parts per 1000 */
13876 hqREs = (totREs * betaOffHqVal * hqSz)/(1000 * tbSz);
13877 if ((cqiRiREs + hqREs) < totREs)
13879 remREsForPusch = totREs - cqiRiREs - hqREs;
13880          bitsPerRe = (tbSz * 1000)/remREsForPusch; /* Multiplying by 1000 for integer arithmetic */
13881 puschEff = bitsPerRe/modOdr;
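         /* puschEff is the effective coding rate scaled by 1000. Using
          * assumed numbers: tbSz = 1736 bits over 1500 remaining REs with
          * QPSK gives bitsPerRe = 1157 and puschEff = 578 (~0.58), which is
          * then compared against effTgt (the 0.93 limit noted below,
          * presumably passed in as 930). */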
13883 if (puschEff < effTgt)
13885 /* ensure resultant efficiency for PUSCH Data is within 0.93*/
13890 /* Alternate between increasing SB or decreasing iTbs until eff is met */
13891 if (mdfyiTbsFlg == FALSE)
13895 resNumSb = resNumSb + 1;
13897 mdfyiTbsFlg = TRUE;
13903 resiTbs-= stepDownItbs;
13905 mdfyiTbsFlg = FALSE;
13908    }while (1); /* Loop breaks if the efficiency target is met,
13909                   or returns RFAILED if it cannot be met */
13917 /***********************************************************
13919 * Func : rgSCHCmnUlRbAllocForUe
13921  *     Desc : Do uplink RB allocation for a UE.
13925 * Notes: Note that as of now, for retx, maxRb
13926 * is not considered. Alternatives, such
13927 * as dropping retx if it crosses maxRb
13928 * could be considered.
13932 **********************************************************/
13934 PRIVATE S16 rgSCHCmnUlRbAllocForUe
13943 PRIVATE S16 rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole)
13951 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
13952 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
13953 RgSchUlAlloc *alloc = NULLP;
13959 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->schdHqProcIdx];
13961 RgSchUlHqProcCb *proc = NULLP;
13967 TfuDciFormat dciFrmt;
13971 TRC2(rgSCHCmnUlRbAllocForUe);
13973 rgSCHUhmGetAvlHqProc(cell, ue, &proc);
13976 //printf("UE [%d] HQ Proc unavailable\n", ue->ueId);
13981 if (ue->ue5gtfCb.rank == 2)
13983 dciFrmt = TFU_DCI_FORMAT_A2;
13988 dciFrmt = TFU_DCI_FORMAT_A1;
13991 /* 5gtf TODO : To pass dci frmt to this function */
13992 pdcch = rgSCHCmnPdcchAllocCrntSf(cell, ue);
13995 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
13996 "rgSCHCmnUlRbAllocForUe(): Could not get PDCCH for CRNTI:%d",ue->ueId);
13999 gUl5gtfPdcchSchd++;
14000 #if defined (TENB_STATS) && defined (RG_5GTF)
14001 cell->tenbStats->sch.ul5gtfPdcchSchd++;
14004 //TODO_SID using configured prb as of now
14005 nPrb = ue->ue5gtfCb.maxPrb;
14006 reqVrbg = nPrb/MAX_5GTF_VRBG_SIZE;
14007 iMcs = ue->ue5gtfCb.mcs; //gSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
14011 if((sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart > MAX_5GTF_VRBG)
14012 || (sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated > MAX_5GTF_VRBG))
14014 printf("5GTF_ERROR vrbg > 25 valstart = %d valalloc %d\n", sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart
14015 , sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated);
14020    /*TODO_SID: Workaround for alloc. Currently alloc is UL-SF based; to handle multiple beams we need a different
14021      design. For now allocations are formed based on the MAX_5GTF_UE_SCH macro. */
14022 numVrbgTemp = MAX_5GTF_VRBG/MAX_5GTF_UE_SCH;
14025 alloc = rgSCHCmnUlSbAlloc(sf, numVrbgTemp,\
14028 if (alloc == NULLP)
14030 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
14031 "rgSCHCmnUlRbAllocForUe(): Could not get UlAlloc %d CRNTI:%d",numVrbg,ue->ueId);
14032 rgSCHCmnPdcchRlsCrntSf(cell, pdcch);
14035 gUl5gtfAllocAllocated++;
14036 #if defined (TENB_STATS) && defined (RG_5GTF)
14037 cell->tenbStats->sch.ul5gtfAllocAllocated++;
14039 alloc->grnt.vrbgStart = sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart;
14040 alloc->grnt.numVrbg = numVrbg;
14041 alloc->grnt.numLyr = numLyr;
14042 alloc->grnt.dciFrmt = dciFrmt;
14044 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart += numVrbg;
14045 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated += numVrbg;
14047 //rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
14049 sf->totPrb += alloc->grnt.numRb;
14050 ue->ul.nPrb = alloc->grnt.numRb;
14052 if (ue->csgMmbrSta != TRUE)
14054 cellUl->ncsgPrbCnt += alloc->grnt.numRb;
14056 cellUl->totPrbCnt += (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14057 alloc->pdcch = pdcch;
14058 alloc->grnt.iMcs = iMcs;
14059 alloc->grnt.iMcsCrnt = iMcsCrnt;
14060 alloc->grnt.hop = 0;
14061 /* Initial Num RBs support for UCI on PUSCH */
14063 ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14065 alloc->forMsg3 = FALSE;
14066 //RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTb5gtfSzTbl[0], (iTbs));
14068 //ueUl->alloc.allocdBytes = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
14069 /* TODO_SID Allocating based on configured MCS as of now.
14070 Currently for format A2. When doing multi grp per tti, need to update this. */
14071 ueUl->alloc.allocdBytes = (rgSch5gtfTbSzTbl[iMcs]/8) * ue->ue5gtfCb.rank;
14073 alloc->grnt.datSz = ueUl->alloc.allocdBytes;
14074 //TODO_SID Need to check mod order.
14075 RG_SCH_CMN_TBS_TO_MODODR(iMcs, alloc->grnt.modOdr);
14076 //alloc->grnt.modOdr = 6;
14077 alloc->grnt.isRtx = FALSE;
14079 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
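   /* rbAssign packs (vrbgStart, numVrbg) into a single RIV field; assuming
    * rgSCHCmnCalcRiv follows the usual LTE type-2 encoding, this is
    * N*(L-1) + S when (L-1) <= N/2 and N*(N-L+1) + (N-1-S) otherwise,
    * with N = MAX_5GTF_VRBG, S = vrbgStart and L = numVrbg. */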
14080 alloc->grnt.SCID = 0;
14081 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
14082 alloc->grnt.PMI = 0;
14083 alloc->grnt.uciOnxPUSCH = 0;
14084 alloc->grnt.hqProcId = proc->procId;
14086 alloc->hqProc = proc;
14087 alloc->hqProc->ulSfIdx = cellUl->schdIdx;
14089    /*commented out to retain the RNTI used for transmission (SPS/C-RNTI) */
14090 alloc->rnti = ue->ueId;
14091 ueUl->alloc.alloc = alloc;
14092 /*rntiwari-Adding the debug for generating the graph.*/
14093 /* No grant attr recorded now */
14097 /***********************************************************
14099 * Func : rgSCHCmnUlRbAllocAddUeToLst
14101 * Desc : Add UE to list (scheduled/non-scheduled list)
14102 * for UL RB allocation information.
14110 **********************************************************/
14112 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst
14119 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst(cell, ue, lst)
14125 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
14126 TRC2(rgSCHCmnUlRbAllocAddUeToLst);
14129 gUl5gtfUeRbAllocDone++;
14130 #if defined (TENB_STATS) && defined (RG_5GTF)
14131 cell->tenbStats->sch.ul5gtfUeRbAllocDone++;
14133 cmLListAdd2Tail(lst, &ueUl->alloc.schdLstLnk);
14134 ueUl->alloc.schdLstLnk.node = (PTR)ue;
14139 * @brief This function Processes the Final Allocations
14140  *        made by the RB Allocator against the requested allocations.
14144 * Function: rgSCHCmnUlAllocFnlz
14145 * Purpose: This function Processes the Final Allocations
14146  *               made by the RB Allocator against the requested allocations.
14148 * Invoked by: Common Scheduler
14150 * @param[in] RgSchCellCb *cell
14151 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
14156 PRIVATE Void rgSCHCmnUlAllocFnlz
14159 RgSchCmnUlRbAllocInfo *allocInfo
14162 PRIVATE Void rgSCHCmnUlAllocFnlz(cell, allocInfo)
14164 RgSchCmnUlRbAllocInfo *allocInfo;
14167 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14168 TRC2(rgSCHCmnUlAllocFnlz);
14170 /* call scheduler specific Finalization */
14171 cellSch->apisUl->rgSCHUlAllocFnlz(cell, allocInfo);
14177 * @brief This function Processes the Final Allocations
14178  *        made by the RB Allocator against the requested allocations.
14182 * Function: rgSCHCmnDlAllocFnlz
14183 * Purpose: This function Processes the Final Allocations
14184  *               made by the RB Allocator against the requested allocations.
14186 * Invoked by: Common Scheduler
14188 * @param[in] RgSchCellCb *cell
14193 PUBLIC Void rgSCHCmnDlAllocFnlz
14198 PUBLIC Void rgSCHCmnDlAllocFnlz(cell)
14202 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14203 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
14205 TRC2(rgSCHCmnDlAllocFnlz);
14207 rgSCHCmnDlCcchRetxFnlz(cell, allocInfo);
14208 rgSCHCmnDlCcchTxFnlz(cell, allocInfo);
14210 /* Added below functions for handling CCCH SDU transmission received
14212 * * guard timer expiry*/
14213 rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo);
14214 rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo);
14216 rgSCHCmnDlRaRspFnlz(cell, allocInfo);
14217 /* call scheduler specific Finalization */
14218 cellSch->apisDl->rgSCHDlAllocFnlz(cell, allocInfo);
14220 /* Stack Crash problem for TRACE5 Changes. Added the return below */
14227 * @brief Update an uplink subframe.
14231 * Function : rgSCHCmnUlUpdSf
14233 * For each allocation
14234 * - if no more tx needed
14235 * - Release allocation
14237 * - Perform retransmission
14239 * @param[in] RgSchUlSf *sf
14243 PRIVATE Void rgSCHCmnUlUpdSf
14246 RgSchCmnUlRbAllocInfo *allocInfo,
14250 PRIVATE Void rgSCHCmnUlUpdSf(cell, allocInfo, sf)
14252 RgSchCmnUlRbAllocInfo *allocInfo;
14257 TRC2(rgSCHCmnUlUpdSf);
14259 while ((lnk = sf->allocs.first))
14261 RgSchUlAlloc *alloc = (RgSchUlAlloc *)lnk->node;
14264 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
14269 /* If need to handle all retx together, run another loop separately */
14270 rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc);
14272 rgSCHCmnUlRlsUlAlloc(cell, sf, alloc);
14275 /* By this time, all allocs would have been cleared and
14276 * SF is reset to be made ready for new allocations. */
14277 rgSCHCmnUlSfReset(cell, sf);
14278 /* In case there are timing problems due to msg3
14279 * allocations being done in advance, (which will
14280 * probably happen with the current FDD code that
14281 * handles 8 subframes) one solution
14282 * could be to hold the (recent) msg3 allocs in a separate
14283 * list, and then possibly add that to the actual
14284 * list later. So at this time while allocations are
14285 * traversed, the recent msg3 ones are not seen. Anytime after
14286 * this (a good time is when the usual allocations
14287 * are made), msg3 allocations could be transferred to the
14288 * normal list. Not doing this now as it is assumed
14289 * that incorporation of TDD shall take care of this.
14297 * @brief Handle uplink allocation for retransmission.
14301 * Function : rgSCHCmnUlHndlAllocRetx
14303 * Processing Steps:
14304 * - Add to queue for retx.
14305 * - Do not release here, release happends as part
14306 * of the loop that calls this function.
14308 * @param[in] RgSchCellCb *cell
14309 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
14310 * @param[in] RgSchUlSf *sf
14311 * @param[in] RgSchUlAlloc *alloc
14315 PRIVATE Void rgSCHCmnUlHndlAllocRetx
14318 RgSchCmnUlRbAllocInfo *allocInfo,
14320 RgSchUlAlloc *alloc
14323 PRIVATE Void rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc)
14325 RgSchCmnUlRbAllocInfo *allocInfo;
14327 RgSchUlAlloc *alloc;
14331 RgSchCmnUlUe *ueUl;
14332 TRC2(rgSCHCmnUlHndlAllocRetx);
14334 rgTbSzTbl[0][rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs)]\
14335 [alloc->grnt.numRb-1]/8;
14336 if (!alloc->forMsg3)
14338 ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue);
14339 ueUl->alloc.reqBytes = bytes;
14340 rgSCHUhmRetx(alloc->hqProc);
14341 rgSCHCmnUlAdd2RetxUeLst(allocInfo, alloc->ue);
14345 /* RACHO msg3 retx handling. Part of RACH procedure changes. */
14346 retxAlloc = rgSCHCmnUlGetUlAlloc(cell, sf, alloc->numSb);
14347 if (retxAlloc == NULLP)
14349 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
14350          "rgSCHCmnUlHndlAllocRetx(): Could not get UlAlloc for msg3Retx RNTI:%d",
14354 retxAlloc->grnt.iMcs = alloc->grnt.iMcs;
14355 retxAlloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl\
14356 [alloc->hqProc->rvIdx];
14357 retxAlloc->grnt.nDmrs = 0;
14358 retxAlloc->grnt.hop = 0;
14359 retxAlloc->grnt.delayBit = 0;
14360 retxAlloc->rnti = alloc->rnti;
14361 retxAlloc->ue = NULLP;
14362 retxAlloc->pdcch = FALSE;
14363 retxAlloc->forMsg3 = TRUE;
14364 retxAlloc->raCb = alloc->raCb;
14365 retxAlloc->hqProc = alloc->hqProc;
14366 rgSCHUhmRetx(retxAlloc->hqProc);
14373 * @brief Uplink Scheduling Handler.
14377 * Function: rgSCHCmnUlAlloc
14378 * Purpose: This function Handles Uplink Scheduling.
14380 * Invoked by: Common Scheduler
14382 * @param[in] RgSchCellCb *cell
14385 /* ccpu00132653- The definition of this function made common for TDD and FDD*/
14387 PRIVATE Void rgSCHCmnUlAlloc
14392 PRIVATE Void rgSCHCmnUlAlloc(cell)
14396 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14397 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
14398 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
14399 RgSchCmnUlRbAllocInfo allocInfo;
14400 RgSchCmnUlRbAllocInfo *allocInfoRef = &allocInfo;
14406 TRC2(rgSCHCmnUlAlloc);
14408 /* Initializing RgSchCmnUlRbAllocInfo structure */
14409 rgSCHCmnInitUlRbAllocInfo(allocInfoRef);
14411 /* Get Uplink Subframe */
14412 allocInfoRef->sf = &cellUl->ulSfArr[cellUl->schdIdx];
14414 /* initializing the UL PRB count */
14415 allocInfoRef->sf->totPrb = 0;
14419 rgSCHCmnSpsUlTti(cell, allocInfoRef);
14422 if(*allocInfoRef->sf->allocCountRef == 0)
14426 if ((hole = rgSCHUtlUlHoleFirst(allocInfoRef->sf)) != NULLP)
14428 /* Sanity check of holeDb */
14429 if (allocInfoRef->sf->holeDb->count == 1 && hole->start == 0)
14431 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
14432 /* Re-Initialize available subbands because of CFI change*/
14433 allocInfoRef->sf->availSubbands = cell->dynCfiCb.\
14434 bwInfo[cellDl->currCfi].numSb;
14435    /* Currently doing 5GTF UL SF specific initialization here;
14436       this needs to be moved to a proper place */
14438 allocInfoRef->sf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
14439 allocInfoRef->sf->numUePerGrp = cell->cell5gtfCb.uePerGrpPerTti;
14440 for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
14442 allocInfoRef->sf->sfBeamInfo[idx].totVrbgAllocated = 0;
14443 allocInfoRef->sf->sfBeamInfo[idx].totVrbgRequired = 0;
14444 allocInfoRef->sf->sfBeamInfo[idx].vrbgStart = 0;
14450 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
14451 "Error! holeDb sanity check failed");
14456 /* Fix: Adaptive re-transmissions prioritised over other transmissions */
14457 /* perform adaptive retransmissions */
14458 rgSCHCmnUlSfReTxAllocs(cell, allocInfoRef->sf);
14462 /* Fix: syed Adaptive Msg3 Retx crash. Release all
14463 Harq processes for which adap Retx failed, to avoid
14464 blocking. This step should be done before New TX
14465 scheduling to make hqProc available. Right now we
14466       don't check if proc is in adap Retx list for considering
14467 it to be available. But now with this release that
14468 functionality would be correct. */
14470 rgSCHCmnUlSfRlsRetxProcs(cell, allocInfoRef->sf);
14473 /* Specific UL scheduler to perform UE scheduling */
14474 cellSch->apisUl->rgSCHUlSched(cell, allocInfoRef);
14476 /* Call UL RB allocator module */
14477 rgSCHCmnAllocUlRb(cell, allocInfoRef);
14479 /* Do group power control for PUSCH */
14480 rgSCHCmnGrpPwrCntrlPusch(cell, allocInfoRef->sf);
14482 cell->sc.apis->rgSCHDrxStrtInActvTmrInUl(cell);
14484 rgSCHCmnUlAllocFnlz(cell, allocInfoRef);
14485 if(5000 == g5gtfTtiCnt)
14487 ul5gtfsidDlAlreadyMarkUl = 0;
14488 ul5gtfsidDlSchdPass = 0;
14489 ul5gtfsidUlMarkUl = 0;
14490 ul5gtfTotSchdCnt = 0;
14498 * @brief send Subframe Allocations.
14502 * Function: rgSCHCmnSndCnsldtInfo
14503 * Purpose: Send the scheduled
14504 * allocations to MAC for StaInd generation to Higher layers and
14505  *     for MUXing. Posts RgInfSfAlloc to the MAC instance.
14507 * Invoked by: Common Scheduler
14509 * @param[in] RgSchCellCb *cell
14513 PUBLIC Void rgSCHCmnSndCnsldtInfo
14518 PUBLIC Void rgSCHCmnSndCnsldtInfo(cell)
14522 RgInfSfAlloc *subfrmAlloc;
14524 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14526 TRC2(rgSCHCmnSndCnsldtInfo);
14528 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14530 /* Send the allocations to MAC for MUXing */
14531 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
14532 subfrmAlloc->cellId = cell->cellId;
14533 /* Populate the List of UEs needing PDB-based Flow control */
14534 cellSch->apisDl->rgSCHDlFillFlwCtrlInfo(cell, subfrmAlloc);
14536 if((subfrmAlloc->rarInfo.numRaRntis) ||
14538 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14539 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14540 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14542 (subfrmAlloc->ueInfo.numUes) ||
14543 (subfrmAlloc->cmnLcInfo.bitMask) ||
14544 (subfrmAlloc->ulUeInfo.numUes) ||
14545 (subfrmAlloc->flowCntrlInfo.numUes))
14547 if((subfrmAlloc->rarInfo.numRaRntis) ||
14549 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14550 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14551 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14553 (subfrmAlloc->ueInfo.numUes) ||
14554 (subfrmAlloc->cmnLcInfo.bitMask) ||
14555 (subfrmAlloc->flowCntrlInfo.numUes))
14558 RgSchMacSfAlloc(&pst, subfrmAlloc);
14561 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_NUM_SUB_FRAMES;
14563 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_SF_ALLOC_SIZE;
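   /* crntSfIdx is a ring index into cell->sfAllocArr: each consolidated
    * subframe allocation is posted to MAC from the current entry and the
    * index then advances so the next TTI fills a fresh entry. */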
14569 * @brief Consolidate Subframe Allocations.
14573 * Function: rgSCHCmnCnsldtSfAlloc
14574 * Purpose: Consolidate Subframe Allocations.
14576 * Invoked by: Common Scheduler
14578 * @param[in] RgSchCellCb *cell
14582 PUBLIC Void rgSCHCmnCnsldtSfAlloc
14587 PUBLIC Void rgSCHCmnCnsldtSfAlloc(cell)
14591 RgInfSfAlloc *subfrmAlloc;
14592 CmLteTimingInfo frm;
14594 CmLListCp dlDrxInactvTmrLst;
14595 CmLListCp dlInActvLst;
14596 CmLListCp ulInActvLst;
14597 RgSchCmnCell *cellSch = NULLP;
14599 TRC2(rgSCHCmnCnsldtSfAlloc);
14601 cmLListInit(&dlDrxInactvTmrLst);
14602 cmLListInit(&dlInActvLst);
14603 cmLListInit(&ulInActvLst);
14605 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14607 /* Get Downlink Subframe */
14608 frm = cell->crntTime;
14609 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
14610 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14612 /* Fill the allocation Info */
14613 rgSCHUtlFillRgInfRarInfo(dlSf, subfrmAlloc, cell);
14616 rgSCHUtlFillRgInfUeInfo(dlSf, cell, &dlDrxInactvTmrLst,
14617 &dlInActvLst, &ulInActvLst);
14618 #ifdef RG_PFS_STATS
14619 cell->totalPrb += dlSf->bwAssigned;
14621    /* Mark the following UEs inactive for UL*/
14622 cellSch = RG_SCH_CMN_GET_CELL(cell);
14624 /* Calling Scheduler specific function with DRX inactive UE list*/
14625 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInActvLst);
14626 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInActvLst);
14629 /*re/start DRX inactivity timer for the UEs*/
14630 (Void)rgSCHDrxStrtInActvTmr(cell,&dlDrxInactvTmrLst,RG_SCH_DRX_DL);
14636 * @brief Initialize the DL Allocation Information Structure.
14640 * Function: rgSCHCmnInitDlRbAllocInfo
14641 * Purpose: Initialize the DL Allocation Information Structure.
14643 * Invoked by: Common Scheduler
14645 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
14649 PRIVATE Void rgSCHCmnInitDlRbAllocInfo
14651 RgSchCmnDlRbAllocInfo *allocInfo
14654 PRIVATE Void rgSCHCmnInitDlRbAllocInfo(allocInfo)
14655 RgSchCmnDlRbAllocInfo *allocInfo;
14658 TRC2(rgSCHCmnInitDlRbAllocInfo);
14659 cmMemset((U8 *)&allocInfo->pcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14660 cmMemset((U8 *)&allocInfo->bcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14661 cmMemset((U8 *)allocInfo->raRspAlloc, (U8)0,
14662 RG_SCH_CMN_MAX_CMN_PDCCH*sizeof(RgSchDlRbAlloc));
14664 allocInfo->msg4Alloc.msg4DlSf = NULLP;
14665 cmLListInit(&allocInfo->msg4Alloc.msg4TxLst);
14666 cmLListInit(&allocInfo->msg4Alloc.msg4RetxLst);
14667 cmLListInit(&allocInfo->msg4Alloc.schdMsg4TxLst);
14668 cmLListInit(&allocInfo->msg4Alloc.schdMsg4RetxLst);
14669 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4TxLst);
14670 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4RetxLst);
14672 allocInfo->ccchSduAlloc.ccchSduDlSf = NULLP;
14673 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduTxLst);
14674 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduRetxLst);
14675 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduTxLst);
14676 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduRetxLst);
14677 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst);
14678 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst);
14681 allocInfo->dedAlloc.dedDlSf = NULLP;
14682 cmLListInit(&allocInfo->dedAlloc.txHqPLst);
14683 cmLListInit(&allocInfo->dedAlloc.retxHqPLst);
14684 cmLListInit(&allocInfo->dedAlloc.schdTxHqPLst);
14685 cmLListInit(&allocInfo->dedAlloc.schdRetxHqPLst);
14686 cmLListInit(&allocInfo->dedAlloc.nonSchdTxHqPLst);
14687 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxHqPLst);
14689 cmLListInit(&allocInfo->dedAlloc.txRetxHqPLst);
14690 cmLListInit(&allocInfo->dedAlloc.schdTxRetxHqPLst);
14691 cmLListInit(&allocInfo->dedAlloc.nonSchdTxRetxHqPLst);
14693 cmLListInit(&allocInfo->dedAlloc.txSpsHqPLst);
14694 cmLListInit(&allocInfo->dedAlloc.retxSpsHqPLst);
14695 cmLListInit(&allocInfo->dedAlloc.schdTxSpsHqPLst);
14696 cmLListInit(&allocInfo->dedAlloc.schdRetxSpsHqPLst);
14697 cmLListInit(&allocInfo->dedAlloc.nonSchdTxSpsHqPLst);
14698 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxSpsHqPLst);
14702 rgSCHLaaCmnInitDlRbAllocInfo (allocInfo);
14705 cmLListInit(&allocInfo->dedAlloc.errIndTxHqPLst);
14706 cmLListInit(&allocInfo->dedAlloc.schdErrIndTxHqPLst);
14707 cmLListInit(&allocInfo->dedAlloc.nonSchdErrIndTxHqPLst);
14712 * @brief Initialize the UL Allocation Information Structure.
14716 * Function: rgSCHCmnInitUlRbAllocInfo
14717 * Purpose: Initialize the UL Allocation Information Structure.
14719 * Invoked by: Common Scheduler
14721 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
14725 PUBLIC Void rgSCHCmnInitUlRbAllocInfo
14727 RgSchCmnUlRbAllocInfo *allocInfo
14730 PUBLIC Void rgSCHCmnInitUlRbAllocInfo(allocInfo)
14731 RgSchCmnUlRbAllocInfo *allocInfo;
14734 TRC2(rgSCHCmnInitUlRbAllocInfo);
14735 allocInfo->sf = NULLP;
14736 cmLListInit(&allocInfo->contResLst);
14737 cmLListInit(&allocInfo->schdContResLst);
14738 cmLListInit(&allocInfo->nonSchdContResLst);
14739 cmLListInit(&allocInfo->ueLst);
14740 cmLListInit(&allocInfo->schdUeLst);
14741 cmLListInit(&allocInfo->nonSchdUeLst);
14747 * @brief Scheduling for PUCCH group power control.
14751 * Function: rgSCHCmnGrpPwrCntrlPucch
14752 * Purpose: This function does group power control for PUCCH
14753 * corresponding to the subframe for which DL UE allocations
14756 * Invoked by: Common Scheduler
14758 * @param[in] RgSchCellCb *cell
14762 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch
14768 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch(cell, dlSf)
14773 TRC2(rgSCHCmnGrpPwrCntrlPucch);
14775 rgSCHPwrGrpCntrlPucch(cell, dlSf);
14781 * @brief Scheduling for PUSCH group power control.
14785 * Function: rgSCHCmnGrpPwrCntrlPusch
14786 * Purpose: This function does group power control, for
14787 * the subframe for which UL allocation has (just) happened.
14789 * Invoked by: Common Scheduler
14791 * @param[in] RgSchCellCb *cell
14792 * @param[in] RgSchUlSf *ulSf
14796 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch
14802 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch(cell, ulSf)
14807 /*removed unused variable *cellSch*/
14808 CmLteTimingInfo frm;
14811 TRC2(rgSCHCmnGrpPwrCntrlPusch);
14813 /* Got to pass DL SF corresponding to UL SF, so get that first.
14814 * There is no easy way of getting dlSf by having the RgSchUlSf*,
14815 * so use the UL delta from current time to get the DL SF. */
14816 frm = cell->crntTime;
14819 if(cell->emtcEnable == TRUE)
14821 RGSCH_INCR_SUB_FRAME_EMTC(frm, TFU_DLCNTRL_DLDELTA);
14826 RGSCH_INCR_SUB_FRAME(frm, TFU_DLCNTRL_DLDELTA);
14828 /* Del filling of dl.time */
14829 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14831 rgSCHPwrGrpCntrlPusch(cell, dlSf, ulSf);
14836 /* Fix: syed align multiple UEs to refresh at same time */
14837 /***********************************************************
14839 * Func : rgSCHCmnApplyUeRefresh
14841 * Desc : Apply UE refresh in CMN and Specific
14842 * schedulers. Data rates and corresponding
14843 * scratchpad variables are updated.
14851 **********************************************************/
14853 PRIVATE S16 rgSCHCmnApplyUeRefresh
14859 PRIVATE S16 rgSCHCmnApplyUeRefresh(cell, ue)
14864 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14866 U32 effNonGbrBsr = 0;
14869 TRC2(rgSCHCmnApplyUeRefresh);
14871 /* Reset the refresh cycle variableCAP */
14872 ue->ul.effAmbr = ue->ul.cfgdAmbr;
14874 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
14876 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
14878 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
14880 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
14882 cmnLcg->effGbr = cmnLcg->cfgdGbr;
14883 cmnLcg->effDeltaMbr = cmnLcg->deltaMbr;
14884 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
14885 /* Considering GBR LCG will be prioritised by UE */
14886 effGbrBsr += cmnLcg->bs;
14887          }/* Else no remaining BS, so nonLcg0 will be updated when a BSR is received */
14890 effNonGbrBsr += cmnLcg->reportedBs;
14891 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
14895 effNonGbrBsr = RGSCH_MIN(effNonGbrBsr,ue->ul.effAmbr);
14896 ue->ul.nonGbrLcgBs = effNonGbrBsr;
14898 ue->ul.nonLcg0Bs = effGbrBsr + effNonGbrBsr;
14899 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
14900 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
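   /* Net effect of the assignments above: effBsr = LCG0 BS + the sum of GBR
    * LCG BS values (each capped at effGbr + effDeltaMbr) + non-GBR BS capped
    * at the effective AMBR, so grants within a refresh cycle are bounded by
    * the configured GBR/AMBR budget. */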
14903 /* call scheduler specific event handlers
14904 * for refresh timer expiry */
14905 cellSch->apisUl->rgSCHUlUeRefresh(cell, ue);
14906 cellSch->apisDl->rgSCHDlUeRefresh(cell, ue);
14911 /***********************************************************
14913 * Func : rgSCHCmnTmrExpiry
14915  * Desc : Adds a UE to the refresh queue, so that the UE is
14916  *        periodically triggered to refresh its GBR and
14925 **********************************************************/
14927 PRIVATE S16 rgSCHCmnTmrExpiry
14929 PTR cb, /* Pointer to timer control block */
14930 S16 tmrEvnt /* Timer Event */
14933 PRIVATE S16 rgSCHCmnTmrExpiry(cb, tmrEvnt)
14934 PTR cb; /* Pointer to timer control block */
14935 S16 tmrEvnt; /* Timer Event */
14938 RgSchUeCb *ue = (RgSchUeCb *)cb;
14939 RgSchCellCb *cell = ue->cell;
14940 #if (ERRCLASS & ERRCLS_DEBUG)
14943 TRC2(rgSCHCmnTmrExpiry);
14945 #if (ERRCLASS & ERRCLS_DEBUG)
14946 if (tmrEvnt != RG_SCH_CMN_EVNT_UE_REFRESH)
14948 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnTmrExpiry(): Invalid "
14949 "timer event CRNTI:%d",ue->ueId);
14956 rgSCHCmnApplyUeRefresh(cell, ue);
14958 rgSCHCmnAddUeToRefreshQ(cell, ue, RG_SCH_CMN_REFRESH_TIME);
14963 /***********************************************************
14965 * Func : rgSCHCmnTmrProc
14967 * Desc : Timer entry point per cell. Timer
14968 * processing is triggered at every frame boundary
14977 **********************************************************/
14979 PRIVATE S16 rgSCHCmnTmrProc
14984 PRIVATE S16 rgSCHCmnTmrProc(cell)
14988 RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);
14989 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
14990 /* Moving the assignment of scheduler pointer
14991 to available scope for optimization */
14992 TRC2(rgSCHCmnTmrProc);
14994 if ((cell->crntTime.subframe % RGSCH_NUM_SUB_FRAMES_5G) == 0)
14996 /* Reset the counters periodically */
14997 if ((cell->crntTime.sfn % RG_SCH_CMN_CSG_REFRESH_TIME) == 0)
14999 RG_SCH_RESET_HCSG_DL_PRB_CNTR(cmnDlCell);
15000 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cmnUlCell);
15002 if ((cell->crntTime.sfn % RG_SCH_CMN_OVRLDCTRL_REFRESH_TIME) == 0)
15005 cell->measurements.ulTpt = ((cell->measurements.ulTpt * 95) + ( cell->measurements.ulBytesCnt * 5))/100;
15006 cell->measurements.dlTpt = ((cell->measurements.dlTpt * 95) + ( cell->measurements.dlBytesCnt * 5))/100;
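      /* Both throughput figures are exponentially weighted moving averages,
       * new = 0.95*old + 0.05*bytes-in-this-interval, recomputed once every
       * RG_SCH_CMN_OVRLDCTRL_REFRESH_TIME frames; this smooths short bursts
       * before the iTBS-cap adjustment below. */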
15008 rgSCHUtlCpuOvrLdAdjItbsCap(cell);
15009 /* reset cell level tpt measurements for next cycle */
15010 cell->measurements.ulBytesCnt = 0;
15011 cell->measurements.dlBytesCnt = 0;
15013    /* Comparing with zero instead of using % is done for efficiency.
15014     * If the timer resolution changes, update the
15015     * macro RG_SCH_CMN_REFRESH_TIMERES accordingly */
15016 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
15017 cmPrcTmr(&sched->tmrTqCp, sched->tmrTq, (PFV)rgSCHCmnTmrExpiry);
15024 /***********************************************************
15026 * Func : rgSchCmnUpdCfiVal
15028 * Desc : Update the CFI value if CFI switch was done
15036 **********************************************************/
15038 PRIVATE Void rgSchCmnUpdCfiVal
15044 PRIVATE Void rgSchCmnUpdCfiVal(cell, delta)
15050 CmLteTimingInfo pdsch;
15051 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15060 TRC2(rgSchCmnUpdCfiVal);
15062 pdsch = cell->crntTime;
15063 RGSCH_INCR_SUB_FRAME(pdsch, delta);
15064 dlSf = rgSCHUtlSubFrmGet(cell, pdsch);
15065    /* Fix for DCFI FLE issue: when DL delta is 1, UL delta is 0 and a CFI
15066     * change happens in that SF, then the UL PDCCH allocation happens with the old CFI
15067     * but the CFI in the control request carries the updated value since it was stored in the CELL
15069 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
15070 if(cell->dynCfiCb.pdcchSfIdx != 0xFF)
15073 dlIdx = rgSCHUtlGetDlSfIdx(cell, &pdsch);
15075 dlIdx = (((pdsch.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (pdsch.subframe % RGSCH_NUM_SUB_FRAMES));
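   /* The direct computation above treats cell->subFrms as a two-frame ring
    * of subframes: the SFN parity picks the frame half and the subframe
    * number the slot within it, e.g. sfn = 7, subframe = 3 maps to index 13
    * when RGSCH_NUM_SUB_FRAMES is 10 (illustrative values). */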
15076 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
15078 /* If current downlink subframe index is same as pdcch SF index,
15079 * perform the switching of CFI in this subframe */
15080 if(cell->dynCfiCb.pdcchSfIdx == dlIdx)
15082 cellCmnDl->currCfi = cellCmnDl->newCfi;
15083 cell->dynCfiCb.pdcchSfIdx = 0xFF;
15085 /* Updating the nCce value based on the new CFI */
15087 splSfCfi = cellCmnDl->newCfi;
15088 for(idx = 0; idx < cell->numDlSubfrms; idx++)
15090 tddSf = cell->subFrms[idx];
15092 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][tddSf->sfNum];
15094 if(tddSf->sfType == RG_SCH_SPL_SF_DATA)
15096 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
15098 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
15102 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][cellCmnDl->currCfi];
15105       /* Setting the switch-over window length based on the config index.
15106        * During the switch-over period all the UL transmissions are ACKed
15108 cell->dynCfiCb.switchOvrWinLen =
15109 rgSchCfiSwitchOvrWinLen[cell->ulDlCfgIdx];
15111 cell->nCce = cell->dynCfiCb.cfi2NCceTbl[0][cellCmnDl->currCfi];
15112    /* Fix for DCFI FLE issue: when DL delta is 1, UL delta is 0 and a CFI
15113     * change happens in that SF, then the UL PDCCH allocation happens with the old CFI
15114     * but the CFI in the control request carries the updated value since it was stored in the CELL
15116 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
15117 cell->dynCfiCb.switchOvrWinLen = rgSchCfiSwitchOvrWinLen[7];
15125 /***********************************************************
15127 * Func : rgSchCmnUpdtPdcchSfIdx
15129  * Desc : Reset the dynamic CFI tracking state and update the PDCCH subframe index at which the new CFI is applied
15137 **********************************************************/
15140 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
15147 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, sfNum)
15154 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
15160 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx)
15168 TRC2(rgSchCmnUpdtPdcchSfIdx);
15170 /* Resetting the parameters on CFI switching */
15171 cell->dynCfiCb.cceUsed = 0;
15172 cell->dynCfiCb.lowCceCnt = 0;
15174 cell->dynCfiCb.cceFailSum = 0;
15175 cell->dynCfiCb.cceFailCnt = 0;
15176 cell->dynCfiCb.prevCceFailIdx = 0;
15178 cell->dynCfiCb.switchOvrInProgress = TRUE;
15180 for(idx = 0; idx < cell->dynCfiCb.numFailSamples; idx++)
15182 cell->dynCfiCb.cceFailSamples[idx] = 0;
15185 cell->dynCfiCb.ttiCnt = 0;
15187 cell->dynCfiCb.cfiSwitches++;
15188 cfiSwitchCnt = cell->dynCfiCb.cfiSwitches;
15191 cell->dynCfiCb.pdcchSfIdx = (dlIdx +
15192 rgSchTddPdcchSfIncTbl[cell->ulDlCfgIdx][sfNum]) % cell->numDlSubfrms;
15194 cell->dynCfiCb.pdcchSfIdx = (dlIdx + RG_SCH_CFI_APPLY_DELTA) % \
15195 RGSCH_NUM_DL_SUBFRAMES;
15199 /***********************************************************
15201 * Func : rgSchCmnUpdCfiDb
15203 * Desc : Update the counters related to dynamic
15204 * CFI feature in cellCb.
15212 **********************************************************/
15214 PUBLIC Void rgSchCmnUpdCfiDb
15220 PUBLIC Void rgSchCmnUpdCfiDb(cell, delta)
15225 CmLteTimingInfo frm;
15231 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15232 U8 nCceLowerCfi = 0;
15239 TRC2(rgSchCmnUpdCfiDb);
15241 /* Get Downlink Subframe */
15242 frm = cell->crntTime;
15243 RGSCH_INCR_SUB_FRAME(frm, delta);
15246 dlIdx = rgSCHUtlGetDlSfIdx(cell, &frm);
15247 dlSf = cell->subFrms[dlIdx];
15248 isHiDci0 = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15250    /* Changing the indexing
15251       so that the proper subframe is selected */
15252 dlIdx = (((frm.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (frm.subframe % RGSCH_NUM_SUB_FRAMES));
15253 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
15254 dlSf = cell->subFrms[dlIdx];
15257 currCfi = cellSch->dl.currCfi;
15259 if(!cell->dynCfiCb.switchOvrInProgress)
15262 if(!cell->dynCfiCb.isDynCfiEnb)
15264 if(currCfi != cellSch->cfiCfg.cfi)
15266 if(currCfi < cellSch->cfiCfg.cfi)
15268 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15269 cfiIncr = cell->dynCfiCb.cfiIncr;
15273 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15274 cfiDecr = cell->dynCfiCb.cfiDecr;
15281       /* Setting ttiMod to 0 for ttiCnt > 1000 in case this
15282        * function was not called in a UL subframe*/
15283 if(cell->dynCfiCb.ttiCnt > RGSCH_CFI_TTI_MON_INTRVL)
15290 ttiMod = cell->dynCfiCb.ttiCnt % RGSCH_CFI_TTI_MON_INTRVL;
15293 dlSf->dlUlBothCmplt++;
15295 if((dlSf->dlUlBothCmplt == 2) || (!isHiDci0))
15297 if(dlSf->dlUlBothCmplt == 2)
15300 /********************STEP UP CRITERIA********************/
15301 /* Updating the CCE failure count parameter */
15302 cell->dynCfiCb.cceFailCnt += dlSf->isCceFailure;
15303 cell->dynCfiCb.cceFailSum += dlSf->isCceFailure;
15305 /* Check if cfi step up can be performed */
15306 if(currCfi < cell->dynCfiCb.maxCfi)
15308 if(cell->dynCfiCb.cceFailSum >= cell->dynCfiCb.cfiStepUpTtiCnt)
15310 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15311 cfiIncr = cell->dynCfiCb.cfiIncr;
15316 /********************STEP DOWN CRITERIA********************/
15318 /* Updating the no. of CCE used in this dl subframe */
15319 cell->dynCfiCb.cceUsed += dlSf->cceCnt;
15321 if(currCfi > RGSCH_MIN_CFI_VAL)
15323 /* calculating the number of CCE for next lower CFI */
15325 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15326 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[mPhich][currCfi-1];
15328 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[0][currCfi-1];
15330 if(dlSf->cceCnt < nCceLowerCfi)
15332 /* Updating the count of TTIs in which no. of CCEs
15333 * used were less than the CCEs of next lower CFI */
15334 cell->dynCfiCb.lowCceCnt++;
15339 totalCce = (nCceLowerCfi * cell->dynCfiCb.cfiStepDownTtiCnt *
15340 RGSCH_CFI_CCE_PERCNTG)/100;
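            /* Step-down check built from totalCce above: the CFI is lowered
             * one level only if, across at least cfiStepDownTtiCnt monitored
             * TTIs, there were no CCE allocation failures, each such TTI used
             * fewer CCEs than the next lower CFI provides, and the aggregate
             * CCE usage stayed below RGSCH_CFI_CCE_PERCNTG percent of the
             * lower-CFI capacity over those TTIs. */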
15342 if((!cell->dynCfiCb.cceFailSum) &&
15343 (cell->dynCfiCb.lowCceCnt >=
15344 cell->dynCfiCb.cfiStepDownTtiCnt) &&
15345 (cell->dynCfiCb.cceUsed < totalCce))
15347 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15348 cfiDecr = cell->dynCfiCb.cfiDecr;
15354 cceFailIdx = ttiMod/cell->dynCfiCb.failSamplePrd;
15356 if(cceFailIdx != cell->dynCfiCb.prevCceFailIdx)
15358 /* New sample period has started. Subtract the old count
15359 * from the new sample period */
15360 cell->dynCfiCb.cceFailSum -= cell->dynCfiCb.cceFailSamples[cceFailIdx];
15362 /* Store the previous sample period data */
15363 cell->dynCfiCb.cceFailSamples[cell->dynCfiCb.prevCceFailIdx]
15364 = cell->dynCfiCb.cceFailCnt;
15366 cell->dynCfiCb.prevCceFailIdx = cceFailIdx;
15368 /* Resetting the CCE failure count as zero for next sample period */
15369 cell->dynCfiCb.cceFailCnt = 0;
15374          /* Resetting the parameters after the monitoring interval expired */
15375 cell->dynCfiCb.cceUsed = 0;
15376 cell->dynCfiCb.lowCceCnt = 0;
15377 cell->dynCfiCb.ttiCnt = 0;
15380 cell->dynCfiCb.ttiCnt++;
15384 if(cellSch->dl.newCfi != cellSch->dl.currCfi)
15387 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, dlSf->sfNum);
15389 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx);
15396 * @brief Dl Scheduler for Broadcast and Common channel scheduling.
15400 * Function: rgSCHCmnDlCommonChSch
15401 * Purpose: This function schedules DL Common channels for LTE.
15402 * Invoked by TTI processing in TOM. Scheduling is done for
15403 * BCCH, PCCH, Msg4, CCCH SDU, RAR in that order
15405 * Invoked by: TOM (TTI processing)
15407 * @param[in] RgSchCellCb *cell
15411 PUBLIC Void rgSCHCmnDlCommonChSch
15416 PUBLIC Void rgSCHCmnDlCommonChSch(cell)
15420 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15422 TRC2(rgSCHCmnDlCommonChSch);
15424 cellSch->apisDl->rgSCHDlTickForPdbTrkng(cell);
15425 rgSchCmnUpdCfiVal(cell, RG_SCH_CMN_DL_DELTA);
15427 /* handle Inactive UEs for DL */
15428 rgSCHCmnHdlDlInactUes(cell);
15430 /* Send a Tick to Refresh Timer */
15431 rgSCHCmnTmrProc(cell);
15433 if (cell->isDlDataAllwd && (cell->stopSiSch == FALSE))
15435 rgSCHCmnInitRbAlloc(cell);
15436 /* Perform DL scheduling of BCCH, PCCH */
15437 rgSCHCmnDlBcchPcchAlloc(cell);
15441 if(cell->siCb.inWindow != 0)
15443 cell->siCb.inWindow--;
15446 if (cell->isDlDataAllwd && (cell->stopDlSch == FALSE))
15448 rgSCHCmnDlCcchRarAlloc(cell);
15454 * @brief Scheduler invocation per TTI.
15458 * Function: rgSCHCmnUlSch
15459  *     Purpose: This function implements the UL scheduler alone, so that
15460  *              scheduling can be performed with more flexibility.
15462 * Invoked by: TOM (TTI processing)
15464 * @param[in] RgSchCellCb *cell
15468 PUBLIC Void rgSCHCmnUlSch
15473 PUBLIC Void rgSCHCmnUlSch(cell)
15477 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15479 TRC2(rgSCHCmnUlSch);
15483 if(TRUE == rgSCHLaaSCellEnabled(cell))
15489 if(cellSch->ul.schdIdx != RGSCH_INVALID_INFO)
15491 rgSchCmnUpdCfiVal(cell, TFU_ULCNTRL_DLDELTA);
15493 /* Handle Inactive UEs for UL */
15494 rgSCHCmnHdlUlInactUes(cell);
15495 /* Perform UL Scheduling EVERY TTI */
15496 rgSCHCmnUlAlloc(cell);
15498 /* Calling function to update CFI parameters*/
15499 rgSchCmnUpdCfiDb(cell, TFU_ULCNTRL_DLDELTA);
15501 if(cell->dynCfiCb.switchOvrWinLen > 0)
15503 /* Decrementing the switchover window length */
15504 cell->dynCfiCb.switchOvrWinLen--;
15506 if(!cell->dynCfiCb.switchOvrWinLen)
15508 if(cell->dynCfiCb.dynCfiRecfgPend)
15510 /* Toggling the Dynamic CFI enabling */
15511 cell->dynCfiCb.isDynCfiEnb ^= 1;
15512 rgSCHDynCfiReCfg(cell, cell->dynCfiCb.isDynCfiEnb);
15513 cell->dynCfiCb.dynCfiRecfgPend = FALSE;
15515 cell->dynCfiCb.switchOvrInProgress = FALSE;
15523 rgSCHCmnSpsUlTti(cell, NULLP);
15533  * @brief This function updates the scheduler with the BO of a service for a UE.
15537 * Function: rgSCHCmnDlDedBoUpd
15538 * Purpose: This function should be called whenever there is a
15539  *              change in BO for a service.
15541 * Invoked by: BO and Scheduler
15543 * @param[in] RgSchCellCb* cell
15544 * @param[in] RgSchUeCb* ue
15545 * @param[in] RgSchDlLcCb* svc
15550 PUBLIC Void rgSCHCmnDlDedBoUpd
15557 PUBLIC Void rgSCHCmnDlDedBoUpd(cell, ue, svc)
15563 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15564 TRC2(rgSCHCmnDlDedBoUpd);
15566    /* RACHO : if the UE's idle time is exceeded and a BO update
15567     * is received, then add the UE to the PDCCH Order Q */
15568 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
15570 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue, cell);
15571 /* If PDCCH order is already triggered and we are waiting for
15572 * RACH from UE then do not add to PdcchOdrQ. */
15573 if (ueDl->rachInfo.rapIdLnk.node == NULLP)
15575 rgSCHCmnDlAdd2PdcchOdrQ(cell, ue);
15581 /* If SPS service, invoke SPS module */
15582 if (svc->dlLcSpsCfg.isSpsEnabled)
15584 rgSCHCmnSpsDlDedBoUpd(cell, ue, svc);
15585       /* Note: Return from here, no update needed in other schedulers */
15590 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
15592 cellSch->apisEmtcDl->rgSCHDlDedBoUpd(cell, ue, svc);
15593 //printf("rgSCHEMTCDlDedBoUpd\n");
15598 cellSch->apisDl->rgSCHDlDedBoUpd(cell, ue, svc);
15603 rgSCHSCellDlDedBoUpd(cell, ue, svc);
15611  * @brief Removes a UE from the cell's TA List.
15615 * Function: rgSCHCmnRmvFrmTaLst
15616  *     Purpose: Removes a UE from the cell's TA List.
15618 * Invoked by: Specific Scheduler
15620 * @param[in] RgSchCellCb* cell
15621 * @param[in] RgSchUeCb* ue
15626 PUBLIC Void rgSCHCmnRmvFrmTaLst
15632 PUBLIC Void rgSCHCmnRmvFrmTaLst(cell, ue)
15637 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15638 TRC2(rgSCHCmnRmvFrmTaLst);
15641 if(cell->emtcEnable && ue->isEmtcUe)
15643 rgSCHEmtcRmvFrmTaLst(cellCmnDl,ue);
15648 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
15649 ue->dlTaLnk.node = (PTR)NULLP;
15654 /* Fix: syed Remove the msg4Proc from cell
15655 * msg4Retx Queue. I have used CMN scheduler function
15656 * directly. Please define a new API and call this
15657 * function through that. */
15660 * @brief This function removes MSG4 HARQ process from cell RETX Queues.
15664 * Function: rgSCHCmnDlMsg4ProcRmvFrmRetx
15665 * Purpose: This function removes MSG4 HARQ process from cell RETX Queues.
15667 * Invoked by: UE/RACB deletion.
15669 * @param[in] RgSchCellCb* cell
15670 * @param[in] RgSchDlHqProc* hqP
15675 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx
15678 RgSchDlHqProcCb *hqP
15681 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx(cell, hqP)
15683 RgSchDlHqProcCb *hqP;
15686 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15687 TRC2(rgSCHCmnDlMsg4ProcRmvFrmRetx);
15689 if (hqP->tbInfo[0].ccchSchdInfo.retxLnk.node)
15691 if (hqP->hqE->msg4Proc == hqP)
15693 cmLListDelFrm(&cellSch->dl.msg4RetxLst, \
15694 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15695 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15698 else if(hqP->hqE->ccchSduProc == hqP)
15700 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
15701 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15702 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15711 * @brief This function adds a HARQ process for retx.
15715 * Function: rgSCHCmnDlProcAddToRetx
15716 * Purpose: This function adds a HARQ process to retransmission
15717 * queue. This may be performed when a HARQ ack is
15720 * Invoked by: HARQ feedback processing
15722 * @param[in] RgSchCellCb* cell
15723 * @param[in] RgSchDlHqProc* hqP
15728 PUBLIC Void rgSCHCmnDlProcAddToRetx
15731 RgSchDlHqProcCb *hqP
15734 PUBLIC Void rgSCHCmnDlProcAddToRetx(cell, hqP)
15736 RgSchDlHqProcCb *hqP;
15739 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15740 TRC2(rgSCHCmnDlProcAddToRetx);
15742 if (hqP->hqE->msg4Proc == hqP) /* indicating msg4 transmission */
15744 cmLListAdd2Tail(&cellSch->dl.msg4RetxLst, \
15745 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15746 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15749 else if(hqP->hqE->ccchSduProc == hqP)
15751 /*If CCCH SDU being transmitted without cont res CE*/
15752 cmLListAdd2Tail(&cellSch->dl.ccchSduRetxLst,
15753 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15754 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15760 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
15762 /* Invoke SPS module for SPS HARQ proc re-transmission handling */
15763 rgSCHCmnSpsDlProcAddToRetx(cell, hqP);
15766 #endif /* LTEMAC_SPS */
15768 if((TRUE == cell->emtcEnable)
15769 && (TRUE == hqP->hqE->ue->isEmtcUe))
15771 cellSch->apisEmtcDl->rgSCHDlProcAddToRetx(cell, hqP);
15776 cellSch->apisDl->rgSCHDlProcAddToRetx(cell, hqP);
15784 * @brief This function performs RI validation and
15785 * updates it to the ueCb.
15789 * Function: rgSCHCmnDlSetUeRi
15790 * Purpose: This function performs RI validation and
15791 * updates it to the ueCb.
15793 * Invoked by: rgSCHCmnDlCqiInd
15795 * @param[in] RgSchCellCb *cell
15796 * @param[in] RgSchUeCb *ue
15798 * @param[in] Bool isPeriodic
15803 PRIVATE Void rgSCHCmnDlSetUeRi
15811 PRIVATE Void rgSCHCmnDlSetUeRi(cell, ue, ri, isPer)
15818 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15819 RgSchCmnUeInfo *ueSchCmn = RG_SCH_CMN_GET_CMN_UE(ue);
15820 TRC2(rgSCHCmnDlSetUeRi);
15823 RgSchUePCqiCb *cqiCb = RG_SCH_GET_UE_CELL_CQI_CB(ue,cell);
15828 /* FIX for RRC Reconfiguration issue */
15829    /* ccpu00140894- During TX mode transition the RI report is not entertained for
15830     * a specific duration during which SCH expects the UE to complete the TX mode transition*/
15831 if (ue->txModeTransCmplt == FALSE)
15836 /* Restrict the Number of TX layers to cell->numTxAntPorts.
15837 * Protection from invalid RI values. */
15838 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
15840 /* Special case of converting PMI to sane value when
15841 * there is a switch in RI from 1 to 2 and PMI reported
15842 * for RI=1 is invalid for RI=2 */
15843 if ((cell->numTxAntPorts == 2) && (ue->mimoInfo.txMode == RGR_UE_TM_4))
15845 if ((ri == 2) && ( ueDl->mimoInfo.ri == 1))
15847 ueDl->mimoInfo.pmi = (ueDl->mimoInfo.pmi < 2)? 1:2;
15851 /* Restrict the Number of TX layers according to the UE Category */
15852 ueDl->mimoInfo.ri = RGSCH_MIN(ri, rgUeCatTbl[ueSchCmn->ueCat].maxTxLyrs);
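   /* Two caps apply to the reported RI: the cell antenna-port count (applied
    * earlier) and the UE-category layer limit from rgUeCatTbl; for example a
    * category that supports only 2 layers keeps ri <= 2 even on a 4-port
    * cell (the exact limits come from the category table). */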
15854 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].riCnt[ueDl->mimoInfo.ri-1]++;
15855 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15859 ue->tenbStats->stats.nonPersistent.sch[0].riCnt[ueDl->mimoInfo.ri-1]++;
15860 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15866 /* If RI is from Periodic CQI report */
15867 cqiCb->perRiVal = ueDl->mimoInfo.ri;
15868 /* Reset at every Periodic RI Reception */
15869 cqiCb->invalidateCqi = FALSE;
15873 /* If RI is from Aperiodic CQI report */
15874 if (cqiCb->perRiVal != ueDl->mimoInfo.ri)
15876 /* if this aperRI is different from last reported
15877 * perRI then invalidate all CQI reports till next
15879 cqiCb->invalidateCqi = TRUE;
15883 cqiCb->invalidateCqi = FALSE;
15888 if (ueDl->mimoInfo.ri > 1)
15890 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
15892 else if (ue->mimoInfo.txMode == RGR_UE_TM_3) /* ri == 1 */
15894 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
15902 * @brief This function performs PMI validation and
15903 * updates it to the ueCb.
15907 * Function: rgSCHCmnDlSetUePmi
15908 * Purpose: This function performs PMI validation and
15909 * updates it to the ueCb.
15911 * Invoked by: rgSCHCmnDlCqiInd
15913 * @param[in] RgSchCellCb *cell
15914 * @param[in] RgSchUeCb *ue
15915 * @param[in] U8 pmi
15920 PRIVATE S16 rgSCHCmnDlSetUePmi
15927 PRIVATE S16 rgSCHCmnDlSetUePmi(cell, ue, pmi)
15933 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15934 TRC2(rgSCHCmnDlSetUePmi);
15936 if (ue->txModeTransCmplt == FALSE)
15941 if (cell->numTxAntPorts == 2)
15947 if (ueDl->mimoInfo.ri == 2)
15949 /*ccpu00118150 - MOD - changed pmi value validation from 0 to 2*/
15950          /* PMI 2 and 3 are invalid in case of 2 TxAnt and 2-layered SM */
15951 if (pmi == 2 || pmi == 3)
15955 ueDl->mimoInfo.pmi = pmi+1;
15959 ueDl->mimoInfo.pmi = pmi;
15962 else if (cell->numTxAntPorts == 4)
15968 ueDl->mimoInfo.pmi = pmi;
15970 /* Reset the No PMI Flag in forceTD */
15971 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
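/* Illustrative sketch (assumption, not from the original source): for a
 * 2-antenna-port cell, when RI = 2 the reported PMI values 2 and 3 are
 * treated as invalid (the failure handling is elided here) and a valid
 * reported PMI p is stored as p + 1, while with RI = 1 the reported PMI is
 * stored unchanged. So, for instance, a report of PMI 1 under RI = 2 ends
 * up as codebook index 2 in ueDl->mimoInfo.pmi. */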
15976 * @brief This function Updates the DL CQI on PUCCH for the UE.
15980 * Function: rgSCHCmnDlProcCqiMode10
15982 * This function updates the DL CQI on PUCCH for the UE.
15984 * Invoked by: rgSCHCmnDlCqiOnPucchInd
15986 * Processing Steps:
15988 * @param[in] RgSchCellCb *cell
15989 * @param[in] RgSchUeCb *ue
15990 * @param[in] TfuDlCqiRpt *dlCqiRpt
15995 #ifdef RGR_CQI_REPT
15997 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
16001 TfuDlCqiPucch *pucchCqi,
16005 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail)
16008 TfuDlCqiPucch *pucchCqi;
16013 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
16017 TfuDlCqiPucch *pucchCqi
16020 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi)
16023 TfuDlCqiPucch *pucchCqi;
16027 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16028 TRC2(rgSCHCmnDlProcCqiMode10);
16030 if (pucchCqi->u.mode10Info.type == TFU_RPT_CQI)
16032 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16033 /* Checking whether the decoded CQI is a value between 1 and 15*/
16034 if((pucchCqi->u.mode10Info.u.cqi) && (pucchCqi->u.mode10Info.u.cqi
16035 < RG_SCH_CMN_MAX_CQI))
16037 ueDl->cqiFlag = TRUE;
16038 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode10Info.u.cqi;
16039 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16040 /* ccpu00117452 - MOD - Changed macro name from
16041 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16042 #ifdef RGR_CQI_REPT
16043 *isCqiAvail = TRUE;
16051 else if (pucchCqi->u.mode10Info.type == TFU_RPT_RI)
16053 if ( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode10Info.u.ri) )
16055 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode10Info.u.ri,
16060 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16061 pucchCqi->u.mode10Info.u.ri,ue->ueId);
16068 * @brief This function Updates the DL CQI on PUCCH for the UE.
16072 * Function: rgSCHCmnDlProcCqiMode11
16074 * This function updates the DL CQI on PUCCH for the UE.
16076 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16078 * Processing Steps:
16079 * Process CQI MODE 11
16080 * @param[in] RgSchCellCb *cell
16081 * @param[in] RgSchUeCb *ue
16082 * @param[in] TfuDlCqiRpt *dlCqiRpt
16087 #ifdef RGR_CQI_REPT
16089 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16093 TfuDlCqiPucch *pucchCqi,
16095 Bool *is2ndCwCqiAvail
16098 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16101 TfuDlCqiPucch *pucchCqi;
16103 Bool *is2ndCwCqiAvail;
16107 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16111 TfuDlCqiPucch *pucchCqi
16114 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi)
16117 TfuDlCqiPucch *pucchCqi;
16121 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16122 TRC2(rgSCHCmnDlProcCqiMode11);
16124 if (pucchCqi->u.mode11Info.type == TFU_RPT_CQI)
16126 ue->mimoInfo.puschFdbkVld = FALSE;
16127 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16128 if((pucchCqi->u.mode11Info.u.cqi.cqi) &&
16129 (pucchCqi->u.mode11Info.u.cqi.cqi < RG_SCH_CMN_MAX_CQI))
16131 ueDl->cqiFlag = TRUE;
16132 /* ccpu00117452 - MOD - Changed macro name from
16133 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16134 #ifdef RGR_CQI_REPT
16135 *isCqiAvail = TRUE;
16137 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode11Info.u.cqi.cqi;
16138 if (pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.pres)
16140 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16141 ueDl->mimoInfo.cwInfo[1].cqi, \
16142 pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.val);
16143 #ifdef RGR_CQI_REPT
16144 /* ccpu00117259 - ADD - Considering second codeword CQI info
16145 in case of MIMO for CQI Reporting */
16146 *is2ndCwCqiAvail = TRUE;
16154 rgSCHCmnDlSetUePmi(cell, ue, \
16155 pucchCqi->u.mode11Info.u.cqi.pmi);
16157 else if (pucchCqi->u.mode11Info.type == TFU_RPT_RI)
16159 if( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode11Info.u.ri))
16161 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode11Info.u.ri,
16166 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16167 pucchCqi->u.mode11Info.u.ri,ue->ueId);
16174 * @brief This function Updates the DL CQI on PUCCH for the UE.
16178 * Function: rgSCHCmnDlProcCqiMode20
16180 * This function updates the DL CQI on PUCCH for the UE.
16182 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16184 * Processing Steps:
16185 * Process CQI MODE 20
16186 * @param[in] RgSchCellCb *cell
16187 * @param[in] RgSchUeCb *ue
16188 * @param[in] TfuDlCqiRpt *dlCqiRpt
16193 #ifdef RGR_CQI_REPT
16195 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16199 TfuDlCqiPucch *pucchCqi,
16203 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail )
16206 TfuDlCqiPucch *pucchCqi;
16211 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16215 TfuDlCqiPucch *pucchCqi
16218 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi)
16221 TfuDlCqiPucch *pucchCqi;
16225 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16226 TRC2(rgSCHCmnDlProcCqiMode20);
16228 if (pucchCqi->u.mode20Info.type == TFU_RPT_CQI)
16230 if (pucchCqi->u.mode20Info.u.cqi.isWideband)
16232 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16233 if((pucchCqi->u.mode20Info.u.cqi.u.wideCqi) &&
16234 (pucchCqi->u.mode20Info.u.cqi.u.wideCqi < RG_SCH_CMN_MAX_CQI))
16236 ueDl->cqiFlag = TRUE;
16237 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode20Info.u.cqi.\
16239 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16240 /* ccpu00117452 - MOD - Changed macro name from
16241 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16242 #ifdef RGR_CQI_REPT
16243 *isCqiAvail = TRUE;
16252 else if (pucchCqi->u.mode20Info.type == TFU_RPT_RI)
16254 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode20Info.u.ri))
16256 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode20Info.u.ri,
16261 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16262 pucchCqi->u.mode20Info.u.ri,ue->ueId);
16270 * @brief This function Updates the DL CQI on PUCCH for the UE.
16274 * Function: rgSCHCmnDlProcCqiMode21
16276 * This function updates the DL CQI on PUCCH for the UE.
16278 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16280 * Processing Steps:
16281 * Process CQI MODE 21
16282 * @param[in] RgSchCellCb *cell
16283 * @param[in] RgSchUeCb *ue
16284 * @param[in] TfuDlCqiRpt *dlCqiRpt
16289 #ifdef RGR_CQI_REPT
16291 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16295 TfuDlCqiPucch *pucchCqi,
16297 Bool *is2ndCwCqiAvail
16300 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16303 TfuDlCqiPucch *pucchCqi;
16304 TfuDlCqiRpt *dlCqiRpt;
16306 Bool *is2ndCwCqiAvail;
16310 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16314 TfuDlCqiPucch *pucchCqi
16317 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi)
16320 TfuDlCqiPucch *pucchCqi;
16324 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16325 TRC2(rgSCHCmnDlProcCqiMode21);
16327 if (pucchCqi->u.mode21Info.type == TFU_RPT_CQI)
16329 ue->mimoInfo.puschFdbkVld = FALSE;
16330 if (pucchCqi->u.mode21Info.u.cqi.isWideband)
16332 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16333 if((pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi) &&
16334 (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi < RG_SCH_CMN_MAX_CQI))
16336 ueDl->cqiFlag = TRUE;
16337 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode21Info.u.cqi.\
16339 if (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.pres)
16341 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16342 ueDl->mimoInfo.cwInfo[1].cqi, \
16343 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.val);
16344 #ifdef RGR_CQI_REPT
16345 /* ccpu00117259 - ADD - Considering second codeword CQI info
16346 in case of MIMO for CQI Reporting */
16347 *is2ndCwCqiAvail = TRUE;
16350 /* ccpu00117452 - MOD - Changed macro name from
16351 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16352 #ifdef RGR_CQI_REPT
16353 *isCqiAvail = TRUE;
16360 rgSCHCmnDlSetUePmi(cell, ue, \
16361 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.pmi);
16364 else if (pucchCqi->u.mode21Info.type == TFU_RPT_RI)
16366 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode21Info.u.ri))
16368 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode21Info.u.ri,
16373 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16374 pucchCqi->u.mode21Info.u.ri,ue->ueId);
16382 * @brief This function Updates the DL CQI on PUCCH for the UE.
16386 * Function: rgSCHCmnDlCqiOnPucchInd
16388 * This function updates the DL CQI on PUCCH for the UE.
16390 * Invoked by: rgSCHCmnDlCqiInd
16392 * Processing Steps:
16393 * - Depending on the reporting mode of the PUCCH, the CQI/PMI/RI values
16394 * are updated and stored for each UE
16396 * @param[in] RgSchCellCb *cell
16397 * @param[in] RgSchUeCb *ue
16398 * @param[in] TfuDlCqiRpt *dlCqiRpt
16403 #ifdef RGR_CQI_REPT
16405 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16409 TfuDlCqiPucch *pucchCqi,
16410 RgrUeCqiRept *ueCqiRept,
16412 Bool *is2ndCwCqiAvail
16415 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16418 TfuDlCqiPucch *pucchCqi;
16419 RgrUeCqiRept *ueCqiRept;
16421 Bool *is2ndCwCqiAvail;
16425 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16429 TfuDlCqiPucch *pucchCqi
16432 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi)
16435 TfuDlCqiPucch *pucchCqi;
16439 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16440 TRC2(rgSCHCmnDlCqiOnPucchInd);
16442 /* ccpu00117452 - MOD - Changed
16443 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16444 #ifdef RGR_CQI_REPT
16445 /* Save CQI mode information in the report */
16446 ueCqiRept->cqiMode = pucchCqi->mode;
16449 switch(pucchCqi->mode)
16451 case TFU_PUCCH_CQI_MODE10:
16452 #ifdef RGR_CQI_REPT
16453 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail);
16455 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi);
16457 ueDl->cqiFlag = TRUE;
16459 case TFU_PUCCH_CQI_MODE11:
16460 #ifdef RGR_CQI_REPT
16461 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail,
16464 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi);
16466 ueDl->cqiFlag = TRUE;
16468 case TFU_PUCCH_CQI_MODE20:
16469 #ifdef RGR_CQI_REPT
16470 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail);
16472 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi);
16474 ueDl->cqiFlag = TRUE;
16476 case TFU_PUCCH_CQI_MODE21:
16477 #ifdef RGR_CQI_REPT
16478 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail,
16481 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi);
16483 ueDl->cqiFlag = TRUE;
16487 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unknown CQI Mode %d CRNTI:%d",
16488 pucchCqi->mode,ue->ueId);
16489 /* ccpu00117452 - MOD - Changed macro name from
16490 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16491 #ifdef RGR_CQI_REPT
16492 *isCqiAvail = FALSE;
16499 } /* rgSCHCmnDlCqiOnPucchInd */
16503 * @brief This function Updates the DL CQI on PUSCH for the UE.
16507 * Function: rgSCHCmnDlCqiOnPuschInd
16509 * This function updates the DL CQI on PUSCH for the UE.
16511 * Invoked by: rgSCHCmnDlCqiInd
16513 * Processing Steps:
16514 * - Depending on the reporting mode of the PUSCH, the CQI/PMI/RI values
16515 * are updated and stored for each UE
16517 * @param[in] RgSchCellCb *cell
16518 * @param[in] RgSchUeCb *ue
16519 * @param[in] TfuDlCqiRpt *dlCqiRpt
16524 #ifdef RGR_CQI_REPT
16526 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16530 TfuDlCqiPusch *puschCqi,
16531 RgrUeCqiRept *ueCqiRept,
16533 Bool *is2ndCwCqiAvail
16536 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16539 TfuDlCqiPusch *puschCqi;
16540 RgrUeCqiRept *ueCqiRept;
16542 Bool *is2ndCwCqiAvail;
16546 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16550 TfuDlCqiPusch *puschCqi
16553 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi)
16556 TfuDlCqiPusch *puschCqi;
16560 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16562 TRC2(rgSCHCmnDlCqiOnPuschInd);
16563 if (puschCqi->ri.pres == PRSNT_NODEF)
16565 if (RG_SCH_CMN_IS_RI_VALID(puschCqi->ri.val))
16567 /* Saving the previous ri value to revert back
16568 in case PMI update failed */
16569 if (RGR_UE_TM_4 == ue->mimoInfo.txMode ) /* Checking for TM4. TM8 check later */
16571 prevRiVal = ueDl->mimoInfo.ri;
16573 rgSCHCmnDlSetUeRi(cell, ue, puschCqi->ri.val, FALSE);
16577 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16578 puschCqi->ri.val,ue->ueId);
16582 ue->mimoInfo.puschFdbkVld = FALSE;
16583 /* ccpu00117452 - MOD - Changed macro name from
16584 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16585 #ifdef RGR_CQI_REPT
16586 /* Save CQI mode information in the report */
16587 ueCqiRept->cqiMode = puschCqi->mode;
16588 /* ccpu00117259 - DEL - removed default setting of isCqiAvail to TRUE */
16591 switch(puschCqi->mode)
16593 case TFU_PUSCH_CQI_MODE_20:
16594 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16595 /* Checking whether the decoded CQI is a value between 1 and 15*/
16596 if((puschCqi->u.mode20Info.wideBandCqi) &&
16597 (puschCqi->u.mode20Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
16599 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode20Info.wideBandCqi;
16600 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16601 /* ccpu00117452 - MOD - Changed macro name from
16602 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16603 #ifdef RGR_CQI_REPT
16604 *isCqiAvail = TRUE;
16612 case TFU_PUSCH_CQI_MODE_30:
16613 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16614 if((puschCqi->u.mode30Info.wideBandCqi) &&
16615 (puschCqi->u.mode30Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
16617 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode30Info.wideBandCqi;
16618 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16619 /* ccpu00117452 - MOD - Changed macro name from
16620 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16621 #ifdef RGR_CQI_REPT
16622 *isCqiAvail = TRUE;
16626 extern U32 gACqiRcvdCount;
16637 case TFU_PUSCH_CQI_MODE_12:
16638 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16639 if((puschCqi->u.mode12Info.cqiIdx[0]) &&
16640 (puschCqi->u.mode12Info.cqiIdx[0] < RG_SCH_CMN_MAX_CQI))
16642 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode12Info.cqiIdx[0];
16643 /* ccpu00117452 - MOD - Changed macro name from
16644 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16645 #ifdef RGR_CQI_REPT
16646 *isCqiAvail = TRUE;
16653 if((puschCqi->u.mode12Info.cqiIdx[1]) &&
16654 (puschCqi->u.mode12Info.cqiIdx[1] < RG_SCH_CMN_MAX_CQI))
16656 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode12Info.cqiIdx[1];
16657 /* ccpu00117452 - MOD - Changed macro name from
16658 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16659 #ifdef RGR_CQI_REPT
16660 /* ccpu00117259 - ADD - Considering second codeword CQI info
16661 in case of MIMO for CQI Reporting */
16662 *is2ndCwCqiAvail = TRUE;
16669 ue->mimoInfo.puschFdbkVld = TRUE;
16670 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_12;
16671 ue->mimoInfo.puschPmiInfo.u.mode12Info = puschCqi->u.mode12Info;
16672 /* : resetting this is time-based. Make use of the CQI reporting
16673 * periodicity and DELTAs in determining the exact time at which this
16674 * needs to be reset. */
16676 case TFU_PUSCH_CQI_MODE_22:
16677 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16678 if((puschCqi->u.mode22Info.wideBandCqi[0]) &&
16679 (puschCqi->u.mode22Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16681 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode22Info.wideBandCqi[0];
16682 /* ccpu00117452 - MOD - Changed macro name from
16683 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16684 #ifdef RGR_CQI_REPT
16685 *isCqiAvail = TRUE;
16692 if((puschCqi->u.mode22Info.wideBandCqi[1]) &&
16693 (puschCqi->u.mode22Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16695 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode22Info.wideBandCqi[1];
16696 /* ccpu00117452 - MOD - Changed macro name from
16697 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16698 #ifdef RGR_CQI_REPT
16699 /* ccpu00117259 - ADD - Considering second codeword CQI info
16700 in case of MIMO for CQI Reporting */
16701 *is2ndCwCqiAvail = TRUE;
16708 rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode22Info.wideBandPmi);
16709 ue->mimoInfo.puschFdbkVld = TRUE;
16710 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_22;
16711 ue->mimoInfo.puschPmiInfo.u.mode22Info = puschCqi->u.mode22Info;
16713 case TFU_PUSCH_CQI_MODE_31:
16714 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16715 if((puschCqi->u.mode31Info.wideBandCqi[0]) &&
16716 (puschCqi->u.mode31Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16718 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode31Info.wideBandCqi[0];
16719 /* ccpu00117452 - MOD - Changed macro name from
16720 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16721 #ifdef RGR_CQI_REPT
16722 *isCqiAvail = TRUE;
16725 if (ueDl->mimoInfo.ri > 1)
16727 if((puschCqi->u.mode31Info.wideBandCqi[1]) &&
16728 (puschCqi->u.mode31Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16730 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode31Info.wideBandCqi[1];
16731 /* ccpu00117452 - MOD - Changed macro name from
16732 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16733 #ifdef RGR_CQI_REPT
16734 /* ccpu00117259 - ADD - Considering second codeword CQI info
16735 in case of MIMO for CQI Reporting */
16736 *is2ndCwCqiAvail = TRUE;
16740 if (rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode31Info.pmi) != ROK)
16742 /* To avoid Rank and PMI inconsistency */
16743 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16744 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16746 ueDl->mimoInfo.ri = prevRiVal;
16749 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_31;
16750 ue->mimoInfo.puschPmiInfo.u.mode31Info = puschCqi->u.mode31Info;
16754 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Unknown CQI Mode %d CRNTI:%d",
16755 puschCqi->mode,ue->ueId);
16756 /* CQI decoding failed; revert the RI to the previous value */
16757 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16758 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16760 ueDl->mimoInfo.ri = prevRiVal;
16762 /* ccpu00117452 - MOD - Changed macro name from
16763 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16764 #ifdef RGR_CQI_REPT
16765 *isCqiAvail = FALSE;
16766 /* ccpu00117259 - ADD - Considering second codeword CQI info
16767 in case of MIMO for CQI Reporting */
16768 *is2ndCwCqiAvail = FALSE;
16775 } /* rgSCHCmnDlCqiOnPuschInd */
16779 * @brief This function Updates the DL CQI for the UE.
16783 * Function: rgSCHCmnDlCqiInd
16784 * Purpose: Updates the DL CQI for the UE
16788 * @param[in] RgSchCellCb *cell
16789 * @param[in] RgSchUeCb *ue
16790 * @param[in] TfuDlCqiRpt *dlCqi
16795 PUBLIC Void rgSCHCmnDlCqiInd
16801 CmLteTimingInfo timingInfo
16804 PUBLIC Void rgSCHCmnDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo)
16809 CmLteTimingInfo timingInfo;
16812 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
16813 /* ccpu00117452 - MOD - Changed macro name from
16814 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16815 #ifdef RGR_CQI_REPT
16816 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16817 RgrUeCqiRept ueCqiRept = {{0}};
16818 Bool isCqiAvail = FALSE;
16819 /* ccpu00117259 - ADD - Considering second codeword CQI info
16820 in case of MIMO for CQI Reporting */
16821 Bool is2ndCwCqiAvail = FALSE;
16824 TRC2(rgSCHCmnDlCqiInd);
16826 #ifdef RGR_CQI_REPT
16829 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16833 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16838 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi);
16842 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi);
16846 #ifdef CQI_CONFBITMASK_DROP
16847 if(!ue->cqiConfBitMask)
16849 if (ueDl->mimoInfo.cwInfo[0].cqi >15)
16851 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16852 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
16854 else if ( ueDl->mimoInfo.cwInfo[0].cqi >= ue->prevCqi)
16856 ue->prevCqi = ueDl->mimoInfo.cwInfo[0].cqi;
16860 U8 dlCqiDeltaPrev = 0;
16861 dlCqiDeltaPrev = ue->prevCqi - ueDl->mimoInfo.cwInfo[0].cqi;
16862 if (dlCqiDeltaPrev > 3)
16863 dlCqiDeltaPrev = 3;
16864 if ((ue->prevCqi - dlCqiDeltaPrev) < 6)
16870 ue->prevCqi = ue->prevCqi - dlCqiDeltaPrev;
16872 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16873 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
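/* Worked example for the clamping above (illustrative numbers only): assuming
 * ue->prevCqi == 12 and a newly decoded wideband CQI of 7, the drop
 * dlCqiDeltaPrev = 12 - 7 = 5 is limited to 3, and since 12 - 3 = 9 is not
 * below 6, prevCqi becomes 9 and both codewords are recorded with CQI 9
 * rather than the raw value 7. */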
16879 /* ccpu00117452 - MOD - Changed macro name from
16880 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16881 #ifdef RGR_CQI_REPT
16882 /* ccpu00117259 - ADD - Considering second codeword CQI info
16883 in case of MIMO for CQI Reporting - added is2ndCwCqiAvail\
16884 in 'if' condition*/
16885 if (RG_SCH_CQIR_IS_PUSHNCQI_ENBLE(ue) && (isCqiAvail || is2ndCwCqiAvail))
16887 ueCqiRept.cqi[0] = ueDl->mimoInfo.cwInfo[0].cqi;
16889 /* ccpu00117259 - ADD - Considering second codeword CQI info
16890 in case of MIMO for CQI Reporting - added is2ndCwCqiAvail
16891 in 'if' condition*/
16892 ueCqiRept.cqi[1] = 0;
16893 if(is2ndCwCqiAvail)
16895 ueCqiRept.cqi[1] = ueDl->mimoInfo.cwInfo[1].cqi;
16897 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, &ueCqiRept);
16902 rgSCHCmnDlSetUeAllocLmtLa(cell, ue);
16903 rgSCHCheckAndSetTxScheme(cell, ue);
16906 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), ue->isEmtcUe);
16908 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), FALSE);
16912 if (cellSch->dl.isDlFreqSel)
16914 cellSch->apisDlfs->rgSCHDlfsDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo);
16917 /* Call SPS module to update CQI indication */
16918 rgSCHCmnSpsDlCqiIndHndlr(cell, ue, timingInfo);
16920 /* Call Specific scheduler to process on dlCqiInd */
16922 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
16924 cellSch->apisEmtcDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
16929 cellSch->apisDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
16932 #ifdef RG_PFS_STATS
16933 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].avgCqi +=
16934 ueDl->mimoInfo.cwInfo[0].cqi;
16935 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].totalCqiOcc++;
16939 ueDl->avgCqi += ueDl->mimoInfo.cwInfo[0].cqi;
16940 ueDl->numCqiOccns++;
16941 if (ueDl->mimoInfo.ri == 1)
16952 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16953 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16954 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw0Cqi ++;
16955 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw1Cqi ++;
16956 cell->tenbStats->sch.dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16957 cell->tenbStats->sch.dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16958 cell->tenbStats->sch.dlNumCw0Cqi ++;
16959 cell->tenbStats->sch.dlNumCw1Cqi ++;
16966 * @brief This function calculates the wideband CQI from SNR
16967 * reported for each RB.
16971 * Function: rgSCHCmnCalcWcqiFrmSnr
16972 * Purpose: Wideband CQI calculation from SNR
16974 * Invoked by: RG SCH
16976 * @param[in] RgSchCellCb *cell
16977 * @param[in] TfuSrsRpt *srsRpt,
16978 * @return Wideband CQI
16982 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr
16988 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr(cell,srsRpt)
16993 U8 wideCqi=1; /*Calculated value from SNR*/
16994 TRC2(rgSCHCmnCalcWcqiFrmSnr);
16995 /*Need to map a certain SNR to a WideCQI value.
16996 * The CQI calculation is still primitive. Further, we need to
16997 * use an improved method for calculating WideCQI from SNR*/
16998 if (srsRpt->snr[0] <=50)
17002 else if (srsRpt->snr[0]>=51 && srsRpt->snr[0] <=100)
17006 else if (srsRpt->snr[0]>=101 && srsRpt->snr[0] <=150)
17010 else if (srsRpt->snr[0]>=151 && srsRpt->snr[0] <=200)
17014 else if (srsRpt->snr[0]>=201 && srsRpt->snr[0] <=250)
17023 }/*rgSCHCmnCalcWcqiFrmSnr*/
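/* Sketch of the bucketing above (the assigned CQI values are elided from this
 * listing and are assumptions): the SNR of the first reported RB is compared
 * against thresholds in steps of 50 (<=50, 51-100, 101-150, 151-200, 201-250,
 * and above), and each bucket presumably maps to a progressively larger
 * wideband CQI, starting from the default wideCqi of 1. */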
17027 * @brief This function Updates the SRS for the UE.
17031 * Function: rgSCHCmnSrsInd
17032 * Purpose: Updates the UL SRS for the UE
17036 * @param[in] RgSchCellCb *cell
17037 * @param[in] RgSchUeCb *ue
17038 * @param[in] TfuSrsRpt *srsRpt,
17043 PUBLIC Void rgSCHCmnSrsInd
17048 CmLteTimingInfo timingInfo
17051 PUBLIC Void rgSCHCmnSrsInd(cell, ue, srsRpt, timingInfo)
17055 CmLteTimingInfo timingInfo;
17058 U8 wideCqi; /*Calculated value from SNR*/
17059 U32 recReqTime; /*Received Time in TTI*/
17060 TRC2(rgSCHCmnSrsInd);
17062 recReqTime = (timingInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) + timingInfo.subframe;
17063 ue->srsCb.selectedAnt = (recReqTime/ue->srsCb.peri)%2;
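/* Illustrative arithmetic (not from the original source): with an SRS
 * periodicity ue->srsCb.peri of 10 and recReqTime of 12345 TTIs, the
 * selected antenna is (12345 / 10) % 2 = 1234 % 2 = 0, so the two antennas
 * alternate every SRS period. */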
17064 if(srsRpt->wideCqiPres)
17066 wideCqi = srsRpt->wideCqi;
17070 wideCqi = rgSCHCmnCalcWcqiFrmSnr(cell, srsRpt);
17072 rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi);
17074 }/*rgSCHCmnSrsInd*/
17079 * @brief This function is a handler for the TA report for a UE.
17083 * Function: rgSCHCmnDlTARpt
17084 * Purpose: Determine based on UE_IDLE_TIME threshold,
17085 * whether UE needs to be Linked to the scheduler's TA list OR
17086 * if it needs a PDCCH Order.
17091 * @param[in] RgSchCellCb *cell
17092 * @param[in] RgSchUeCb *ue
17097 PUBLIC Void rgSCHCmnDlTARpt
17103 PUBLIC Void rgSCHCmnDlTARpt(cell, ue)
17108 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17109 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
17110 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
17111 CmLListCp poInactvLst;
17113 TRC2(rgSCHCmnDlTARpt);
17115 /* RACHO: If UE idle time is more than threshold, then
17116 * set its poInactv pdcch order inactivity */
17117 /* Fix : syed Ignore if TaTmr is not configured */
17118 if ((ue->dl.taCb.cfgTaTmr) && (rgSCHCmnUeIdleExdThrsld(cell, ue) == ROK))
17120 U32 prevDlMsk = ue->dl.dlInactvMask;
17121 U32 prevUlMsk = ue->ul.ulInactvMask;
17122 ue->dl.dlInactvMask |= RG_PDCCHODR_INACTIVE;
17123 ue->ul.ulInactvMask |= RG_PDCCHODR_INACTIVE;
17124 /* Indicate Specific scheduler for this UEs inactivity */
17125 cmLListInit(&poInactvLst);
17126 cmLListAdd2Tail(&poInactvLst, &ueDl->rachInfo.inActUeLnk);
17127 ueDl->rachInfo.inActUeLnk.node = (PTR)ue;
17128 /* Send inactivate ind only if not already sent */
17129 if (prevDlMsk == 0)
17131 cellSch->apisDl->rgSCHDlInactvtUes(cell, &poInactvLst);
17133 if (prevUlMsk == 0)
17135 cellSch->apisUl->rgSCHUlInactvtUes(cell, &poInactvLst);
17140 /* Fix: ccpu00124009 Fix for loop in the linked list "cellDl->taLst" */
17141 if (!ue->dlTaLnk.node)
17144 if(cell->emtcEnable)
17148 rgSCHEmtcAddToTaLst(cellDl,ue);
17155 cmLListAdd2Tail(&cellDl->taLst, &ue->dlTaLnk);
17156 ue->dlTaLnk.node = (PTR)ue;
17161 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
17162 "<TA>TA duplicate entry attempt failed: UEID:%u",
17171 * @brief Selects the best UL Tx antenna from the reported CQIs.
17175 * Function : rgSCHCmnFindUlCqiUlTxAnt
17177 * - Finds the Best Tx Antenna amongst the CQIs received
17178 * from Two Tx Antennas.
17180 * @param[in] RgSchCellCb *cell
17181 * @param[in] RgSchUeCb *ue
17182 * @param[in] U8 wideCqi
17186 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt
17193 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi)
17199 ue->validTxAnt = 1;
17201 } /* rgSCHCmnFindUlCqiUlTxAnt */
17205 * @brief Indication of UL CQI.
17209 * Function : rgSCHCmnUlCqiInd
17211 * - Updates uplink CQI information for the UE. Computes and
17212 * stores the lowest CQI of CQIs reported in all subbands.
17214 * @param[in] RgSchCellCb *cell
17215 * @param[in] RgSchUeCb *ue
17216 * @param[in] TfuUlCqiRpt *ulCqiInfo
17220 PUBLIC Void rgSCHCmnUlCqiInd
17224 TfuUlCqiRpt *ulCqiInfo
17227 PUBLIC Void rgSCHCmnUlCqiInd(cell, ue, ulCqiInfo)
17230 TfuUlCqiRpt *ulCqiInfo;
17233 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17234 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17239 #if (defined(SCH_STATS) || defined(TENB_STATS))
17240 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
17243 TRC2(rgSCHCmnUlCqiInd);
17244 /* consider inputs from SRS handlers about SRS occasions
17245 * in determining the UL TX Antenna selection */
17246 ueUl->crntUlCqi[0] = ulCqiInfo->wideCqi;
17248 ueUl->validUlCqi = ueUl->crntUlCqi[0];
17249 ue->validTxAnt = 0;
17251 iTbsNew = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][ueUl->validUlCqi];
17252 previTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
17254 if (RG_ITBS_DIFF(iTbsNew, previTbs) > 5)
17256 /* Ignore this iTBS report and mark that last iTBS report was */
17257 /* ignored so that subsequently we reset the LA algorithm */
17258 ueUl->ulLaCb.lastiTbsIgnored = TRUE;
17262 if (ueUl->ulLaCb.lastiTbsIgnored != TRUE)
17264 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17265 (80 * ueUl->ulLaCb.cqiBasediTbs))/100;
17269 /* Reset the LA as iTbs in use caught up with the value */
17270 /* reported by UE. */
17271 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17272 (80 * previTbs * 100))/100;
17273 ueUl->ulLaCb.deltaiTbs = 0;
17274 ueUl->ulLaCb.lastiTbsIgnored = FALSE;
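/* Worked example of the link-adaptation filter above (illustrative only):
 * cqiBasediTbs is kept scaled by 100. Assuming cqiBasediTbs == 1000
 * (i.e. iTbs 10.00), deltaiTbs == 0 and a freshly reported iTbsNew of 12:
 *   - RG_ITBS_DIFF(12, 10) = 2, which is not > 5, so the report is used;
 *   - cqiBasediTbs = (20*12*100 + 80*1000) / 100 = 1040, i.e. iTbs 10.40.
 * A report differing from the iTbs in use by more than 5 is ignored and only
 * flagged via lastiTbsIgnored, so that the filter is re-seeded from previTbs
 * when the next usable report arrives. */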
17279 rgSCHPwrUlCqiInd(cell, ue);
17281 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17283 rgSCHCmnSpsUlCqiInd(cell, ue);
17286 /* Applicable to only some schedulers */
17288 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
17290 cellSch->apisEmtcUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
17295 cellSch->apisUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
17299 ueUl->numCqiOccns++;
17300 ueUl->avgCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17305 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17306 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulNumCqi ++;
17307 cell->tenbStats->sch.ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17308 cell->tenbStats->sch.ulNumCqi ++;
17313 } /* rgSCHCmnUlCqiInd */
17316 * @brief Returns the HARQ proc for which data is expected now.
17320 * Function: rgSCHCmnUlHqProcForUe
17321 * Purpose: This function returns the harq process for
17322 * which data is expected in the current subframe.
17323 * It does not validate that the HARQ process
17324 * has an allocation.
17328 * @param[in] RgSchCellCb *cell
17329 * @param[in] CmLteTimingInfo frm
17330 * @param[in] RgSchUeCb *ue
17331 * @param[out] RgSchUlHqProcCb **procRef
17335 PUBLIC Void rgSCHCmnUlHqProcForUe
17338 CmLteTimingInfo frm,
17340 RgSchUlHqProcCb **procRef
17343 PUBLIC Void rgSCHCmnUlHqProcForUe(cell, frm, ue, procRef)
17345 CmLteTimingInfo frm;
17347 RgSchUlHqProcCb **procRef;
17351 U8 procId = rgSCHCmnGetUlHqProcIdx(&frm, cell);
17353 TRC2(rgSCHCmnUlHqProcForUe);
17355 *procRef = rgSCHUhmGetUlHqProc(cell, ue, procId);
17357 *procRef = rgSCHUhmGetUlProcByTime(cell, ue, frm);
17364 * @brief Update harq process for allocation.
17368 * Function : rgSCHCmnUpdUlHqProc
17370 * This function is invoked when the harq process
17371 * control block has moved to a new memory location,
17372 * thus requiring a pointer/reference update.
17374 * @param[in] RgSchCellCb *cell
17375 * @param[in] RgSchUlHqProcCb *curProc
17376 * @param[in] RgSchUlHqProcCb *oldProc
17382 PUBLIC S16 rgSCHCmnUpdUlHqProc
17385 RgSchUlHqProcCb *curProc,
17386 RgSchUlHqProcCb *oldProc
17389 PUBLIC S16 rgSCHCmnUpdUlHqProc(cell, curProc, oldProc)
17391 RgSchUlHqProcCb *curProc;
17392 RgSchUlHqProcCb *oldProc;
17395 TRC2(rgSCHCmnUpdUlHqProc);
17399 #if (ERRCLASS & ERRCLS_DEBUG)
17400 if (curProc->alloc == NULLP)
17405 curProc->alloc->hqProc = curProc;
17407 } /* rgSCHCmnUpdUlHqProc */
17410 /*MS_WORKAROUND for CR FIXME */
17412 * @brief Handles BSR timer expiry
17416 * Function : rgSCHCmnBsrTmrExpry
17418 * This function is invoked when periodic BSR timer expires for a UE.
17420 * @param[in] RgSchUeCb *ue
17426 PUBLIC S16 rgSCHCmnBsrTmrExpry
17431 PUBLIC S16 rgSCHCmnBsrTmrExpry(ueCb)
17435 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ueCb->cell);
17437 TRC2(rgSCHCmnBsrTmrExpry)
17439 ueCb->isSrGrant = TRUE;
17442 emtcStatsUlBsrTmrTxp++;
17446 if(ueCb->cell->emtcEnable)
17450 cellSch->apisEmtcUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17457 cellSch->apisUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17464 * @brief Short BSR update.
17468 * Function : rgSCHCmnUpdBsrShort
17470 * This function does the requisite updates to handle short BSR reporting.
17472 * @param[in] RgSchCellCb *cell
17473 * @param[in] RgSchUeCb *ue
17474 * @param[in] RgSchLcgCb *ulLcg
17475 * @param[in] U8 bsr
17476 * @param[out] RgSchErrInfo *err
17482 PUBLIC S16 rgSCHCmnUpdBsrShort
17491 PUBLIC S16 rgSCHCmnUpdBsrShort(cell, ue, ulLcg, bsr, err)
17501 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17503 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17504 RgSchCmnLcg *cmnLcg = NULLP;
17509 TRC2(rgSCHCmnUpdBsrShort);
17511 if (!RGSCH_LCG_ISCFGD(ulLcg))
17513 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
17516 for (lcgCnt=0; lcgCnt<4; lcgCnt++)
17519 /* Set BS of all other LCGs to Zero.
17520 If Zero BSR is reported in Short BSR include this LCG too */
17521 if ((lcgCnt != ulLcg->lcgId) ||
17522 (!bsr && !ueUl->hqEnt.numBusyHqProcs))
17524 /* If old BO is zero do nothing */
17525 if(((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs != 0)
17527 for(idx = 0; idx < ue->ul.lcgArr[lcgCnt].numLch; idx++)
17529 if((ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount) &&
17530 (ue->ulActiveLCs & (1 <<
17531 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1))))
17534 ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount--;
17535 ue->ulActiveLCs &= ~(1 <<
17536 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1));
17542 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgCnt]))
17544 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs = 0;
17545 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->reportedBs = 0;
17550 if(ulLcg->lcgId && bsr && (((RgSchCmnLcg *)(ulLcg->sch))->bs == 0))
17552 for(idx = 0; idx < ulLcg->numLch; idx++)
17555 if (!(ue->ulActiveLCs & (1 << (ulLcg->lcArray[idx]->qciCb->qci -1))))
17557 ulLcg->lcArray[idx]->qciCb->ulUeCount++;
17558 ue->ulActiveLCs |= (1 << (ulLcg->lcArray[idx]->qciCb->qci -1));
17563 /* Resetting the nonGbrLcgBs info here */
17564 ue->ul.nonGbrLcgBs = 0;
17565 ue->ul.nonLcg0Bs = 0;
17567 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17569 if (TRUE == ue->ul.useExtBSRSizes)
17571 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17575 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
17577 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17579 /* TBD check for effGbr != 0 */
17580 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17582 else if (0 == ulLcg->lcgId)
17584 /* This is added for handling LCG0 */
17585 cmnLcg->bs = cmnLcg->reportedBs;
17589 /* Update non GBR LCG's BS*/
17590 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17591 cmnLcg->bs = ue->ul.nonGbrLcgBs;
17593 ue->ul.totalBsr = cmnLcg->bs;
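/* Minimal sketch of the buffer-status derivation above (B is an illustrative
 * placeholder for the BSR table lookup):
 *   - a GBR LCG stores bs = min(B, effGbr + effDeltaMbr),
 *   - LCG0 stores bs = B unmodified,
 *   - a non-GBR LCG stores bs = min(B, effAmbr), with the same value kept in
 *     ue->ul.nonGbrLcgBs,
 * and ue->ul.totalBsr is set to the bs of the reported LCG. */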
17596 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (bsr == 0))
17598 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17602 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17604 rgSCHCmnSpsBsrRpt(cell, ue, ulLcg);
17607 rgSCHCmnUpdUlCompEffBsr(ue);
17610 if(cell->emtcEnable)
17614 cellSch->apisEmtcUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
17621 cellSch->apisUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
17625 if (ue->ul.isUlCaEnabled && ue->numSCells)
17627 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17629 #ifndef PAL_ENABLE_UL_CA
17630 if((ue->cellInfo[sCellIdx] != NULLP) &&
17631 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17633 if(ue->cellInfo[sCellIdx] != NULLP)
17636 cellSch->apisUl->rgSCHUpdBsrShort(ue->cellInfo[sCellIdx]->cell,
17647 * @brief Truncated BSR update.
17651 * Function : rgSCHCmnUpdBsrTrunc
17653 * This function does the required updates to handle a truncated BSR report.
17656 * @param[in] RgSchCellCb *cell
17657 * @param[in] RgSchUeCb *ue
17658 * @param[in] RgSchLcgCb *ulLcg
17659 * @param[in] U8 bsr
17660 * @param[out] RgSchErrInfo *err
17666 PUBLIC S16 rgSCHCmnUpdBsrTrunc
17675 PUBLIC S16 rgSCHCmnUpdBsrTrunc(cell, ue, ulLcg, bsr, err)
17683 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17684 RgSchCmnLcg *cmnLcg = NULLP;
17690 TRC2(rgSCHCmnUpdBsrTrunc);
17692 if (!RGSCH_LCG_ISCFGD(ulLcg))
17694 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
17697 /* Set all higher priority LCGs' bs to 0, update this LCG's bs, and set
17698 total bsr = sum of all LCGs' bs */
17701 for (cnt = ulLcg->lcgId-1; cnt >= 0; cnt--)
17704 /* If the existing BO is zero then don't do anything */
17705 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs != 0)
17707 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17710 if((ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount) &&
17711 (ue->ulActiveLCs & (1 <<
17712 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17714 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount--;
17715 ue->ulActiveLCs &= ~(1 <<
17716 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
17721 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs = 0;
17722 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->reportedBs = 0;
17727 for (cnt = ulLcg->lcgId; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17729 if (ulLcg->lcgId == 0)
17733 /* If the existing BO is zero then don't do anything */
17734 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs == 0)
17736 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17739 if (!(ue->ulActiveLCs & (1 <<
17740 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17742 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount++;
17743 ue->ulActiveLCs |= (1 <<
17744 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
17750 ue->ul.nonGbrLcgBs = 0;
17751 ue->ul.nonLcg0Bs = 0;
17752 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17753 if (TRUE == ue->ul.useExtBSRSizes)
17755 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17759 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
17761 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17763 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17765 else if(ulLcg->lcgId == 0)
17767 /* This is for handling LCG0 */
17768 cmnLcg->bs = cmnLcg->reportedBs;
17772 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
17773 cmnLcg->bs = ue->ul.nonGbrLcgBs;
17775 ue->ul.totalBsr = cmnLcg->bs;
17777 for (cnt = ulLcg->lcgId+1; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17779 /* TODO: The bs for the other LCGs may be stale because some or all of
17780 * the part of bs may have been already scheduled/data received. Please
17781 * consider this when truncated BSR is tested/implemented */
17782 ue->ul.totalBsr += ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs;
17785 rgSCHCmnUpdUlCompEffBsr(ue);
17788 if(cell->emtcEnable)
17792 cellSch->apisEmtcUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
17799 cellSch->apisUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
17803 if (ue->ul.isUlCaEnabled && ue->numSCells)
17805 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17807 #ifndef PAL_ENABLE_UL_CA
17808 if((ue->cellInfo[sCellIdx] != NULLP) &&
17809 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17811 if(ue->cellInfo[sCellIdx] != NULLP)
17814 cellSch->apisUl->rgSCHUpdBsrTrunc(ue->cellInfo[sCellIdx]->cell, ue, ulLcg, bsr);
17824 * @brief Long BSR update.
17828 * Function : rgSCHCmnUpdBsrLong
17830 * - Update BSRs for all configured LCGs.
17831 * - Update priority of LCGs if needed.
17832 * - Update UE's position within/across uplink scheduling queues.
17835 * @param[in] RgSchCellCb *cell
17836 * @param[in] RgSchUeCb *ue
17837 * @param[in] U8 bsArr[]
17838 * @param[out] RgSchErrInfo *err
17844 PUBLIC S16 rgSCHCmnUpdBsrLong
17852 PUBLIC S16 rgSCHCmnUpdBsrLong(cell, ue, bsArr, err)
17859 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17860 U32 tmpBsArr[4] = {0, 0, 0, 0};
17868 TRC2(rgSCHCmnUpdBsrLong);
17871 for(idx1 = 1; idx1 < RGSCH_MAX_LCG_PER_UE; idx1++)
17873 /* If Old BO is non zero then do nothing */
17874 if ((((RgSchCmnLcg *)(ue->ul.lcgArr[idx1].sch))->bs == 0)
17877 for(idx2 = 0; idx2 < ue->ul.lcgArr[idx1].numLch; idx2++)
17880 if (!(ue->ulActiveLCs & (1 <<
17881 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1))))
17883 ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->ulUeCount++;
17884 ue->ulActiveLCs |= (1 <<
17885 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1));
17891 ue->ul.nonGbrLcgBs = 0;
17892 ue->ul.nonLcg0Bs = 0;
17894 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[0]))
17896 if (TRUE == ue->ul.useExtBSRSizes)
17898 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnExtBsrTbl[bsArr[0]];
17899 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnExtBsrTbl[bsArr[0]];
17900 tmpBsArr[0] = rgSchCmnExtBsrTbl[bsArr[0]];
17904 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnBsrTbl[bsArr[0]];
17905 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnBsrTbl[bsArr[0]];
17906 tmpBsArr[0] = rgSchCmnBsrTbl[bsArr[0]];
17909 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
17911 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
17913 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
17915 if (TRUE == ue->ul.useExtBSRSizes)
17917 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsArr[lcgId]];
17921 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsArr[lcgId]];
17923 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17925 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17926 tmpBsArr[lcgId] = cmnLcg->bs;
17930 nonGbrBs += cmnLcg->reportedBs;
17931 tmpBsArr[lcgId] = cmnLcg->reportedBs;
17932 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17936 ue->ul.nonGbrLcgBs = RGSCH_MIN(nonGbrBs,ue->ul.effAmbr);
17938 ue->ul.totalBsr = tmpBsArr[0] + tmpBsArr[1] + tmpBsArr[2] + tmpBsArr[3];
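/* Illustrative example for the long BSR handling above: assuming the four
 * reported indices map to 100, 500, 300 and 200 bytes, LCG0 contributes 100
 * to tmpBsArr, a GBR LCG contributes min(reported, effGbr + effDeltaMbr), and
 * a non-GBR LCG contributes its reported value to tmpBsArr while its per-LCG
 * bs is capped at effAmbr and the non-GBR sum is capped into
 * ue->ul.nonGbrLcgBs. totalBsr is then the sum of the four tmpBsArr entries. */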
17940 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (ue->ul.totalBsr == 0))
17942 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17947 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE) /* SPS_FIX */
17949 if(ue->ul.totalBsr - tmpBsArr[1] == 0)
17950 {/* Updating the BSR to SPS only if LCG1 BS is present in SPS active state */
17951 rgSCHCmnSpsBsrRpt(cell, ue, &ue->ul.lcgArr[1]);
17955 rgSCHCmnUpdUlCompEffBsr(ue);
17958 if(cell->emtcEnable)
17962 cellSch->apisEmtcUl->rgSCHUpdBsrLong(cell, ue, bsArr);
17969 cellSch->apisUl->rgSCHUpdBsrLong(cell, ue, bsArr);
17973 if (ue->ul.isUlCaEnabled && ue->numSCells)
17975 for(U8 idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
17977 #ifndef PAL_ENABLE_UL_CA
17978 if((ue->cellInfo[idx] != NULLP) &&
17979 (ue->cellInfo[idx]->sCellState == RG_SCH_SCELL_ACTIVE))
17981 if(ue->cellInfo[idx] != NULLP)
17984 cellSch->apisUl->rgSCHUpdBsrLong(ue->cellInfo[idx]->cell, ue, bsArr);
17994 * @brief PHR update.
17998 * Function : rgSCHCmnUpdExtPhr
18000 * Updates extended power headroom information for a UE.
18002 * @param[in] RgSchCellCb *cell
18003 * @param[in] RgSchUeCb *ue
18004 * @param[in] U8 phr
18005 * @param[out] RgSchErrInfo *err
18011 PUBLIC S16 rgSCHCmnUpdExtPhr
18015 RgInfExtPhrCEInfo *extPhr,
18019 PUBLIC S16 rgSCHCmnUpdExtPhr(cell, ue, extPhr, err)
18022 RgInfExtPhrCEInfo *extPhr;
18026 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18027 RgSchCmnAllocRecord *allRcd;
18028 CmLList *node = ueUl->ulAllocLst.last;
18031 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18033 TRC2(rgSCHCmnUpdExtPhr);
18039 allRcd = (RgSchCmnAllocRecord *)node->node;
18041 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18043 rgSCHPwrUpdExtPhr(cell, ue, extPhr, allRcd);
18048 if(ulSpsUe->isUlSpsActv)
18050 rgSCHCmnSpsPhrInd(cell,ue);
18055 } /* rgSCHCmnUpdExtPhr */
18061 * @brief PHR update.
18065 * Function : rgSCHCmnUpdPhr
18067 * Updates power headroom information for a UE.
18069 * @param[in] RgSchCellCb *cell
18070 * @param[in] RgSchUeCb *ue
18071 * @param[in] U8 phr
18072 * @param[out] RgSchErrInfo *err
18078 PUBLIC S16 rgSCHCmnUpdPhr
18086 PUBLIC S16 rgSCHCmnUpdPhr(cell, ue, phr, err)
18093 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18094 RgSchCmnAllocRecord *allRcd;
18095 CmLList *node = ueUl->ulAllocLst.last;
18098 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18100 TRC2(rgSCHCmnUpdPhr);
18106 allRcd = (RgSchCmnAllocRecord *)node->node;
18108 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18110 rgSCHPwrUpdPhr(cell, ue, phr, allRcd, RG_SCH_CMN_PWR_USE_CFG_MAX_PWR);
18115 if(ulSpsUe->isUlSpsActv)
18117 rgSCHCmnSpsPhrInd(cell,ue);
18122 } /* rgSCHCmnUpdPhr */
18125 * @brief UL grant for contention resolution.
18129 * Function : rgSCHCmnContResUlGrant
18131 * Add UE to another queue specifically for CRNTI based contention
18135 * @param[in] RgSchUeCb *ue
18136 * @param[out] RgSchErrInfo *err
18142 PUBLIC S16 rgSCHCmnContResUlGrant
18149 PUBLIC S16 rgSCHCmnContResUlGrant(cell, ue, err)
18155 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18156 TRC2(rgSCHCmnContResUlGrant);
18159 if(cell->emtcEnable)
18163 cellSch->apisEmtcUl->rgSCHContResUlGrant(cell, ue);
18170 cellSch->apisUl->rgSCHContResUlGrant(cell, ue);
18176 * @brief SR reception handling.
18180 * Function : rgSCHCmnSrRcvd
18182 * - Update UE's position within/across uplink scheduling queues
18183 * - Update priority of LCGs if needed.
18185 * @param[in] RgSchCellCb *cell
18186 * @param[in] RgSchUeCb *ue
18187 * @param[in] CmLteTimingInfo frm
18188 * @param[out] RgSchErrInfo *err
18194 PUBLIC S16 rgSCHCmnSrRcvd
18198 CmLteTimingInfo frm,
18202 PUBLIC S16 rgSCHCmnSrRcvd(cell, ue, frm, err)
18205 CmLteTimingInfo frm;
18209 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18210 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18211 CmLList *node = ueUl->ulAllocLst.last;
18213 TRC2(rgSCHCmnSrRcvd);
18216 emtcStatsUlTomSrInd++;
18219 RGSCH_INCR_SUB_FRAME(frm, 1); /* 1 TTI after the time SR was sent */
18222 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)node->node;
18223 if (RGSCH_TIMEINFO_SAME(frm, allRcd->allocTime))
18229 //TODO_SID Need to check when it is getting triggered
18230 ue->isSrGrant = TRUE;
18232 if(cell->emtcEnable)
18236 cellSch->apisEmtcUl->rgSCHSrRcvd(cell, ue);
18243 cellSch->apisUl->rgSCHSrRcvd(cell, ue);
18249 * @brief Returns first uplink allocation to send reception
18254 * Function: rgSCHCmnFirstRcptnReq(cell)
18255 * Purpose: This function returns the first uplink allocation
18256 * (or NULLP if there is none) in the subframe
18257 * in which it is expected to prepare and send reception
18262 * @param[in] RgSchCellCb *cell
18263 * @return RgSchUlAlloc*
18266 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq
18271 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq(cell)
18275 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18277 RgSchUlAlloc* alloc = NULLP;
18279 TRC2(rgSCHCmnFirstRcptnReq);
18281 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18283 RgSchUlSf* sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18284 alloc = rgSCHUtlUlAllocFirst(sf);
18286 if (alloc && alloc->hqProc == NULLP)
18288 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18296 * @brief Returns first uplink allocation to send reception
18301 * Function: rgSCHCmnNextRcptnReq(cell)
18302 * Purpose: This function returns the next uplink allocation
18303 * (or NULLP if there is none) in the subframe
18304 * in which it is expected to prepare and send reception
18309 * @param[in] RgSchCellCb *cell
18310 * @return RgSchUlAlloc*
18313 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq
18316 RgSchUlAlloc *alloc
18319 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq(cell, alloc)
18321 RgSchUlAlloc *alloc;
18324 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18326 //RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18328 TRC2(rgSCHCmnNextRcptnReq);
18330 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18332 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18334 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18335 if (alloc && alloc->hqProc == NULLP)
18337 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18348 * @brief Collates DRX-enabled UEs scheduled in this SF
18352 * Function: rgSCHCmnDrxStrtInActvTmrInUl(cell)
18353 * Purpose: This function collates the list
18354 * of UEs scheduled in this SF that
18355 * have DRX enabled. It then calls the
18356 * DRX-specific function to start/restart
18357 * the inactivity timer in UL
18361 * @param[in] RgSchCellCb *cell
18365 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl
18370 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl(cell)
18374 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18375 RgSchUlSf *sf = &(cellUl->ulSfArr[cellUl->schdIdx]);
18376 RgSchUlAlloc *alloc = rgSCHUtlUlAllocFirst(sf);
18381 TRC2(rgSCHCmnDrxStrtInActvTmrInUl);
18383 cmLListInit(&ulUeLst);
18391 if (!(alloc->grnt.isRtx) && ueCb->isDrxEnabled && !(ueCb->isSrGrant)
18393 /* ccpu00139513- DRX inactivity timer should not be started for
18394 * UL SPS occasions */
18395 && (alloc->hqProc->isSpsOccnHqP == FALSE)
18399 cmLListAdd2Tail(&ulUeLst,&(ueCb->ulDrxInactvTmrLnk));
18400 ueCb->ulDrxInactvTmrLnk.node = (PTR)ueCb;
18404 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18407 (Void)rgSCHDrxStrtInActvTmr(cell,&ulUeLst,RG_SCH_DRX_UL);
18414 * @brief Returns first uplink allocation to send HARQ feedback
18419 * Function: rgSCHCmnFirstHqFdbkAlloc
18420 * Purpose: This function returns the first uplink allocation
18421 * (or NULLP if there is none) in the subframe
18422 * for which it is expected to prepare and send HARQ
18427 * @param[in] RgSchCellCb *cell
18428 * @param[in] U8 idx
18429 * @return RgSchUlAlloc*
18432 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc
18438 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc(cell, idx)
18443 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18445 RgSchUlAlloc *alloc = NULLP;
18447 TRC2(rgSCHCmnFirstHqFdbkAlloc);
18449 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18451 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18452 alloc = rgSCHUtlUlAllocFirst(sf);
18454 while (alloc && (alloc->hqProc == NULLP))
18456 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18464 * @brief Returns next allocation to send HARQ feedback for.
18468 * Function: rgSCHCmnNextHqFdbkAlloc(cell)
18469 * Purpose: This function returns the next uplink allocation
18470 * (or NULLP if there is none) in the subframe
18471 * for which HARQ feedback needs to be sent.
18475 * @param[in] RgSchCellCb *cell
18476 * @return RgSchUlAlloc*
18479 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc
18482 RgSchUlAlloc *alloc,
18486 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc(cell, alloc, idx)
18488 RgSchUlAlloc *alloc;
18492 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18493 TRC2(rgSCHCmnNextHqFdbkAlloc);
18495 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18497 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18499 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18500 while (alloc && (alloc->hqProc == NULLP))
18502 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18512 /***********************************************************
18514 * Func : rgSCHCmnUlGetITbsFrmIMcs
18516 * Desc : Returns the Itbs that is mapped to an Imcs
18517 * for the case of uplink.
18525 **********************************************************/
18527 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs
18532 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs(iMcs)
18536 TRC2(rgSCHCmnUlGetITbsFrmIMcs);
18538 RETVALUE(rgUlIMcsTbl[iMcs].iTbs);
18541 /***********************************************************
18543 * Func : rgSCHCmnUlGetIMcsFrmITbs
18545 * Desc : Returns the Imcs that is mapped to an Itbs
18546 * for the case of uplink.
18550 * Notes: For iTbs 19, iMcs is dependent on modulation order.
18551 * Refer to 36.213, Table 8.6.1-1 and 36.306 Table 4.1-2
18552 * for UE capability information
18556 **********************************************************/
18558 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs
18561 CmLteUeCategory ueCtg
18564 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg)
18566 CmLteUeCategory ueCtg;
18570 TRC2(rgSCHCmnUlGetIMcsFrmITbs);
18576 /*a higher layer can force a 64QAM UE to transmit at 16QAM.
18577 * We currently do not support this. Once the support for such
18578 * is added, ueCtg should be replaced by current transmit
18579 * modulation configuration. Refer to 36.213, Section 8.6.1
18581 else if ( iTbs < 19 )
18585 else if ((iTbs == 19) && (ueCtg != CM_LTE_UE_CAT_5))
18595 /* This is a Temp fix, done for TENBPLUS-3898, ULSCH SDU corruption
18596 was seen when IMCS exceeds 20 on T2k TDD*/
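/* Reference sketch (assumption based on 36.213 Table 8.6.1-1, not taken from
 * the elided return values above): for the UL, iMcs equals iTbs for
 * iTbs 0..10 (QPSK), iMcs = iTbs + 1 for iTbs 10..19 with 16QAM, and
 * iMcs = iTbs + 2 for iTbs 19..26 with 64QAM - which is presumably why
 * iTbs 19 maps to iMcs 20 for a non-category-5 UE and to iMcs 21 for a
 * 64QAM-capable one. */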
18606 /***********************************************************
18608 * Func : rgSCHCmnUlMinTbBitsForITbs
18610 * Desc : Returns the minimum number of bits that can
18611 * be given as grant for a specific CQI.
18619 **********************************************************/
18621 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs
18623 RgSchCmnUlCell *cellUl,
18627 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs)
18628 RgSchCmnUlCell *cellUl;
18632 TRC2(rgSCHCmnUlMinTbBitsForITbs);
18634 RGSCH_ARRAY_BOUND_CHECK(0, rgTbSzTbl[0], iTbs);
18636 RETVALUE(rgTbSzTbl[0][iTbs][cellUl->sbSize-1]);
18639 /***********************************************************
18641 * Func : rgSCHCmnUlSbAlloc
18643 * Desc : Given a required 'number of subbands' and a hole,
18644 * returns a suitable alloc such that the subband
18645 * allocation size is valid
18649 * Notes: Does not assume either passed numSb or hole size
18650 * to be valid for allocation, and hence arrives at
18651 * an acceptable value.
18654 **********************************************************/
18656 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc
18663 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc(sf, numSb, hole)
18669 U8 holeSz; /* valid hole size */
18670 RgSchUlAlloc *alloc;
18671 TRC2(rgSCHCmnUlSbAlloc);
18673 if ((holeSz = rgSchCmnMult235Tbl[hole->num].prvMatch) == hole->num)
18675 numSb = rgSchCmnMult235Tbl[numSb].match;
18676 if (numSb >= holeSz)
18678 alloc = rgSCHUtlUlAllocGetCompHole(sf, hole);
18682 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
18687 if (numSb < holeSz)
18689 numSb = rgSchCmnMult235Tbl[numSb].match;
18693 numSb = rgSchCmnMult235Tbl[numSb].prvMatch;
18696 if ( numSb >= holeSz )
18700 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
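/* Illustrative note on the sizing above (the exact semantics of
 * rgSchCmnMult235Tbl are an assumption): UL allocations are kept to sizes of
 * the form 2^a * 3^b * 5^c, with .match presumably giving the nearest such
 * value rounding up and .prvMatch the nearest rounding down. E.g. a request
 * of 7 subbands would be rounded to 8 or 6 so that the granted number of
 * subbands is always a valid 2/3/5-composite that fits the hole. */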
18706 * @brief To fill the RgSchCmnUeUlAlloc structure of UeCb.
18710 * Function: rgSCHCmnUlUeFillAllocInfo
18711 * Purpose: Specific scheduler to call this API to fill the alloc
18714 * Invoked by: Scheduler
18716 * @param[in] RgSchCellCb *cell
18717 * @param[out] RgSchUeCb *ue
18721 PUBLIC Void rgSCHCmnUlUeFillAllocInfo
18727 PUBLIC Void rgSCHCmnUlUeFillAllocInfo(cell, ue)
18732 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18733 RgSchCmnUeUlAlloc *ulAllocInfo;
18734 RgSchCmnUlUe *ueUl;
18736 TRC2(rgSCHCmnUlUeFillAllocInfo);
18738 ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18739 ulAllocInfo = &ueUl->alloc;
18741 /* Fill alloc structure */
18742 rgSCHCmnUlAllocFillTpc(cell, ue, ulAllocInfo->alloc);
18743 rgSCHCmnUlAllocFillNdmrs(cellUl, ulAllocInfo->alloc);
18744 rgSCHCmnUlAllocLnkHqProc(ue, ulAllocInfo->alloc, ulAllocInfo->alloc->hqProc,
18745 ulAllocInfo->alloc->hqProc->isRetx);
18747 rgSCHCmnUlFillPdcchWithAlloc(ulAllocInfo->alloc->pdcch,
18748 ulAllocInfo->alloc, ue);
18749 /* Recording information about this allocation */
18750 rgSCHCmnUlRecordUeAlloc(cell, ue);
18752 /* Update the UE's outstanding allocation */
18753 if (!ulAllocInfo->alloc->hqProc->isRetx)
18755 rgSCHCmnUlUpdOutStndAlloc(cell, ue, ulAllocInfo->allocdBytes);
18762 * @brief Update the UEs outstanding alloc based on the BSR report's timing.
18767 * Function: rgSCHCmnUpdUlCompEffBsr
18768 * Purpose: Clear off all the allocations from outstanding allocation that
18769 * are later than or equal to BSR timing information (stored in UEs datIndTime).
18771 * Invoked by: Scheduler
18773 * @param[in] RgSchUeCb *ue
18777 PRIVATE Void rgSCHCmnUpdUlCompEffBsr
18782 PRIVATE Void rgSCHCmnUpdUlCompEffBsr(ue)
18786 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,ue->cell);
18787 CmLList *node = ueUl->ulAllocLst.last;
18788 RgSchCmnAllocRecord *allRcd;
18789 U32 outStndAlloc=0;
18790 U32 nonLcg0OutStndAllocBs=0;
18793 RgSchCmnLcg *cmnLcg = NULLP;
18794 TRC2(rgSCHCmnUpdUlCompEffBsr);
18798 allRcd = (RgSchCmnAllocRecord *)node->node;
18799 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18808 allRcd = (RgSchCmnAllocRecord *)node->node;
18810 outStndAlloc += allRcd->alloc;
18813 cmnLcg = (RgSchCmnLcg *)(ue->ul.lcgArr[0].sch);
18814 /* Update UEs LCG0's bs according to the total outstanding BSR allocation.*/
18815 if (cmnLcg->bs > outStndAlloc)
18817 cmnLcg->bs -= outStndAlloc;
18818 ue->ul.minReqBytes = cmnLcg->bs;
18823 nonLcg0OutStndAllocBs = outStndAlloc - cmnLcg->bs;
18827 for(lcgId = 1;lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
18829 if(RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
18831 cmnLcg = ((RgSchCmnLcg *) (ue->ul.lcgArr[lcgId].sch));
18832 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
18834 nonLcg0Bsr += cmnLcg->bs;
18838 nonLcg0Bsr += ue->ul.nonGbrLcgBs;
18839 if (nonLcg0OutStndAllocBs > nonLcg0Bsr)
18845 nonLcg0Bsr -= nonLcg0OutStndAllocBs;
18847 ue->ul.nonLcg0Bs = nonLcg0Bsr;
18848 /* Cap effBsr with nonLcg0Bsr and append lcg0 bs.
18849 * nonLcg0Bsr limit applies only to lcg1,2,3 */
18850 /* better be handled in individual scheduler */
18851 ue->ul.effBsr = nonLcg0Bsr +\
18852 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
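/* Worked example for the outstanding-allocation adjustment above
 * (illustrative numbers): assume LCG0 bs = 400, the other LCGs together
 * amount to 1000 bytes, and 600 bytes are still outstanding from earlier
 * grants. LCG0 absorbs 400 of the outstanding amount (and is implied to drop
 * to 0 in the elided branch), the remaining 200 are deducted from the
 * non-LCG0 total (1000 - 200 = 800), and effBsr becomes 800 + 0 = 800. */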
18857 * @brief Records information about the current allocation.
18861 * Function: rgSCHCmnUlRecordUeAlloc
18862 * Purpose: Records information about the current allocation.
18863 * This includes the allocated bytes, as well
18864 * as some power information.
18866 * Invoked by: Scheduler
18868 * @param[in] RgSchCellCb *cell
18869 * @param[in] RgSchUeCb *ue
18873 PUBLIC Void rgSCHCmnUlRecordUeAlloc
18879 PUBLIC Void rgSCHCmnUlRecordUeAlloc(cell, ue)
18885 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18887 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18888 CmLListCp *lst = &ueUl->ulAllocLst;
18889 CmLList *node = ueUl->ulAllocLst.first;
18890 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18891 RgSchCmnUeUlAlloc *ulAllocInfo = &ueUl->alloc;
18892 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
18893 TRC2(rgSCHCmnUlRecordUeAlloc);
18895 cmLListDelFrm(lst, &allRcd->lnk);
18897    /* To crntTime, add the minimum time at which the UE will
18898     * actually send the BSR, i.e. DELTA+4 */
18899 allRcd->allocTime = cell->crntTime;
18900 /*ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
18902 if(ue->isEmtcUe == TRUE)
18904 RGSCH_INCR_SUB_FRAME_EMTC(allRcd->allocTime,
18905 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18910 RGSCH_INCR_SUB_FRAME(allRcd->allocTime,
18911 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18914 allRcd->allocTime = cellUl->schdTime;
18916 cmLListAdd2Tail(lst, &allRcd->lnk);
18918 /* Filling in the parameters to be recorded */
18919 allRcd->alloc = ulAllocInfo->allocdBytes;
18920 //allRcd->numRb = ulAllocInfo->alloc->grnt.numRb;
18921 allRcd->numRb = (ulAllocInfo->alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
18922 /*Recording the UL CQI derived from the maxUlCqi */
18923 allRcd->cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
18924 allRcd->tpc = ulAllocInfo->alloc->grnt.tpc;
18926 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18928 cell->measurements.ulBytesCnt += ulAllocInfo->allocdBytes;
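   /* Illustrative note (added for clarity): each record is stamped with the
    * earliest time at which the UE can report a BSR for this grant, i.e.
    * crntTime advanced by TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA
    * subframes. Assuming the two deltas sum to 4 (a typical value, not taken
    * from this file), a grant decided at {sfn 100, subframe 8} is recorded
    * against {sfn 101, subframe 2}. */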
18933 /** PHR handling for MSG3
18934  * @brief Records allocation information of msg3 in the UE.
18938 * Function: rgSCHCmnUlRecMsg3Alloc
18939 * Purpose: Records information about msg3 allocation.
18940 * This includes the allocated bytes, as well
18941 * as some power information.
18943 * Invoked by: Scheduler
18945 * @param[in] RgSchCellCb *cell
18946 * @param[in] RgSchUeCb *ue
18947 * @param[in] RgSchRaCb *raCb
18951 PUBLIC Void rgSCHCmnUlRecMsg3Alloc
18958 PUBLIC Void rgSCHCmnUlRecMsg3Alloc(cell, ue, raCb)
18964 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18965 CmLListCp *lst = &ueUl->ulAllocLst;
18966 CmLList *node = ueUl->ulAllocLst.first;
18967 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18969 /* Stack Crash problem for TRACE5 changes */
18970 TRC2(rgSCHCmnUlRecMsg3Alloc);
18972 cmLListDelFrm(lst, node);
18973 allRcd->allocTime = raCb->msg3AllocTime;
18974 cmLListAdd2Tail(lst, node);
18976 /* Filling in the parameters to be recorded */
18977 allRcd->alloc = raCb->msg3Grnt.datSz;
18978 allRcd->numRb = raCb->msg3Grnt.numRb;
18979 allRcd->cqi = raCb->ccchCqi;
18980 allRcd->tpc = raCb->msg3Grnt.tpc;
18982 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18987  * @brief Keeps track of the most recent RG_SCH_CMN_MAX_ALLOC_TRACK
18988  *        allocations. Adds this allocation to the ueUl's ulAllocLst.
18993  *     Function: rgSCHCmnUlUpdOutStndAlloc
18994  *     Purpose:  The most recent allocation is kept at the first position:
18995 * Remove the last node, update the fields
18996 * with the new allocation and add at front.
18998 * Invoked by: Scheduler
19000 * @param[in] RgSchCellCb *cell
19001 * @param[in] RgSchUeCb *ue
19002 * @param[in] U32 alloc
19006 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc
19013 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc(cell, ue, alloc)
19019 U32 nonLcg0Alloc=0;
19020 TRC2(rgSCHCmnUlUpdOutStndAlloc);
19022    /* Update the UE's LCG0 bs according to the total outstanding BSR allocation.*/
19023 if (((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs > alloc)
19025 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs -= alloc;
19029 nonLcg0Alloc = alloc - ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
19030 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = 0;
19033 if (nonLcg0Alloc >= ue->ul.nonLcg0Bs)
19035 ue->ul.nonLcg0Bs = 0;
19039 ue->ul.nonLcg0Bs -= nonLcg0Alloc;
19041 /* Cap effBsr with effAmbr and append lcg0 bs.
19042 * effAmbr limit applies only to lcg1,2,3 non GBR LCG's*/
19043 /* better be handled in individual scheduler */
19044 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
19045 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
19047 if (ue->ul.effBsr == 0)
19049 if (ue->bsrTmr.tmrEvnt != TMR_NONE)
19051 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
19054 if (FALSE == ue->isSrGrant)
19056 if (ue->ul.bsrTmrCfg.isPrdBsrTmrPres)
19059 rgSCHTmrStartTmr(cell, ue, RG_SCH_TMR_BSR,
19060 ue->ul.bsrTmrCfg.prdBsrTmr);
19066 /* Resetting UEs lower Cap */
19067 ue->ul.minReqBytes = 0;
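   /* Note (added for clarity): the granted bytes are deducted first from LCG0's
    * pending bytes and any remainder from nonLcg0Bs, so effBsr always reflects
    * what is still owed after this grant. When effBsr reaches zero, any running
    * BSR timer is stopped and, if a periodic BSR timer is configured and this
    * is not an SR-only grant, restarted with the periodic value. */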
19074 * @brief Returns the "Itbs" for a given UE.
19078 * Function: rgSCHCmnUlGetITbs
19079 * Purpose: This function returns the "Itbs" for a given UE.
19081 * Invoked by: Scheduler
19083 * @param[in] RgSchUeCb *ue
19087 PUBLIC U8 rgSCHCmnUlGetITbs
19094 PUBLIC U8 rgSCHCmnUlGetITbs(cell, ue, isEcp)
19100 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
19101 /* CQI will be capped to maxUlCqi for 16qam UEs */
19102 CmLteUeCategory ueCtgy = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
19106 U8 maxiTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ueUl->maxUlCqi];
19109 TRC2(rgSCHCmnUlGetITbs);
19111 /* #ifdef RG_SCH_CMN_EXT_CP_SUP For ECP pick index 1 */
19113 if ( (ueCtgy != CM_LTE_UE_CAT_5) &&
19114 (ueUl->validUlCqi > ueUl->maxUlCqi)
19117 cqi = ueUl->maxUlCqi;
19121 cqi = ueUl->validUlCqi;
19125 iTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
19127 RG_SCH_CHK_ITBS_RANGE(iTbs, maxiTbs);
19129 iTbs = RGSCH_MIN(iTbs, ue->cell->thresholds.maxUlItbs);
19132 /* This is a Temp fix, done for TENBPLUS-3898, ULSCH SDU corruption
19133 was seen when IMCS exceeds 20 on T2k TDD */
19142 if ( (ueCtgy != CM_LTE_UE_CAT_5) && (ueUl->crntUlCqi[0] > ueUl->maxUlCqi ))
19144 cqi = ueUl->maxUlCqi;
19148 cqi = ueUl->crntUlCqi[0];
19151 RETVALUE(rgSchCmnUlCqiToTbsTbl[(U8)isEcp][cqi]);
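   /* Illustrative note (added for clarity; the figures are hypothetical): the
    * link-adaptation iTbs is maintained in units of 1/100, so e.g.
    * cqiBasediTbs = 1540 and deltaiTbs = -240 yield (1540 - 240)/100 = 13.
    * The result is then clipped against maxiTbs (derived from the UE's
    * maxUlCqi) and the cell-level maxUlItbs threshold before being returned. */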
19155 * @brief This function adds the UE to DLRbAllocInfo TX lst.
19159 * Function: rgSCHCmnDlRbInfoAddUeTx
19160 * Purpose: This function adds the UE to DLRbAllocInfo TX lst.
19162 * Invoked by: Common Scheduler
19164 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19165 * @param[in] RgSchUeCb *ue
19166 * @param[in] RgSchDlHqProcCb *hqP
19171 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx
19174 RgSchCmnDlRbAllocInfo *allocInfo,
19176 RgSchDlHqProcCb *hqP
19179 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx(cell, allocInfo, ue, hqP)
19181 RgSchCmnDlRbAllocInfo *allocInfo;
19183 RgSchDlHqProcCb *hqP;
19186 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
19188 TRC2(rgSCHCmnDlRbInfoAddUeTx);
19190 if (hqP->reqLnk.node == NULLP)
19192 if (cellSch->dl.isDlFreqSel)
19194 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19195 &allocInfo->dedAlloc.txHqPLst, hqP);
19200 cmLListAdd2Tail(&allocInfo->dedAlloc.txHqPLst, &hqP->reqLnk);
19202 hqP->reqLnk.node = (PTR)hqP;
19209 * @brief This function adds the UE to DLRbAllocInfo RETX lst.
19213 * Function: rgSCHCmnDlRbInfoAddUeRetx
19214 * Purpose: This function adds the UE to DLRbAllocInfo RETX lst.
19216 * Invoked by: Common Scheduler
19218 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19219 * @param[in] RgSchUeCb *ue
19220 * @param[in] RgSchDlHqProcCb *hqP
19225 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx
19228 RgSchCmnDlRbAllocInfo *allocInfo,
19230 RgSchDlHqProcCb *hqP
19233 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx(cell, allocInfo, ue, hqP)
19235 RgSchCmnDlRbAllocInfo *allocInfo;
19237 RgSchDlHqProcCb *hqP;
19240 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19242 TRC2(rgSCHCmnDlRbInfoAddUeRetx);
19244 if (cellSch->dl.isDlFreqSel)
19246 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19247 &allocInfo->dedAlloc.retxHqPLst, hqP);
19251 /* checking UE's presence in this lst is unnecessary */
19252 cmLListAdd2Tail(&allocInfo->dedAlloc.retxHqPLst, &hqP->reqLnk);
19253 hqP->reqLnk.node = (PTR)hqP;
19259 * @brief This function adds the UE to DLRbAllocInfo TX-RETX lst.
19263 * Function: rgSCHCmnDlRbInfoAddUeRetxTx
19264 * Purpose: This adds the UE to DLRbAllocInfo TX-RETX lst.
19266 * Invoked by: Common Scheduler
19268 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19269 * @param[in] RgSchUeCb *ue
19270 * @param[in] RgSchDlHqProcCb *hqP
19275 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx
19278 RgSchCmnDlRbAllocInfo *allocInfo,
19280 RgSchDlHqProcCb *hqP
19283 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx(allocInfo, ue, hqP)
19285 RgSchCmnDlRbAllocInfo *allocInfo;
19287 RgSchDlHqProcCb *hqP;
19290 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19292 TRC2(rgSCHCmnDlRbInfoAddUeRetxTx);
19294 if (cellSch->dl.isDlFreqSel)
19296 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19297 &allocInfo->dedAlloc.txRetxHqPLst, hqP);
19301 cmLListAdd2Tail(&allocInfo->dedAlloc.txRetxHqPLst, &hqP->reqLnk);
19302 hqP->reqLnk.node = (PTR)hqP;
19308 * @brief This function adds the UE to DLRbAllocInfo NonSchdRetxLst.
19312 * Function: rgSCHCmnDlAdd2NonSchdRetxLst
19313  *     Purpose:  During RB estimation for RETX, if allocation fails,
19314  *               the HARQ process is appended to NonSchdRetxLst; further
19315  *               action is taken as part of finalization in the
19316  *               respective schedulers.
19318 * Invoked by: Common Scheduler
19320 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19321 * @param[in] RgSchUeCb *ue
19322 * @param[in] RgSchDlHqProcCb *hqP
19327 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst
19329 RgSchCmnDlRbAllocInfo *allocInfo,
19331 RgSchDlHqProcCb *hqP
19334 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst(allocInfo, ue, hqP)
19335 RgSchCmnDlRbAllocInfo *allocInfo;
19337 RgSchDlHqProcCb *hqP;
19340 CmLList *schdLnkNode;
19342 TRC2(rgSCHCmnDlAdd2NonSchdRetxLst);
19345 if ( (hqP->sch != (RgSchCmnDlHqProc *)NULLP) &&
19346 (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP)))
19352 schdLnkNode = &hqP->schdLstLnk;
19353 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
19354 cmLListAdd2Tail(&allocInfo->dedAlloc.nonSchdRetxHqPLst, schdLnkNode);
19362 * @brief This function adds the UE to DLRbAllocInfo NonSchdTxRetxLst.
19366 * Function: rgSCHCmnDlAdd2NonSchdTxRetxLst
19367  *     Purpose:  During RB estimation for TXRETX, if allocation fails,
19368  *               the HARQ process is appended to NonSchdTxRetxLst; further
19369  *               action is taken as part of finalization in the
19370  *               respective schedulers.
19372 * Invoked by: Common Scheduler
19374 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19375 * @param[in] RgSchUeCb *ue
19376 * @param[in] RgSchDlHqProcCb *hqP
19382 * @brief This function handles the initialisation of DL HARQ/ACK feedback
19383  * timing information for each DL subframe.
19387 * Function: rgSCHCmnDlANFdbkInit
19388 * Purpose: Each DL subframe stores the sfn and subframe
19389 * information of UL subframe in which it expects
19390  *              HARQ ACK/NACK feedback for this subframe. It
19391 * generates the information based on Downlink
19392 * Association Set Index table.
19394 * Invoked by: Scheduler
19396 * @param[in] RgSchCellCb* cell
19401 PRIVATE S16 rgSCHCmnDlANFdbkInit
19406 PRIVATE S16 rgSCHCmnDlANFdbkInit(cell)
19411 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19412 U8 maxDlSubfrms = cell->numDlSubfrms;
19419 RgSchTddSubfrmInfo ulSubfrmInfo;
19422 TRC2(rgSCHCmnDlANFdbkInit);
19424 ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19425 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19427 /* Generate HARQ ACK/NACK feedback information for each DL sf in a radio frame
19428 * Calculate this information based on DL Association set Index table */
19429 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19431 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19432 RG_SCH_TDD_UL_SUBFRAME)
19434 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19438 for(idx=0; idx < rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19439 numFdbkSubfrms; idx++)
19441 calcSfNum = sfNum - rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19445 calcSfnOffset = RGSCH_CEIL(-calcSfNum, RGSCH_NUM_SUB_FRAMES);
19452 calcSfNum = ((RGSCH_NUM_SUB_FRAMES * calcSfnOffset) + calcSfNum)\
19453 % RGSCH_NUM_SUB_FRAMES;
19455 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19459 else if((ulSubfrmInfo.switchPoints == 2) && (calcSfNum <= \
19460 RG_SCH_CMN_SPL_SUBFRM_6))
19462 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19466 dlIdx = calcSfNum - maxUlSubfrms;
19469 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = sfNum;
19470 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = calcSfnOffset;
19471 cell->subFrms[dlIdx]->dlFdbkInfo.m = idx;
19473 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19476 /* DL subframes in the subsequent radio frames are initialized
19477 * with the previous radio frames */
19478 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;\
19481 sfNum = dlIdx - rgSchTddNumDlSubfrmTbl[ulDlCfgIdx]\
19482 [RGSCH_NUM_SUB_FRAMES-1];
19483 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = \
19484 cell->subFrms[sfNum]->dlFdbkInfo.subframe;
19485 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = \
19486 cell->subFrms[sfNum]->dlFdbkInfo.sfnOffset;
19487 cell->subFrms[dlIdx]->dlFdbkInfo.m = cell->subFrms[sfNum]->dlFdbkInfo.m;
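   /* Illustrative note (added for clarity): for every UL subframe n, the DL
    * association set gives offsets k, and each DL subframe n-k records the
    * {sfnOffset, UL subframe, m index} at which its HARQ ACK/NACK will arrive.
    * As an example, for UL/DL configuration 1, UL subframe 2 carries feedback
    * for the DL subframes 7 and 6 subframes earlier (offsets as per the 36.213
    * DL association set table; the exact values here are illustrative). */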
19493 * @brief This function handles the initialization of uplink association
19494 * set information for each DL subframe.
19499 * Function: rgSCHCmnDlKdashUlAscInit
19500  *   Purpose: Each DL subframe stores the SFN and subframe information of the UL subframe
19501  *   in which it expects HARQ ACK/NACK transmission. It generates the information
19502  *   based on k' in the UL association set index table.
19504 * Invoked by: Scheduler
19506 * @param[in] RgSchCellCb* cell
19511 PRIVATE S16 rgSCHCmnDlKdashUlAscInit
19516 PRIVATE S16 rgSCHCmnDlKdashUlAscInit(cell)
19521 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19522 U8 maxDlSubfrms = cell->numDlSubfrms;
19528 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19529 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19530 [RGSCH_NUM_SUB_FRAMES-1];
19533 TRC2(rgSCHCmnDlKdashUlAscInit);
19535 /* Generate ACK/NACK offset information for each DL subframe in a radio frame
19536 * Calculate this information based on K` in UL Association Set table */
19537 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19539 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19540 RG_SCH_TDD_UL_SUBFRAME)
19542 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19546 calcSfNum = (sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum] + \
19547 RGSCH_NUM_SUB_FRAMES) % RGSCH_NUM_SUB_FRAMES;
19548 calcSfnOffset = sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum];
19549 if(calcSfnOffset < 0)
19551 calcSfnOffset = RGSCH_CEIL(-calcSfnOffset, RGSCH_NUM_SUB_FRAMES);
19558 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19562 else if((ulSubfrmInfo.switchPoints == 2) &&
19563 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19565 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19569 dlIdx = calcSfNum - maxUlSubfrms;
19572 cell->subFrms[dlIdx]->ulAscInfo.subframe = sfNum;
19573 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset = calcSfnOffset;
19575 /* set dlIdx for which ulAscInfo is updated */
19576 dlPres = dlPres | (1 << dlIdx);
19577 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19580 /* Set Invalid information for which ulAscInfo is not present */
19582 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19585 /* If dlPres is 0, ulAscInfo is not present in that DL index */
19586 if(! ((dlPres >> sfCount)&0x01))
19588 cell->subFrms[sfCount]->ulAscInfo.sfnOffset =
19589 RGSCH_INVALID_INFO;
19590 cell->subFrms[sfCount]->ulAscInfo.subframe =
19591 RGSCH_INVALID_INFO;
19595 /* DL subframes in the subsequent radio frames are initialized
19596 * with the previous radio frames */
19597 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;
19601 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19602 cell->subFrms[dlIdx]->ulAscInfo.subframe =
19603 cell->subFrms[sfNum]->ulAscInfo.subframe;
19604 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset =
19605 cell->subFrms[sfNum]->ulAscInfo.sfnOffset;
19612 * @brief This function initialises the 'Np' value for 'p'
19616 * Function: rgSCHCmnDlNpValInit
19617 * Purpose: To initialise the 'Np' value for each 'p'. It is used
19618 * to find the mapping between nCCE and 'p' and used in
19619 * HARQ ACK/NACK reception.
19621 * Invoked by: Scheduler
19623 * @param[in] RgSchCellCb* cell
19628 PRIVATE S16 rgSCHCmnDlNpValInit
19633 PRIVATE S16 rgSCHCmnDlNpValInit(cell)
19639 TRC2(rgSCHCmnDlNpValInit);
19641 /* Always Np is 0 for p=0 */
19642 cell->rgSchTddNpValTbl[0] = 0;
19644 for(idx=1; idx < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; idx++)
19646 np = cell->bwCfg.dlTotalBw * (idx * RG_SCH_CMN_NUM_SUBCAR - 4);
19647 cell->rgSchTddNpValTbl[idx] = (U8) (np/36);
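   /* Illustrative note (added for clarity): with RG_SCH_CMN_NUM_SUBCAR = 12
    * this computes Np = floor(N_RB_DL * (12*p - 4) / 36) for p > 0 (Np = 0 for
    * p = 0), which gives the nCCE boundaries used for HARQ ACK/NACK reception.
    * For example, a 100-RB cell with p = 1: 100 * (12 - 4) = 800, and
    * 800 / 36 = 22. */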
19654 * @brief This function handles the creation of RACH preamble
19655 * list to queue the preambles and process at the scheduled
19660 * Function: rgSCHCmnDlCreateRachPrmLst
19661 * Purpose: To create RACH preamble list based on RA window size.
19662 * It is used to queue the preambles and process it at the
19665 * Invoked by: Scheduler
19667 * @param[in] RgSchCellCb* cell
19672 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst
19677 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst(cell)
19685 TRC2(rgSCHCmnDlCreateRachPrmLst);
19687 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19689 lstSize = raArrSz * RGSCH_MAX_RA_RNTI_PER_SUBFRM * RGSCH_NUM_SUB_FRAMES;
19691 cell->raInfo.maxRaSize = raArrSz;
19692 ret = rgSCHUtlAllocSBuf(cell->instIdx,
19693 (Data **)(&cell->raInfo.raReqLst), (Size)(lstSize * sizeof(CmLListCp)));
19699 cell->raInfo.lstSize = lstSize;
19706 * @brief This function handles the initialization of RACH Response
19707 * information at each DL subframe.
19711 * Function: rgSCHCmnDlRachInfoInit
19712 * Purpose: Each DL subframe stores the sfn and subframe information of
19713 * possible RACH response allowed for UL subframes. It generates
19714 * the information based on PRACH configuration.
19716 * Invoked by: Scheduler
19718 * @param[in] RgSchCellCb* cell
19723 PRIVATE S16 rgSCHCmnDlRachInfoInit
19728 PRIVATE S16 rgSCHCmnDlRachInfoInit(cell)
19733 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19736 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19737 [RGSCH_NUM_SUB_FRAMES-1];
19739 RgSchTddRachRspLst rachRspLst[3][RGSCH_NUM_SUB_FRAMES];
19747 RgSchTddRachDelInfo *delInfo;
19751 TRC2(rgSCHCmnDlRachInfoInit);
19753 cmMemset((U8 *)rachRspLst, 0, sizeof(rachRspLst));
19755 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19757 /* Include Special subframes */
19758 maxUlSubfrms = maxUlSubfrms + \
19759 rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx].switchPoints;
19760 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19762 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] ==
19763 RG_SCH_TDD_DL_SUBFRAME)
19765 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19769 startWin = (sfNum + RG_SCH_CMN_RARSP_WAIT_PRD + \
19770 ((RgSchCmnCell *)cell->sc.sch)->dl.numRaSubFrms);
19771 endWin = (startWin + cell->rachCfg.raWinSize - 1);
19773 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][startWin%RGSCH_NUM_SUB_FRAMES];
19774 /* Find the next DL subframe starting from Subframe 0 */
19775 if((startSubfrmIdx % RGSCH_NUM_SUB_FRAMES) == 0)
19777 startWin = RGSCH_CEIL(startWin, RGSCH_NUM_SUB_FRAMES);
19778 startWin = startWin * RGSCH_NUM_SUB_FRAMES;
19782 rgSchTddLowDlSubfrmIdxTbl[ulDlCfgIdx][endWin%RGSCH_NUM_SUB_FRAMES];
19783 endWin = (endWin/RGSCH_NUM_SUB_FRAMES) * RGSCH_NUM_SUB_FRAMES \
19785 if(startWin > endWin)
19789 /* Find all the possible RACH Response transmission
19790 * time within the RA window size */
19791 startSubfrmIdx = startWin%RGSCH_NUM_SUB_FRAMES;
19792 for(sfnIdx = startWin/RGSCH_NUM_SUB_FRAMES;
19793 sfnIdx <= endWin/RGSCH_NUM_SUB_FRAMES; sfnIdx++)
19795 if(sfnIdx == endWin/RGSCH_NUM_SUB_FRAMES)
19797 endSubfrmIdx = endWin%RGSCH_NUM_SUB_FRAMES;
19801 endSubfrmIdx = RGSCH_NUM_SUB_FRAMES-1;
19804 /* Find all the possible RACH Response transmission
19805 * time within radio frame */
19806 for(subfrmIdx = startSubfrmIdx;
19807 subfrmIdx <= endSubfrmIdx; subfrmIdx++)
19809 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][subfrmIdx] ==
19810 RG_SCH_TDD_UL_SUBFRAME)
19814 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
19815 /* Find the next DL subframe starting from Subframe 0 */
19816 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
19820 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx], subfrmIdx);
19822 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
19823 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset = sfnIdx;
19824 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].subframe[numSubfrms]
19826 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms++;
19828 startSubfrmIdx = RG_SCH_CMN_SUBFRM_0;
19830 /* Update the subframes to be deleted at this subframe */
19831 /* Get the subframe after the end of RA window size */
19834 sfnOffset = endWin/RGSCH_NUM_SUB_FRAMES;
19837 sfnOffset += raArrSz;
19839 sfnIdx = (endWin/RGSCH_NUM_SUB_FRAMES) % raArrSz;
19841 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx],endSubfrmIdx-1);
19842 if((endSubfrmIdx == RGSCH_NUM_SUB_FRAMES) ||
19843 (rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx] ==
19844 RGSCH_NUM_SUB_FRAMES))
19847 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][RG_SCH_CMN_SUBFRM_0];
19851 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx];
19854 delInfo = &rachRspLst[sfnIdx][subfrmIdx].delInfo;
19855 delInfo->sfnOffset = sfnOffset;
19856 delInfo->subframe[delInfo->numSubfrms] = sfNum;
19857 delInfo->numSubfrms++;
19859 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19862 ret = rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz);
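   /* Illustrative note (added for clarity; the numbers are hypothetical): for a
    * PRACH opportunity in UL subframe n the RAR window is
    * startWin = n + RG_SCH_CMN_RARSP_WAIT_PRD + numRaSubFrms and
    * endWin = startWin + raWinSize - 1. E.g. n = 2, a wait period of 2,
    * numRaSubFrms = 1 and raWinSize = 5 give a window covering subframes 5..9,
    * and every DL subframe falling inside it is recorded as a candidate RAR
    * transmission occasion. */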
19872 * @brief This function handles the initialization of PHICH information
19873 * for each DL subframe based on PHICH table.
19877 * Function: rgSCHCmnDlPhichOffsetInit
19878  *   Purpose: Each DL subframe stores the SFN and subframe information of the UL subframe
19879  *   for which it transmits PHICH in this subframe. It generates the information
19880  *   based on the PHICH table.
19882 * Invoked by: Scheduler
19884 * @param[in] RgSchCellCb* cell
19889 PRIVATE S16 rgSCHCmnDlPhichOffsetInit
19894 PRIVATE S16 rgSCHCmnDlPhichOffsetInit(cell)
19899 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19900 U8 maxDlSubfrms = cell->numDlSubfrms;
19907 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19908 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19909 [RGSCH_NUM_SUB_FRAMES-1];
19911 TRC2(rgSCHCmnDlPhichOffsetInit);
19913 /* Generate PHICH offset information for each DL subframe in a radio frame
19914 * Calculate this information based on K in PHICH table */
19915 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19917 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19918 RG_SCH_TDD_UL_SUBFRAME)
19920 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19924 calcSfNum = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) % \
19925 RGSCH_NUM_SUB_FRAMES;
19926 calcSfnOffset = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) / \
19927 RGSCH_NUM_SUB_FRAMES;
19929 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19933 else if((ulSubfrmInfo.switchPoints == 2) &&
19934 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19936 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19940 dlIdx = calcSfNum - maxUlSubfrms;
19943 cell->subFrms[dlIdx]->phichOffInfo.subframe = sfNum;
19944 cell->subFrms[dlIdx]->phichOffInfo.numSubfrms = 1;
19946 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset = calcSfnOffset;
19948 /* set dlIdx for which phich offset is updated */
19949 dlPres = dlPres | (1 << dlIdx);
19950 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19953 /* Set Invalid information for which phich offset is not present */
19955 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19958 /* If dlPres is 0, phich offset is not present in that DL index */
19959 if(! ((dlPres >> sfCount)&0x01))
19961 cell->subFrms[sfCount]->phichOffInfo.sfnOffset =
19962 RGSCH_INVALID_INFO;
19963 cell->subFrms[sfCount]->phichOffInfo.subframe =
19964 RGSCH_INVALID_INFO;
19965 cell->subFrms[sfCount]->phichOffInfo.numSubfrms = 0;
19969 /* DL subframes in the subsequent radio frames are
19970 * initialized with the previous radio frames */
19971 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms;
19972 dlIdx < maxDlSubfrms; dlIdx++)
19975 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19977 cell->subFrms[dlIdx]->phichOffInfo.subframe =
19978 cell->subFrms[sfNum]->phichOffInfo.subframe;
19980 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset =
19981 cell->subFrms[sfNum]->phichOffInfo.sfnOffset;
19988  * @brief Update of scheduler variables per TTI.
19992  *     Function: rgSCHCmnUpdVars
19993  *     Purpose:  Update of scheduler variables per TTI.
19995 * @param[in] RgSchCellCb *cell
20000 PUBLIC Void rgSCHCmnUpdVars
20005 PUBLIC Void rgSCHCmnUpdVars(cell)
20009 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
20010 CmLteTimingInfo timeInfo;
20013 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
20016 TRC2(rgSCHCmnUpdVars);
20018 /* ccpu00132654-ADD- Initializing all the indices in every subframe*/
20019 rgSCHCmnInitVars(cell);
20021 idx = (cell->crntTime.subframe + TFU_ULCNTRL_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20022 /* Calculate the UL scheduling subframe idx based on the
20024 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][idx] != 0)
20026 /* PUSCH transmission is based on offset from DL
20027 * PDCCH scheduling */
20028 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20029 ulSubframe = rgSchTddPuschTxKTbl[ulDlCfgIdx][timeInfo.subframe];
20030 /* Add the DCI-0 to PUSCH time to get the time of UL subframe */
20031 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, ulSubframe);
20033 cellUl->schdTti = timeInfo.sfn * 10 + timeInfo.subframe;
20035 /* Fetch the corresponding UL subframe Idx in UL sf array */
20036 cellUl->schdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20037 /* Fetch the corresponding UL Harq Proc ID */
20038 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
20039 cellUl->schdTime = timeInfo;
20041 Mval = rgSchTddPhichMValTbl[ulDlCfgIdx][idx];
20044 /* Fetch the tx time for DL HIDCI-0 */
20045 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20046 /* Fetch the corresponding n-k tx time of PUSCH */
20047 cellUl->hqFdbkIdx[0] = rgSCHCmnGetPhichUlSfIdx(&timeInfo, cell);
20048 /* Retx will happen according to the Pusch k table */
20049 cellUl->reTxIdx[0] = cellUl->schdIdx;
20051 if(ulDlCfgIdx == 0)
20053 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[0] */
20054 cellUl->reTxIdx[0] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20055 cellUl->hqFdbkIdx[0]);
20058 /* At Idx 1 store the UL SF adjacent(left) to the UL SF
20060 cellUl->hqFdbkIdx[1] = (cellUl->hqFdbkIdx[0]-1 +
20061 cellUl->numUlSubfrms) % cellUl->numUlSubfrms;
20062 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[1] */
20063 cellUl->reTxIdx[1] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20064 cellUl->hqFdbkIdx[1]);
20069 idx = (cell->crntTime.subframe + TFU_RECPREQ_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20070 if (rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] == RG_SCH_TDD_UL_SUBFRAME)
20072 RGSCHCMNADDTOCRNTTIME(cell->crntTime, timeInfo, TFU_RECPREQ_DLDELTA)
20073 cellUl->rcpReqIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20075 idx = (cell->crntTime.subframe+RG_SCH_CMN_DL_DELTA) % RGSCH_NUM_SUB_FRAMES;
20077 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
20078 special subframe */
20079 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] != RG_SCH_TDD_UL_SUBFRAME)
20081 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
20082 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20083 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, msg3Subfrm);
20084 cellUl->msg3SchdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20085 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
20088 if(!rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][idx])
20090 cellUl->spsUlRsrvIdx = RGSCH_INVALID_INFO;
20094 /* introduce some reuse with above code? */
20096 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
20097 //offst = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20098 offst = rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][timeInfo.subframe];
20099 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, offst);
20100 cellUl->spsUlRsrvIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20101       /* The HARQ proc continues to be accessed with the same delta before
20102        * the actual data occurrence, and hence uses the same idx */
20103 cellUl->spsUlRsrvHqProcIdx = cellUl->schdHqProcIdx;
20107 /* RACHO: update cmn sched specific RACH variables,
20108 * mainly the prachMaskIndex */
20109 rgSCHCmnUpdRachParam(cell);
20115 * @brief To get 'p' value from nCCE.
20119 * Function: rgSCHCmnGetPValFrmCCE
20120 * Purpose: Gets 'p' value for HARQ ACK/NACK reception from CCE.
20122 * @param[in] RgSchCellCb *cell
20123 * @param[in] U8 cce
20128 PUBLIC U8 rgSCHCmnGetPValFrmCCE
20134 PUBLIC U8 rgSCHCmnGetPValFrmCCE(cell, cce)
20140 TRC2(rgSCHCmnGetPValFrmCCE);
20142 for(i=1; i < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; i++)
20144 if(cce < cell->rgSchTddNpValTbl[i])
20153 /***********************************************************
20155 * Func : rgSCHCmnUlAdapRetx
20157 * Desc : Adaptive retransmission for an allocation.
20165 **********************************************************/
20167 PRIVATE Void rgSCHCmnUlAdapRetx
20169 RgSchUlAlloc *alloc,
20170 RgSchUlHqProcCb *proc
20173 PRIVATE Void rgSCHCmnUlAdapRetx(alloc, proc)
20174 RgSchUlAlloc *alloc;
20175 RgSchUlHqProcCb *proc;
20178 TRC2(rgSCHCmnUlAdapRetx);
20180 rgSCHUhmRetx(proc, alloc);
20182 if (proc->rvIdx != 0)
20184 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[proc->rvIdx];
20189 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
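   /* Note (added for clarity): for an adaptive retransmission with a non-zero
    * redundancy version, iMcsCrnt is taken from rgSchCmnUlRvIdxToIMcsTbl, i.e.
    * the reserved UL MCS indices that signal RV 1/2/3 in the retransmission
    * grant, while an RV-0 retransmission simply reuses the original iMcs. */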
20195 * @brief Scheduler invocation per TTI.
20199 * Function: rgSCHCmnHdlUlInactUes
20202 * Invoked by: Common Scheduler
20204 * @param[in] RgSchCellCb *cell
20208 PRIVATE Void rgSCHCmnHdlUlInactUes
20213 PRIVATE Void rgSCHCmnHdlUlInactUes(cell)
20217 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20218 CmLListCp ulInactvLst;
20219 TRC2(rgSCHCmnHdlUlInactUes);
20220 /* Get a List of Inactv UEs for UL*/
20221 cmLListInit(&ulInactvLst);
20223 /* Trigger Spfc Schedulers with Inactive UEs */
20224 rgSCHMeasGapANRepGetUlInactvUe (cell, &ulInactvLst);
20225 /* take care of this in UL retransmission */
20226 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInactvLst);
20232 * @brief Scheduler invocation per TTI.
20236 * Function: rgSCHCmnHdlDlInactUes
20239 * Invoked by: Common Scheduler
20241 * @param[in] RgSchCellCb *cell
20245 PRIVATE Void rgSCHCmnHdlDlInactUes
20250 PRIVATE Void rgSCHCmnHdlDlInactUes(cell)
20254 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20255 CmLListCp dlInactvLst;
20256 TRC2(rgSCHCmnHdlDlInactUes);
20257 /* Get a List of Inactv UEs for DL */
20258 cmLListInit(&dlInactvLst);
20260 /* Trigger Spfc Schedulers with Inactive UEs */
20261 rgSCHMeasGapANRepGetDlInactvUe (cell, &dlInactvLst);
20263 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInactvLst);
20267 /* RACHO: Rach handover functions start here */
20268 /***********************************************************
20270 * Func : rgSCHCmnUeIdleExdThrsld
20272 * Desc : RETURN ROK if UE has been idle more
20281 **********************************************************/
20283 PRIVATE S16 rgSCHCmnUeIdleExdThrsld
20289 PRIVATE S16 rgSCHCmnUeIdleExdThrsld(cell, ue)
20294 /* Time difference in subframes */
20295 U32 sfDiff = RGSCH_CALC_SF_DIFF(cell->crntTime, ue->ul.ulTransTime);
20297 TRC2(rgSCHCmnUeIdleExdThrsld);
20299 if (sfDiff > (U32)RG_SCH_CMN_UE_IDLE_THRSLD(ue))
20311 * @brief Scheduler processing for Ded Preambles on cell configuration.
20315 * Function : rgSCHCmnCfgRachDedPrm
20317 * This function does requisite initialisation
20318 * for RACH Ded Preambles.
20321 * @param[in] RgSchCellCb *cell
20325 PRIVATE Void rgSCHCmnCfgRachDedPrm
20330 PRIVATE Void rgSCHCmnCfgRachDedPrm(cell)
20334 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20335 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20338 TRC2(rgSCHCmnCfgRachDedPrm);
20340 if (cell->macPreambleSet.pres == NOTPRSNT)
20344 cellSch->rachCfg.numDedPrm = cell->macPreambleSet.size;
20345 cellSch->rachCfg.dedPrmStart = cell->macPreambleSet.start;
20346 /* Initialize handover List */
20347 cmLListInit(&cellSch->rachCfg.hoUeLst);
20348 /* Initialize pdcch Order List */
20349 cmLListInit(&cellSch->rachCfg.pdcchOdrLst);
20352    /* Initialize the rapId to UE mapping structure */
20352 for (cnt = 0; cnt<cellSch->rachCfg.numDedPrm; cnt++)
20354 cellSch->rachCfg.rapIdMap[cnt].rapId = cellSch->rachCfg.dedPrmStart + \
20356 cmLListInit(&cellSch->rachCfg.rapIdMap[cnt].assgndUes);
20358 /* Perform Prach Mask Idx, remDedPrm, applFrm initializations */
20359 /* Set remDedPrm as numDedPrm */
20360 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20361 /* Initialize applFrm */
20362 cellSch->rachCfg.prachMskIndx = 0;
20363 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_EVEN)
20365 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + \
20366 (cell->crntTime.sfn % 2)) % RGSCH_MAX_SFN;
20369 else if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ODD)
20371 if((cell->crntTime.sfn%2) == 0)
20373 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + 1)\
20380 cellSch->rachCfg.applFrm.sfn = cell->crntTime.sfn;
20382 /* Initialize cellSch->rachCfg.applFrm as >= crntTime.
20383 * This is because of RGSCH_CALC_SF_DIFF logic */
20384 if (cellSch->rachCfg.applFrm.sfn == cell->crntTime.sfn)
20386 while (cellSch->rachCfg.prachMskIndx < cell->rachCfg.raOccasion.size)
20388 if (cell->crntTime.subframe <\
20389 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx])
20393 cellSch->rachCfg.prachMskIndx++;
20395 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size)
20397 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20399 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) %\
20404 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) %\
20407 cellSch->rachCfg.prachMskIndx = 0;
20409 cellSch->rachCfg.applFrm.subframe = \
20410 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20414 cellSch->rachCfg.applFrm.subframe = \
20415 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20418 /* Note first param to this macro should always be the latest in time */
20419 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
20420 while (sfDiff <= gap)
20422 rgSCHCmnUpdNxtPrchMskIdx(cell);
20423 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
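   /* Illustrative note (added for clarity; the numbers are hypothetical): with
    * RGR_SFN_EVEN and crntTime.sfn = 101, applFrm.sfn starts at
    * (101 + 101 % 2) % RGSCH_MAX_SFN = 102; with RGR_SFN_ODD and an even
    * current SFN it starts at the next odd SFN. applFrm is then advanced
    * through the configured raOccasion subframes until it lies more than
    * RG_SCH_CMN_MIN_PRACH_OPPR_GAP subframes ahead of crntTime. */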
20430 * @brief Updates the PRACH MASK INDEX.
20434 * Function: rgSCHCmnUpdNxtPrchMskIdx
20435 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20436 * CFG is always >= "n"+"DELTA", where "n" is the crntTime
20437 * of the cell. If not, applFrm is updated to the next avl
20438  *     PRACH opportunity as per the PRACH Cfg Index configuration.
20441 * Invoked by: Common Scheduler
20443 * @param[in] RgSchCellCb *cell
20447 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx
20452 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx(cell)
20456 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20457 TRC2(rgSCHCmnUpdNxtPrchMskIdx);
20459 /* Determine the next prach mask Index */
20460 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size - 1)
20462 /* PRACH within applFrm.sfn are done, go to next AVL sfn */
20463 cellSch->rachCfg.prachMskIndx = 0;
20464 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20466 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) % \
20469 else/* RGR_SFN_EVEN or RGR_SFN_ODD */
20471 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) % \
20474 cellSch->rachCfg.applFrm.subframe = cell->rachCfg.raOccasion.\
20477 else /* applFrm.sfn is still valid */
20479 cellSch->rachCfg.prachMskIndx += 1;
20480 if ( cellSch->rachCfg.prachMskIndx < RGR_MAX_SUBFRAME_NUM )
20482 cellSch->rachCfg.applFrm.subframe = \
20483 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20490 * @brief Updates the Ded preamble RACH parameters
20495 * Function: rgSCHCmnUpdRachParam
20496 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20497 * CFG is always >= "n"+"6"+"DELTA", where "n" is the crntTime
20498 * of the cell. If not, applFrm is updated to the next avl
20500  *     PRACH opportunity as per the PRACH Cfg Index configuration,
20500 * accordingly the "remDedPrm" is reset to "numDedPrm" and
20501 * "prachMskIdx" field is updated as per "applFrm".
20504 * Invoked by: Common Scheduler
20506 * @param[in] RgSchCellCb *cell
20510 PRIVATE Void rgSCHCmnUpdRachParam
20515 PRIVATE Void rgSCHCmnUpdRachParam(cell)
20520 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20521 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20523 TRC2(rgSCHCmnUpdRachParam);
20525 if (cell->macPreambleSet.pres == NOTPRSNT)
20529 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, \
20533    /* applFrm is still a valid next PRACH opportunity */
20536 rgSCHCmnUpdNxtPrchMskIdx(cell);
20537 /* Reset remDedPrm as numDedPrm */
20538 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20544 * @brief Dedicated Preamble allocation function.
20548 * Function: rgSCHCmnAllocPOParam
20549 * Purpose: Allocate pdcch, rapId and PrachMskIdx.
20550 * Set mapping of UE with the allocated rapId.
20552 * Invoked by: Common Scheduler
20554 * @param[in] RgSchCellCb *cell
20555 * @param[in] RgSchDlSf *dlSf
20556 * @param[in] RgSchUeCb *ue
20557 * @param[out] RgSchPdcch **pdcch
20558 * @param[out] U8 *rapId
20559 * @param[out] U8 *prachMskIdx
20563 PRIVATE S16 rgSCHCmnAllocPOParam
20568 RgSchPdcch **pdcch,
20573 PRIVATE S16 rgSCHCmnAllocPOParam(cell, dlSf, ue, pdcch, rapId, prachMskIdx)
20577 RgSchPdcch **pdcch;
20583 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20584 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20586 TRC2(rgSCHCmnAllocPOParam);
20588 if (cell->macPreambleSet.pres == PRSNT_NODEF)
20590 if (cellSch->rachCfg.remDedPrm == 0)
20594 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20595 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
20599       /* The stored prachMskIdx is the index of PRACH Opportunities in
20600 * raOccasions.subframes[].
20601 * Converting the same to the actual PRACHMskIdx to be transmitted. */
20602 *prachMskIdx = cellSch->rachCfg.prachMskIndx + 1;
20603 /* Distribution starts from dedPrmStart till dedPrmStart + numDedPrm */
20604 *rapId = cellSch->rachCfg.dedPrmStart +
20605 cellSch->rachCfg.numDedPrm - cellSch->rachCfg.remDedPrm;
20606 cellSch->rachCfg.remDedPrm--;
20607 /* Map UE with the allocated RapId */
20608 ueDl->rachInfo.asgnOppr = cellSch->rachCfg.applFrm;
20609 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, cellSch->rachCfg.rapIdMap, (*rapId - cellSch->rachCfg.dedPrmStart));
20610 cmLListAdd2Tail(&cellSch->rachCfg.rapIdMap[*rapId - cellSch->rachCfg.dedPrmStart].assgndUes,
20611 &ueDl->rachInfo.rapIdLnk);
20612 ueDl->rachInfo.rapIdLnk.node = (PTR)ue;
20613 ueDl->rachInfo.poRapId = *rapId;
20615 else /* if dedicated preambles not configured */
20617 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20618 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
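   /* Illustrative note (added for clarity; the figures are hypothetical): with
    * dedPrmStart = 52, numDedPrm = 8 and remDedPrm = 8, the first PDCCH order
    * in this window gets rapId = 52 + 8 - 8 = 52, the next one 53, and so on
    * until remDedPrm reaches 0; the signalled prachMskIdx is the stored
    * raOccasion index plus one. */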
20630  * @brief Downlink Scheduling Handler.
20634 * Function: rgSCHCmnGenPdcchOrder
20635 * Purpose: For each UE in PO Q, grab a PDCCH,
20636 * get an available ded RapId and fill PDCCH
20637 * with PO information.
20639 * Invoked by: Common Scheduler
20641 * @param[in] RgSchCellCb *cell
20642 * @param[in] RgSchDlSf *dlSf
20646 PRIVATE Void rgSCHCmnGenPdcchOrder
20652 PRIVATE Void rgSCHCmnGenPdcchOrder(cell, dlSf)
20657 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20658 CmLList *node = cellSch->rachCfg.pdcchOdrLst.first;
20662 RgSchPdcch *pdcch = NULLP;
20664 TRC2(rgSCHCmnGenPdcchOrder);
20668 ue = (RgSchUeCb *)node->node;
20670       /* Skip sending for this subframe if the UE is measuring or inactive in UL due
20671        * to MeasGap, or inactive due to DRX
20673 if ((ue->measGapCb.isMeasuring == TRUE) ||
20674 (ue->ul.ulInactvMask & RG_MEASGAP_INACTIVE) ||
20675 (ue->isDrxEnabled &&
20676 ue->dl.dlInactvMask & RG_DRX_INACTIVE)
20681 if (rgSCHCmnAllocPOParam(cell, dlSf, ue, &pdcch, &rapId,\
20682 &prachMskIdx) != ROK)
20684          /* No more rapIds left for the valid next available opportunity.
20685 * Unsatisfied UEs here would be given a chance, when the
20686 * prach Mask Index changes as per rachUpd every TTI */
20688          /* PDCCH can also be ordered with rapId=0, prachMskIdx=0
20689 * so that UE triggers a RACH procedure with non-dedicated preamble.
20690 * But the implementation here does not do this. Instead, the "break"
20691 * here implies, that PDCCH Odr always given with valid rapId!=0,
20692 * prachMskIdx!=0 if dedicated preambles are configured.
20693 * If not configured, then trigger a PO with rapId=0,prchMskIdx=0*/
20696 /* Fill pdcch with pdcch odr information */
20697 rgSCHCmnFillPdcchOdr2Sf(cell, ue, pdcch, rapId, prachMskIdx);
20698 /* Remove this UE from the PDCCH ORDER QUEUE */
20699 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20700 /* Reset UE's power state */
20701 rgSCHPwrUeReset(cell, ue);
20708  * @brief This function adds the UE to the PdcchOdr Q if not already present.
20712 * Function: rgSCHCmnDlAdd2PdcchOdrQ
20715 * Invoked by: CMN Scheduler
20717 * @param[in] RgSchCellCb* cell
20718 * @param[in] RgSchUeCb* ue
20723 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ
20729 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ(cell, ue)
20734 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20735 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20737 TRC2(rgSCHCmnDlAdd2PdcchOdrQ);
20739 if (ueDl->rachInfo.poLnk.node == NULLP)
20741 cmLListAdd2Tail(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20742 ueDl->rachInfo.poLnk.node = (PTR)ue;
20749  * @brief This function removes the UE from the PdcchOdr Q if present.
20753 * Function: rgSCHCmnDlRmvFrmPdcchOdrQ
20756 * Invoked by: CMN Scheduler
20758 * @param[in] RgSchCellCb* cell
20759 * @param[in] RgSchUeCb* ue
20764 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ
20770 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue)
20775 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20776 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20778 TRC2(rgSCHCmnDlRmvFrmPdcchOdrQ);
20780 cmLListDelFrm(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20781 ueDl->rachInfo.poLnk.node = NULLP;
20786 * @brief Fill pdcch with PDCCH order information.
20790 * Function: rgSCHCmnFillPdcchOdr2Sf
20791 * Purpose: Fill PDCCH with PDCCH order information,
20793 * Invoked by: Common Scheduler
20795 * @param[in] RgSchUeCb *ue
20796 * @param[in] RgSchPdcch *pdcch
20797 * @param[in] U8 rapId
20798 * @param[in] U8 prachMskIdx
20802 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf
20811 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf(ue, pdcch, rapId, prachMskIdx)
20819 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
20821 TRC2(rgSCHCmnFillPdcchOdr2Sf);
20823 pdcch->rnti = ue->ueId;
20824 pdcch->dci.dciFormat = TFU_DCI_FORMAT_1A;
20825 pdcch->dci.u.format1aInfo.isPdcchOrder = TRUE;
20826 pdcch->dci.u.format1aInfo.t.pdcchOrder.preambleIdx = rapId;
20827 pdcch->dci.u.format1aInfo.t.pdcchOrder.prachMaskIdx = prachMskIdx;
20829 /* Request for APer CQI immediately after PDCCH Order */
20830 /* CR ccpu00144525 */
20832 if(ue->dl.ueDlCqiCfg.aprdCqiCfg.pres)
20834 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
20835 acqiCb->aCqiTrigWt = 0;
20844 * @brief UE deletion for scheduler.
20848 * Function : rgSCHCmnDelRachInfo
20850  *     This function deletes all RACH-related scheduler information
20851  *     pertaining to a UE.
20853 * @param[in] RgSchCellCb *cell
20854 * @param[in] RgSchUeCb *ue
20858 PRIVATE Void rgSCHCmnDelRachInfo
20864 PRIVATE Void rgSCHCmnDelRachInfo(cell, ue)
20869 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20870 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20873 TRC2(rgSCHCmnDelRachInfo);
20875 if (ueDl->rachInfo.poLnk.node)
20877 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20879 if (ueDl->rachInfo.hoLnk.node)
20881 cmLListDelFrm(&cellSch->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
20882 ueDl->rachInfo.hoLnk.node = NULLP;
20884 if (ueDl->rachInfo.rapIdLnk.node)
20886 rapIdIdx = ueDl->rachInfo.poRapId - cellSch->rachCfg.dedPrmStart;
20887 cmLListDelFrm(&cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes,
20888 &ueDl->rachInfo.rapIdLnk);
20889 ueDl->rachInfo.rapIdLnk.node = NULLP;
20895 * @brief This function retrieves the ue which has sent this raReq
20896 * and it allocates grant for UEs undergoing (for which RAR
20897 * is being generated) HandOver/PdcchOrder.
20902 * Function: rgSCHCmnHdlHoPo
20903 * Purpose: This function retrieves the ue which has sent this raReq
20904 * and it allocates grant for UEs undergoing (for which RAR
20905 * is being generated) HandOver/PdcchOrder.
20907 * Invoked by: Common Scheduler
20909 * @param[in] RgSchCellCb *cell
20910 * @param[out] CmLListCp *raRspLst
20911 * @param[in] RgSchRaReqInfo *raReq
20916 PRIVATE Void rgSCHCmnHdlHoPo
20919 CmLListCp *raRspLst,
20920 RgSchRaReqInfo *raReq
20923 PRIVATE Void rgSCHCmnHdlHoPo(cell, raRspLst, raReq)
20925 CmLListCp *raRspLst;
20926 RgSchRaReqInfo *raReq;
20929 RgSchUeCb *ue = raReq->ue;
20930 TRC2(rgSCHCmnHdlHoPo);
20932 if ( ue->isDrxEnabled )
20934 rgSCHDrxDedRa(cell,ue);
20936 rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq);
20941 * @brief This function retrieves the UE which has sent this raReq
20942 * for handover case.
20947 * Function: rgSCHCmnGetHoUe
20948 * Purpose: This function retrieves the UE which has sent this raReq
20949 * for handover case.
20951 * Invoked by: Common Scheduler
20953 * @param[in] RgSchCellCb *cell
20954 * @param[in] RgSchRaReqInfo *raReq
20955 * @return RgSchUeCb*
20959 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe
20965 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe(cell, rapId)
20970 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20974 RgSchCmnDlUe *ueDl;
20975 TRC2(rgSCHCmnGetHoUe);
20977 ueLst = &cellSch->rachCfg.hoUeLst;
20978 node = ueLst->first;
20981 ue = (RgSchUeCb *)node->node;
20983 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20984 if (ueDl->rachInfo.hoRapId == rapId)
20993 PRIVATE Void rgSCHCmnDelDedPreamble
20999 PRIVATE Void rgSCHCmnDelDedPreamble(cell, preambleId)
21004 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
21008 RgSchCmnDlUe *ueDl;
21009 TRC2(rgSCHCmnDelDedPreamble);
21011 ueLst = &cellSch->rachCfg.hoUeLst;
21012 node = ueLst->first;
21015 ue = (RgSchUeCb *)node->node;
21017 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
21018 if (ueDl->rachInfo.hoRapId == preambleId)
21020 cmLListDelFrm(ueLst, &ueDl->rachInfo.hoLnk);
21021 ueDl->rachInfo.hoLnk.node = (PTR)NULLP;
21027 * @brief This function retrieves the UE which has sent this raReq
21028  *        for the PDCCH Order case.
21033 * Function: rgSCHCmnGetPoUe
21034 * Purpose: This function retrieves the UE which has sent this raReq
21035 * for PDCCH Order case.
21037 * Invoked by: Common Scheduler
21039 * @param[in] RgSchCellCb *cell
21040 * @param[in] RgSchRaReqInfo *raReq
21041 * @return RgSchUeCb*
21045 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe
21049 CmLteTimingInfo timingInfo
21052 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe(cell, rapId, timingInfo)
21055 CmLteTimingInfo timingInfo;
21058 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
21062 RgSchCmnDlUe *ueDl;
21064 TRC2(rgSCHCmnGetPoUe);
21066 rapIdIdx = rapId -cellSch->rachCfg.dedPrmStart;
21067 ueLst = &cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes;
21068 node = ueLst->first;
21071 ue = (RgSchUeCb *)node->node;
21073 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
21074       /* Remove UEs irrespective of a match;
21075        * stale UE associations are removed.*/
21076 cmLListDelFrm(ueLst, &ueDl->rachInfo.rapIdLnk);
21077 ueDl->rachInfo.rapIdLnk.node = (PTR)NULLP;
21078 if (RGSCH_TIMEINFO_SAME(ueDl->rachInfo.asgnOppr, timingInfo))
21089 * @brief This function returns the valid UL cqi for a given UE.
21093 * Function: rgSCHCmnUlGetCqi
21094 * Purpose: This function returns the "valid UL cqi" for a given UE
21095 * based on UE category
21097 * Invoked by: Scheduler
21099 * @param[in] RgSchUeCb *ue
21100 * @param[in] U8 ueCtgy
21104 PUBLIC U8 rgSCHCmnUlGetCqi
21108 CmLteUeCategory ueCtgy
21111 PUBLIC U8 rgSCHCmnUlGetCqi(cell, ue, ueCtgy)
21114 CmLteUeCategory ueCtgy;
21117 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21120 TRC2(rgSCHCmnUlGetCqi);
21122 cqi = ueUl->maxUlCqi;
21124 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21125 (ueUl->validUlCqi > ueUl->maxUlCqi)))
21127 cqi = ueUl->validUlCqi;
21130 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21131 (ueUl->crntUlCqi[0] > ueUl->maxUlCqi )))
21133 cqi = ueUl->crntUlCqi[0];
21137 }/* End of rgSCHCmnUlGetCqi */
21139 /***********************************************************
21141 * Func : rgSCHCmnUlRbAllocForPoHoUe
21143 * Desc : Do uplink RB allocation for a HO/PO UE.
21147  *     Notes: As of now, for retx, maxRb
21148  *            is not considered. Alternatives, such
21149  *            as dropping the retx if it crosses maxRb,
21150  *            could be considered.
21154 **********************************************************/
21156 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe
21164 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, maxRb)
21171 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21172 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21173 U8 sbSize = cellUl->sbSize;
21174 U32 maxBits = ue->ul.maxBytesPerUePerTti*8;
21176 RgSchUlAlloc *alloc;
21186 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->msg3SchdHqProcIdx];
21187 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
21189 TRC2(rgSCHCmnUlRbAllocForPoHoUe);
21190 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
21194 /*MS_WORKAROUND for HO ccpu00121116*/
21195 cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
21196 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend], cqi);
21197 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21198 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
21199 while(iMcs > RG_SCH_CMN_MAX_MSG3_IMCS)
21202 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21203 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg);
21205 /* Filling the modorder in the grant structure*/
21206 RG_SCH_UL_MCS_TO_MODODR(iMcs,modOdr);
21207 if (!cell->isCpUlExtend)
21209 eff = rgSchCmnNorUlEff[0][iTbs];
21213 eff = rgSchCmnExtUlEff[0][iTbs];
21216 bits = ueUl->alloc.reqBytes * 8;
21218 #if (ERRCLASS & ERRCLS_DEBUG)
21225 if (bits < rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs))
21228 nPrb = numSb * sbSize;
21232 if (bits > maxBits)
21235 nPrb = bits * 1024 / eff / RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
21240 numSb = nPrb / sbSize;
21244 /*ccpu00128775:MOD-Change to get upper threshold nPrb*/
21245 nPrb = RGSCH_CEIL((RGSCH_CEIL(bits * 1024, eff)),
21246 RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl));
21251 numSb = RGSCH_DIV_ROUND(nPrb, sbSize);
21256 alloc = rgSCHCmnUlSbAlloc(sf, (U8)RGSCH_MIN(numSb, cellUl->maxSbPerUe),\
21258 if (alloc == NULLP)
21260 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
21261 "rgSCHCmnUlRbAllocForPoHoUe(): Could not get UlAlloc");
21264 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
21266 /* Filling the modorder in the grant structure start*/
21267 alloc->grnt.modOdr = (TfuModScheme) modOdr;
21268 alloc->grnt.iMcs = iMcs;
21269 alloc->grnt.iMcsCrnt = iMcsCrnt;
21270 alloc->grnt.hop = 0;
21271 /* Fix for ccpu00123915*/
21272 alloc->forMsg3 = TRUE;
21273 alloc->hqProc = proc;
21274 alloc->hqProc->ulSfIdx = cellUl->msg3SchdIdx;
21276 alloc->rnti = ue->ueId;
21277 /* updating initNumRbs in case of HO */
21279 ue->initNumRbs = alloc->grnt.numRb;
21281 ueUl->alloc.alloc = alloc;
21282 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
21283 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
21284 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
21285 /* MS_WORKAROUND for HO ccpu00121124*/
21286 /*[Adi temp change] Need to fil modOdr */
21287 RG_SCH_UL_MCS_TO_MODODR(alloc->grnt.iMcsCrnt,alloc->grnt.modOdr);
21288 rgSCHUhmNewTx(proc, ueUl->hqEnt.maxHqRetx, alloc);
21289 /* No grant attr recorded now */
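   /* Illustrative note (added for clarity; the figures are hypothetical): the
    * PRB estimate follows nPrb = ceil(ceil(bits * 1024, eff), numREsPerRb),
    * where eff is the per-RE efficiency scaled by 1024 for the chosen iTbs and
    * numREsPerRb is RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl). E.g. 400 requested
    * bits with eff = 2048 and 144 REs per RB give ceil(409600 / 2048) = 200
    * and ceil(200 / 144) = 2 PRBs, which is then converted to a whole number
    * of subbands before the subband allocation. */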
21294 * @brief This function allocates grant for UEs undergoing (for which RAR
21295 * is being generated) HandOver/PdcchOrder.
21300 * Function: rgSCHCmnAllocPoHoGrnt
21301 * Purpose: This function allocates grant for UEs undergoing (for which RAR
21302 * is being generated) HandOver/PdcchOrder.
21304 * Invoked by: Common Scheduler
21306 * @param[in] RgSchCellCb *cell
21307 * @param[out] CmLListCp *raRspLst,
21308 * @param[in] RgSchUeCb *ue
21309 * @param[in] RgSchRaReqInfo *raReq
21314 PRIVATE Void rgSCHCmnAllocPoHoGrnt
21317 CmLListCp *raRspLst,
21319 RgSchRaReqInfo *raReq
21322 PRIVATE Void rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq)
21324 CmLListCp *raRspLst;
21326 RgSchRaReqInfo *raReq;
21329 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21330 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21332 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
21334 TRC2(rgSCHCmnAllocPoHoGrnt);
21336 /* Clearing previous allocs if any*/
21337 rgSCHCmnUlUeDelAllocs(cell, ue);
21338 /* Fix : syed allocs are limited */
21339 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
21343 ueUl->alloc.reqBytes = RG_SCH_MIN_GRNT_HOPO;
21344 if (rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, RGSCH_MAX_UL_RB) != ROK)
21349 /* Fill grant information */
21350 grnt = &ueUl->alloc.alloc->grnt;
21355 RLOG_ARG1(L_ERROR,DBG_INSTID,cell->instIdx, "Failed to get"
21356 "the grant for HO/PDCCH Order. CRNTI:%d",ue->ueId);
21359 ue->ul.rarGrnt.rapId = raReq->raReq.rapId;
21360 ue->ul.rarGrnt.hop = grnt->hop;
21361 ue->ul.rarGrnt.rbStart = grnt->rbStart;
21362 ue->ul.rarGrnt.numRb = grnt->numRb;
21363 ue->ul.rarGrnt.tpc = grnt->tpc;
21364 ue->ul.rarGrnt.iMcsCrnt = grnt->iMcsCrnt;
21365 ue->ul.rarGrnt.ta.pres = TRUE;
21366 ue->ul.rarGrnt.ta.val = raReq->raReq.ta;
21367 ue->ul.rarGrnt.datSz = grnt->datSz;
21368 if((sf->numACqiCount < RG_SCH_MAX_ACQI_PER_ULSF) && (RG_SCH_APCQI_NO != ue->dl.reqForCqi))
21372       /* Send a two-bit cqiReq field if more than one cell is configured, else one bit */
21373 for (idx = 1;idx < CM_LTE_MAX_CELLS;idx++)
21375 if (ue->cellInfo[idx] != NULLP)
21377 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
21381 if (idx == CM_LTE_MAX_CELLS)
21384 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
21386 ue->dl.reqForCqi = RG_SCH_APCQI_NO;
21387 sf->numACqiCount++;
21391 ue->ul.rarGrnt.cqiReqBit = 0;
21393 /* Attach Ho/Po allocation to RAR Rsp cont free Lst */
21394 cmLListAdd2Tail(raRspLst, &ue->ul.rarGrnt.raRspLnk);
21395 ue->ul.rarGrnt.raRspLnk.node = (PTR)ue;
21401 * @brief This is a utility function to set the fields in
21402 * an UL harq proc which is identified for non-adaptive retx
21406 * Function: rgSCHCmnUlNonadapRetx
21407 * Purpose: Sets the fields in UL Harq proc for non-adaptive retx
21409 * @param[in] RgSchCmnUlCell *cellUl
21410 * @param[out] RgSchUlAlloc *alloc
21411 * @param[in] U8 idx
21416 PRIVATE Void rgSCHCmnUlNonadapRetx
21418 RgSchCmnUlCell *cellUl,
21419 RgSchUlAlloc *alloc,
21423 PRIVATE Void rgSCHCmnUlNonadapRetx(cellUl, alloc, idx)
21424 RgSchCmnUlCell *cellUl;
21425 RgSchUlAlloc *alloc;
21429 TRC2(rgSCHCmnUlNonadapRetx);
21430 rgSCHUhmRetx(alloc->hqProc, alloc);
21432 /* Update alloc to retx */
21433 alloc->hqProc->isRetx = TRUE;
21434 alloc->hqProc->ulSfIdx = cellUl->reTxIdx[idx];
21436 if (alloc->hqProc->rvIdx != 0)
21438 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[alloc->hqProc->rvIdx];
21442 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
21444 alloc->grnt.isRtx = TRUE;
21445 alloc->pdcch = NULLP;
21450 * @brief Check if 2 allocs overlap
21454 * Function : rgSCHCmnUlAllocsOvrLap
21456 * - Return TRUE if alloc1 and alloc2 overlap.
21458 * @param[in] RgSchUlAlloc *alloc1
21459 * @param[in] RgSchUlAlloc *alloc2
21463 PRIVATE Bool rgSCHCmnUlAllocsOvrLap
21465 RgSchUlAlloc *alloc1,
21466 RgSchUlAlloc *alloc2
21469 PRIVATE Bool rgSCHCmnUlAllocsOvrLap(alloc1, alloc2)
21470 RgSchUlAlloc *alloc1;
21471 RgSchUlAlloc *alloc2;
21475 TRC2(rgSCHCmnUlAllocsOvrLap);
21477 if (((alloc1->sbStart >= alloc2->sbStart) &&
21478 (alloc1->sbStart <= alloc2->sbStart + alloc2->numSb-1)) ||
21479 ((alloc2->sbStart >= alloc1->sbStart) &&
21480 (alloc2->sbStart <= alloc1->sbStart + alloc1->numSb-1)))
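   /* Illustrative note (added for clarity): two allocations overlap when either
    * start subband falls inside the other's subband range; e.g. subband ranges
    * [3..5] and [5..8] overlap (they share subband 5), while [3..5] and [6..8]
    * do not. */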
21488 * @brief Copy allocation Info from src to dst.
21492 * Function : rgSCHCmnUlCpyAllocInfo
21494 * - Copy allocation Info from src to dst.
21496 * @param[in] RgSchUlAlloc *srcAlloc
21497 * @param[in] RgSchUlAlloc *dstAlloc
21501 PRIVATE Void rgSCHCmnUlCpyAllocInfo
21504 RgSchUlAlloc *srcAlloc,
21505 RgSchUlAlloc *dstAlloc
21508 PRIVATE Void rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc)
21510 RgSchUlAlloc *srcAlloc;
21511 RgSchUlAlloc *dstAlloc;
21514 RgSchCmnUlUe *ueUl;
21515 TRC2(rgSCHCmnUlCpyAllocInfo);
21517 dstAlloc->grnt = srcAlloc->grnt;
21518 dstAlloc->hqProc = srcAlloc->hqProc;
21519 /* Fix : syed During UE context release, hqProc->alloc
21520 * was pointing to srcAlloc instead of dstAlloc and
21521 * freeing from incorrect sf->allocDb was
21522 * corrupting the list. */
21523 /* In case of SPS Occasion Allocation is done in advance and
21524 at a later time Hq Proc is linked. Hence HqProc
21525 pointer in alloc shall be NULL */
21527 if (dstAlloc->hqProc)
21530 dstAlloc->hqProc->alloc = dstAlloc;
21532 dstAlloc->ue = srcAlloc->ue;
21533 dstAlloc->rnti = srcAlloc->rnti;
21534 dstAlloc->forMsg3 = srcAlloc->forMsg3;
21535 dstAlloc->raCb = srcAlloc->raCb;
21536 dstAlloc->pdcch = srcAlloc->pdcch;
21537 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
21540 ueUl = RG_SCH_CMN_GET_UL_UE(dstAlloc->ue,cell);
21541 ueUl->alloc.alloc = dstAlloc;
21543 if (dstAlloc->ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
21545 if((dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc != NULLP)
21546 && (dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc == srcAlloc))
21548 dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc = dstAlloc;
21559 * @brief Update TX and RETX subframe's allocation
21564 * Function : rgSCHCmnUlInsAllocFrmNewSf2OldSf
21566 * - Release all preassigned allocations of newSf and merge
21568 * - If alloc of newSf collide with one or more allocs of oldSf
21569 * - mark all such allocs of oldSf for Adaptive Retx.
21570 * - Swap the alloc and hole DB references of oldSf and newSf.
21572 * @param[in] RgSchCellCb *cell
21573 * @param[in] RgSchUlSf *newSf
21574 * @param[in] RgSchUlSf *oldSf
21575 * @param[in] RgSchUlAlloc *srcAlloc
21579 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf
21584 RgSchUlAlloc *srcAlloc
21587 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, srcAlloc)
21591 RgSchUlAlloc *srcAlloc;
21594 RgSchUlAlloc *alloc, *dstAlloc, *nxtAlloc;
21596 /* MS_WORKAROUND ccpu00120827 */
21597 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
21599 TRC2(rgSCHCmnUlInsAllocFrmNewSf2OldSf);
21601 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21605 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
21606 /* If there is an overlap between alloc and srcAlloc
21607 * then alloc is marked for Adaptive retx and it is released
21609 if (rgSCHCmnUlAllocsOvrLap(alloc, srcAlloc) == TRUE)
21611 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21612 rgSCHUtlUlAllocRls(oldSf, alloc);
21614 /* No further allocs spanning the srcAlloc subbands */
21615 if (srcAlloc->sbStart + srcAlloc->numSb - 1 <= alloc->sbStart)
21619 } while ((alloc = nxtAlloc) != NULLP);
21622 /* After freeing all the colliding allocs, request an allocation
21623 * specifying the start and numSb within txSf. This request should
21624 * always succeed and return a non-NULL dstAlloc. */
21625 /* MS_WORKAROUND ccpu00120827 */
21626 remAllocs = schCmnCell->ul.maxAllocPerUlSf - *oldSf->allocCountRef;
21629 /* Fix : If oldSf already has max Allocs then release the
21630 * old RETX alloc to make space for new alloc of newSf.
21631 * newSf allocs(i.e new Msg3s) are given higher priority
21632 * over retx allocs. */
21633 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21637 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
21638 if (!alloc->mrgdNewTxAlloc)
21640 /* If alloc is for RETX */
21641 /* TODO: In this case, and also when choosing
21642 * an alloc for ADAP RETX, we need to send an ACK for
21643 * the corresponding alloc on PHICH */
21644 #ifndef EMTC_ENABLE
21645 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc);
21647 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc,FALSE);
21651 }while((alloc = nxtAlloc) != NULLP);
21654 dstAlloc = rgSCHUtlUlGetSpfcAlloc(oldSf, srcAlloc->sbStart, srcAlloc->numSb);
21656 /* This should never happen */
21657 if (dstAlloc == NULLP)
21659 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"CRNTI:%d "
21660 "rgSCHUtlUlGetSpfcAlloc failed in rgSCHCmnUlInsAllocFrmNewSf2OldSf",
21665 /* Copy the srcAlloc's state information in to dstAlloc */
21666 rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc);
21667 /* Set new Tx merged Alloc Flag to TRUE, indicating that this
21668 * alloc shall not be processed for non-adaptive retransmission */
21669 dstAlloc->mrgdNewTxAlloc = TRUE;
21674 * @brief Merge all allocations of newSf to oldSf.
21678 * Function : rgSCHCmnUlMergeSfAllocs
21680 * - Merge all allocations of newSf to oldSf.
21681 * - If newSf's alloc collides with oldSf's alloc
21682 * then oldSf's alloc is marked for adaptive Retx
21683 * and is released from oldSf to create space for
21686 * @param[in] RgSchCellCb *cell
21687 * @param[in] RgSchUlSf *oldSf
21688 * @param[in] RgSchUlSf *newSf
21692 PRIVATE Void rgSCHCmnUlMergeSfAllocs
21699 PRIVATE Void rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf)
21705 RgSchUlAlloc *alloc, *nxtAlloc;
21706 TRC2(rgSCHCmnUlMergeSfAllocs);
21709 /* Merge each alloc of newSf in to oldSf
21710 * and release it from newSf */
21711 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21715 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21716 rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, alloc);
21717 rgSCHUtlUlAllocRls(newSf, alloc);
21718 } while((alloc = nxtAlloc) != NULLP);
21724 * @brief Swap Hole/Alloc DB context of newSf and oldSf.
21728 * Function : rgSCHCmnUlSwapSfAllocs
21730 * - Swap Hole/Alloc DB context of newSf and oldSf.
21732 * @param[in] RgSchCellCb *cell
21733 * @param[in] RgSchUlSf *oldSf
21734 * @param[in] RgSchUlSf *newSf
21738 PRIVATE Void rgSCHCmnUlSwapSfAllocs
21745 PRIVATE Void rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf)
21751 RgSchUlAllocDb *tempAllocDb = newSf->allocDb;
21752 RgSchUlHoleDb *tempHoleDb = newSf->holeDb;
21753 U8 tempAvailSbs = newSf->availSubbands;
21755 TRC2(rgSCHCmnUlSwapSfAllocs);
21758 newSf->allocDb = oldSf->allocDb;
21759 newSf->holeDb = oldSf->holeDb;
21760 newSf->availSubbands = oldSf->availSubbands;
21762 oldSf->allocDb = tempAllocDb;
21763 oldSf->holeDb = tempHoleDb;
21764 oldSf->availSubbands = tempAvailSbs;
21766 /* Fix ccpu00120610*/
21767 newSf->allocCountRef = &newSf->allocDb->count;
21768 oldSf->allocCountRef = &oldSf->allocDb->count;
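/* Illustrative sketch (hedged, not part of the scheduler): the swap above is a plain
 * pointer exchange per field, followed by re-pointing allocCountRef so that each
 * subframe counts allocations in the DB it now owns. A minimal standalone version
 * for one field pair:
 *
 *   static Void swapAllocDbSketch(RgSchUlSf *a, RgSchUlSf *b)
 *   {
 *      RgSchUlAllocDb *tmp = a->allocDb;
 *      a->allocDb = b->allocDb;
 *      b->allocDb = tmp;
 *      a->allocCountRef = &a->allocDb->count;
 *      b->allocCountRef = &b->allocDb->count;
 *   }
 */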
21773 * @brief Perform non-adaptive RETX for non-colliding allocs.
21777 * Function : rgSCHCmnUlPrcNonAdptRetx
21779 * - Perform non-adaptive RETX for non-colliding allocs.
21781 * @param[in] RgSchCellCb *cell
21782 * @param[in] RgSchUlSf *newSf
21783 * @param[in] U8 idx
21787 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx
21794 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx)
21800 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21801 RgSchUlAlloc *alloc, *nxtAlloc;
21802 TRC2(rgSCHCmnUlPrcNonAdptRetx);
21804 /* perform non-adaptive retx allocation(adjustment) */
21805 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21809 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21810 /* A merged new TX alloc, reset the state and skip */
21811 if (alloc->mrgdNewTxAlloc)
21813 alloc->mrgdNewTxAlloc = FALSE;
21818 rgSCHCmnUlNonadapRetx(cellUl, alloc, idx);
21820 } while((alloc = nxtAlloc) != NULLP);
21826 * @brief Update TX and RETX subframe's allocation
21831 * Function : rgSCHCmnUlPrfmSfMerge
21833 * - Release all preassigned allocations of newSf and merge
21835 * - If alloc of newSf collide with one or more allocs of oldSf
21836 * - mark all such allocs of oldSf for Adaptive Retx.
21837 * - Swap the alloc and hole DB references of oldSf and newSf.
21838 * - The allocs which did not collide with pre-assigned msg3
21839 * allocs are marked for non-adaptive RETX.
21841 * @param[in] RgSchCellCb *cell
21842 * @param[in] RgSchUlSf *oldSf
21843 * @param[in] RgSchUlSf *newSf
21844 * @param[in] U8 idx
21848 PRIVATE Void rgSCHCmnUlPrfmSfMerge
21856 PRIVATE Void rgSCHCmnUlPrfmSfMerge(cell, oldSf, newSf, idx)
21863 TRC2(rgSCHCmnUlPrfmSfMerge);
21864 /* Preassigned resources for msg3 in newSf.
21865 * Hence do adaptive retx for all NACKED TXs */
21866 rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf);
21867 /* swap alloc and hole DBs of oldSf and newSf. */
21868 rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf);
21869 /* Here newSf has the resultant merged allocs context */
21870 /* Perform non-adaptive RETX for non-colliding allocs */
21871 rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx);
21877 * @brief Update TX and RETX subframe's allocation
21882 * Function : rgSCHCmnUlRmvCmpltdAllocs
21884 * - Free all transmissions which are ACKED
21885 * OR for which MAX retransmissions have been reached.
21889 * @param[in] RgSchCellCb *cell,
21890 * @param[in] RgSchUlSf *sf
21894 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs
21900 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs(cell, sf)
21905 RgSchUlAlloc *alloc, *nxtAlloc;
21906 TRC2(rgSCHCmnUlRmvCmpltdAllocs);
21908 if ((alloc = rgSCHUtlUlAllocFirst(sf)) == NULLP)
21914 nxtAlloc = rgSCHUtlUlAllocNxt(sf, alloc);
21916 printf("rgSCHCmnUlRmvCmpltdAllocs:time(%d %d) alloc->hqProc->remTx %d hqProcId(%d) \n",cell->crntTime.sfn,cell->crntTime.subframe,alloc->hqProc->remTx, alloc->grnt.hqProcId);
21918 alloc->hqProc->rcvdCrcInd = TRUE;
21919 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
21922 /* SR_RACH_STATS : MSG 3 MAX RETX FAIL*/
21923 if ((alloc->forMsg3 == TRUE) && (alloc->hqProc->remTx == 0))
21925 rgNumMsg3FailMaxRetx++;
21927 cell->tenbStats->sch.msg3Fail++;
21931 #ifdef MAC_SCH_STATS
21932 if(alloc->ue != NULLP)
21934 /* access from ulHarqProc*/
21935 RgSchUeCb *ueCb = alloc->ue;
21936 RgSchCmnUe *cmnUe = (RgSchCmnUe*)ueCb->sch;
21937 RgSchCmnUlUe *ulUe = &(cmnUe->ul);
21938 U8 cqi = ulUe->crntUlCqi[0];
21939 U16 numUlRetx = ueCb->ul.hqEnt.maxHqRetx - alloc->hqProc->remTx;
21941 hqRetxStats.ulCqiStat[(cqi - 1)].mcs = alloc->grnt.iMcs;
21946 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1++;
21949 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2++;
21952 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3++;
21955 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4++;
21958 hqRetxStats.ulCqiStat[(cqi - 1)].totalTx = \
21959 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1 + \
21960 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2 * 2) + \
21961 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3 * 3) + \
21962 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4 * 4);
21965 #endif /*MAC_SCH_STATS*/
21966 rgSCHCmnUlFreeAllocation(cell, sf, alloc);
21968 /*ccpu00106104 MOD added check for AckNackRep */
21969 /*added check for acknack so that adaptive retx considers ue
21970 inactivity due to ack nack repetition*/
21971 else if((alloc->ue != NULLP) && (TRUE != alloc->forMsg3))
21973 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21974 rgSCHUtlUlAllocRls(sf, alloc);
21976 } while ((alloc = nxtAlloc) != NULLP);
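/* Illustrative pattern (hedged): the loop above fetches the successor before the
 * current allocation may be freed or unlinked, so the traversal survives the release.
 * A generic sketch of the same idiom:
 *
 *   alloc = rgSCHUtlUlAllocFirst(sf);
 *   while (alloc != NULLP)
 *   {
 *      nxtAlloc = rgSCHUtlUlAllocNxt(sf, alloc);  -- save the successor first
 *      ...decide here whether to free/release alloc or keep it...
 *      alloc = nxtAlloc;                          -- then advance
 *   }
 */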
21982 * @brief Update an uplink subframe.
21986 * Function : rgSCHCmnRlsUlSf
21988 * For each allocation
21989 * - if no more tx needed
21990 * - Release allocation
21992 * - else, perform retransmission
21994 * @param[in] RgSchUlSf *sf
21995 * @param[in] U8 idx
21999 PUBLIC Void rgSCHCmnRlsUlSf
22005 PUBLIC Void rgSCHCmnRlsUlSf(cell, idx)
22010 TRC2(rgSCHCmnRlsUlSf);
22012 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22014 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
22016 RgSchUlSf *oldSf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
22018 /* Initialize the reTxLst of UL HqProcs for RETX subframe */
22019 if (rgSCHUtlUlAllocFirst(oldSf) == NULLP)
22023 /* Release all completed TX allocs from sf */
22024 rgSCHCmnUlRmvCmpltdAllocs(cell, oldSf);
22026 oldSf->numACqiCount = 0;
22032 * @brief Handle uplink allocation for retransmission.
22036 * Function : rgSCHCmnUlUpdAllocRetx
22038 * - Perform adaptive retransmission
22040 * @param[in] RgSchUlSf *sf
22041 * @param[in] RgSchUlAlloc *alloc
22045 PRIVATE Void rgSCHCmnUlUpdAllocRetx
22048 RgSchUlAlloc *alloc
22051 PRIVATE Void rgSCHCmnUlUpdAllocRetx(cell, alloc)
22053 RgSchUlAlloc *alloc;
22056 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
22058 TRC2(rgSCHCmnUlUpdAllocRetx);
22060 alloc->hqProc->reTxAlloc.rnti = alloc->rnti;
22061 alloc->hqProc->reTxAlloc.numSb = alloc->numSb;
22062 alloc->hqProc->reTxAlloc.iMcs = alloc->grnt.iMcs;
22064 alloc->hqProc->reTxAlloc.dciFrmt = alloc->grnt.dciFrmt;
22065 alloc->hqProc->reTxAlloc.numLyr = alloc->grnt.numLyr;
22066 alloc->hqProc->reTxAlloc.vrbgStart = alloc->grnt.vrbgStart;
22067 alloc->hqProc->reTxAlloc.numVrbg = alloc->grnt.numVrbg;
22068 alloc->hqProc->reTxAlloc.modOdr = alloc->grnt.modOdr;
22070 //iTbs = rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs);
22071 //iTbs = alloc->grnt.iMcs;
22072 //RGSCH_ARRAY_BOUND_CHECK( 0, rgTbSzTbl[0], iTbs);
22073 alloc->hqProc->reTxAlloc.tbSz = alloc->grnt.datSz;
22074 //rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1]/8;
22075 alloc->hqProc->reTxAlloc.ue = alloc->ue;
22076 alloc->hqProc->reTxAlloc.forMsg3 = alloc->forMsg3;
22077 alloc->hqProc->reTxAlloc.raCb = alloc->raCb;
22079 /* Set as retransmission is pending */
22080 alloc->hqProc->isRetx = TRUE;
22081 alloc->hqProc->alloc = NULLP;
22082 alloc->hqProc->ulSfIdx = RGSCH_INVALID_INFO;
22084 printf("Adding Harq Proc Id in the retx list hqProcId %d \n",alloc->grnt.hqProcId);
22086 cmLListAdd2Tail(&cmnUlCell->reTxLst, &alloc->hqProc->reTxLnk);
22087 alloc->hqProc->reTxLnk.node = (PTR)alloc->hqProc;
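/* Illustrative note (hedged): queuing a HARQ process for adaptive retx uses the common
 * cmLList idiom shown above - the embedded link node is appended to the list and its
 * node pointer is set back to the owning structure, so that a later dequeue (see the
 * reTxLst walk further below) can recover the owner:
 *
 *   cmLListAdd2Tail(&cmnUlCell->reTxLst, &proc->reTxLnk);
 *   proc->reTxLnk.node = (PTR)proc;
 *   ...
 *   proc = (RgSchUlHqProcCb *)node->node;   -- recovery at dequeue time
 */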
22092 * @brief Attempts allocation for msg3s for which ADAP retransmissions are required.
22097 * Function : rgSCHCmnUlAdapRetxAlloc
22099 * Attempts allocation for msg3s for which ADAP retransmissions are required.
22102 * @param[in] RgSchCellCb *cell
22103 * @param[in] RgSchUlSf *sf
22104 * @param[in] RgSchUlHqProcCb *proc;
22105 * @param[in] RgSchUlHole *hole;
22109 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc
22113 RgSchUlHqProcCb *proc,
22117 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole)
22120 RgSchUlHqProcCb *proc;
22124 U8 numSb = proc->reTxAlloc.numSb;
22125 U8 iMcs = proc->reTxAlloc.iMcs;
22126 CmLteTimingInfo frm = cell->crntTime;
22127 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22130 RgSchUlAlloc *alloc;
22131 TRC2(rgSCHCmnUlAdapRetxAlloc);
22133 /* Fetch PDCCH for msg3 */
22134 /* ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
22135 /* Introduced timing delta for UL control */
22136 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
22137 dlSf = rgSCHUtlSubFrmGet(cell, frm);
22138 pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
22139 if (pdcch == NULLP)
22144 /* Fetch UL Alloc for msg3 */
22145 if (numSb <= hole->num)
22147 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
22152 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22153 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
22154 "UL Alloc fail for msg3 retx for rnti: %d\n",
22155 proc->reTxAlloc.rnti);
22159 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
22160 alloc->grnt.iMcs = iMcs;
22161 alloc->grnt.datSz = proc->reTxAlloc.tbSz;
22164 //RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
22166 /* Fill UL Alloc for msg3 */
22167 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
22168 alloc->grnt.nDmrs = 0;
22169 alloc->grnt.hop = 0;
22170 alloc->grnt.delayBit = 0;
22171 alloc->grnt.isRtx = TRUE;
22172 proc->ulSfIdx = cellUl->schdIdx;
22174 proc->schdTime = cellUl->schdTime;
22175 alloc->grnt.hqProcId = proc->procId;
22176 alloc->grnt.dciFrmt = proc->reTxAlloc.dciFrmt;
22177 alloc->grnt.numLyr = proc->reTxAlloc.numLyr;
22178 alloc->grnt.vrbgStart = proc->reTxAlloc.vrbgStart;
22179 alloc->grnt.numVrbg = proc->reTxAlloc.numVrbg;
22180 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
22181 alloc->grnt.modOdr = proc->reTxAlloc.modOdr;
22183 /* TODO : Hardcoding these as of now */
22184 alloc->grnt.hop = 0;
22185 alloc->grnt.SCID = 0;
22186 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
22187 alloc->grnt.PMI = 0;
22188 alloc->grnt.uciOnxPUSCH = 0;
22190 alloc->rnti = proc->reTxAlloc.rnti;
22191 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
22192 alloc->ue = proc->reTxAlloc.ue;
22193 alloc->pdcch = pdcch;
22194 alloc->forMsg3 = proc->reTxAlloc.forMsg3;
22195 alloc->raCb = proc->reTxAlloc.raCb;
22196 alloc->hqProc = proc;
22197 alloc->isAdaptive = TRUE;
22199 sf->totPrb += alloc->grnt.numRb;
22201 /* FIX : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
22204 alloc->raCb->msg3Grnt= alloc->grnt;
22206 /* To the crntTime, add the time at which UE will
22207 * actually send MSG3 */
22208 alloc->raCb->msg3AllocTime = cell->crntTime;
22209 RGSCH_INCR_SUB_FRAME(alloc->raCb->msg3AllocTime, RG_SCH_CMN_MIN_RETXMSG3_RECP_INTRVL);
22211 alloc->raCb->msg3AllocTime = cellUl->schdTime;
22213 rgSCHCmnUlAdapRetx(alloc, proc);
22214 /* Fill PDCCH with alloc info */
22215 pdcch->rnti = alloc->rnti;
22216 pdcch->dci.dciFormat = TFU_DCI_FORMAT_0;
22217 pdcch->dci.u.format0Info.hoppingEnbld = alloc->grnt.hop;
22218 pdcch->dci.u.format0Info.rbStart = alloc->grnt.rbStart;
22219 pdcch->dci.u.format0Info.numRb = alloc->grnt.numRb;
22220 pdcch->dci.u.format0Info.mcs = alloc->grnt.iMcsCrnt;
22221 pdcch->dci.u.format0Info.ndi = alloc->hqProc->ndi;
22222 pdcch->dci.u.format0Info.nDmrs = alloc->grnt.nDmrs;
22223 pdcch->dci.u.format0Info.tpcCmd = alloc->grnt.tpc;
22227 /* ulIdx setting for cfg 0 shall be appropriately fixed thru ccpu00109015 */
22228 pdcch->dci.u.format0Info.ulIdx = RG_SCH_ULIDX_MSB;
22229 pdcch->dci.u.format0Info.dai = RG_SCH_MAX_DAI_IDX;
22232 pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_0];
22236 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue,cell);
22238 alloc->ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
22241 ue->ul.nPrb = alloc->grnt.numRb;
22243 ueUl->alloc.alloc = alloc;
22244 /* Fix: Removed the call to rgSCHCmnUlAdapRetx */
22245 rgSCHCmnUlUeFillAllocInfo(cell, alloc->ue);
22246 /* Setting csireq as false for Adaptive Retx*/
22247 ueUl->alloc.alloc->pdcch->dci.u.format0Info.cqiReq = RG_SCH_APCQI_NO;
22248 pdcch->dciNumOfBits = alloc->ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
22250 /* Reset as retransmission is done */
22251 proc->isRetx = FALSE;
22253 else /* Intg fix */
22255 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22256 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
22257 "Num SB not sufficient for adap retx for rnti: %d",
22258 proc->reTxAlloc.rnti);
22264 /* Fix: syed Adaptive Msg3 Retx crash. */
22266 * @brief Releases all Adaptive Retx HqProcs which failed for
22267 * allocations in this scheduling occasion.
22271 * Function : rgSCHCmnUlSfRlsRetxProcs
22274 * @param[in] RgSchCellCb *cell
22275 * @param[in] RgSchUlSf *sf
22279 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs
22285 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs(cell, sf)
22292 RgSchUlHqProcCb *proc;
22293 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22295 TRC2(rgSCHCmnUlSfRlsRetxProcs);
22297 cp = &(cellUl->reTxLst);
22301 proc = (RgSchUlHqProcCb *)node->node;
22303 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
22304 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22305 proc->reTxLnk.node = (PTR)NULLP;
22312 * @brief Attempts allocation for UEs for which retransmissions are required.
22317 * Function : rgSCHCmnUlSfReTxAllocs
22319 * Attempts allocation for UEs for which retransmissions are required.
22322 * @param[in] RgSchCellCb *cell
22323 * @param[in] RgSchUlSf *sf
22327 PRIVATE Void rgSCHCmnUlSfReTxAllocs
22333 PRIVATE Void rgSCHCmnUlSfReTxAllocs(cell, sf)
22340 RgSchUlHqProcCb *proc;
22343 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
22344 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22345 TRC2(rgSCHCmnUlSfReTxAllocs);
22347 cp = &(cellUl->reTxLst);
22351 proc = (RgSchUlHqProcCb *)node->node;
22352 ue = proc->reTxAlloc.ue;
22354 /*ccpu00106104 MOD added check for AckNackRep */
22355 /*added check for acknack so that adaptive retx considers ue
22356 inactivity due to ack nack repetition*/
22357 if((ue != NULLP) &&
22358 ((ue->measGapCb.isMeasuring == TRUE)||
22359 (ue->ackNakRepCb.isAckNakRep == TRUE)))
22363 /* Fix for ccpu00123917: Check if maximum allocs per UL sf have been exhausted */
22364 if (((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
22365 || (sf->allocDb->count == schCmnCell->ul.maxAllocPerUlSf))
22367 /* No more UL BW then return */
22370 /* perform adaptive retx for UE's */
22371 if (rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole) == FALSE)
22375 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
22376 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22377 /* Fix: syed Adaptive Msg3 Retx crash. */
22378 proc->reTxLnk.node = (PTR)NULLP;
22384 * @brief Handles RB allocation for downlink.
22388 * Function : rgSCHCmnDlRbAlloc
22390 * Invoking Module Processing:
22391 * - This function is invoked for DL RB allocation
22393 * Processing Steps:
22394 * - If cell is frequency selecive,
22395 * - Call rgSCHDlfsAllocRb().
22397 * - Call rgSCHCmnNonDlfsRbAlloc().
22399 * @param[in] RgSchCellCb *cell
22400 * @param[in] RgSchDlRbAllocInfo *allocInfo
22405 PRIVATE Void rgSCHCmnDlRbAlloc
22408 RgSchCmnDlRbAllocInfo *allocInfo
22411 PRIVATE Void rgSCHCmnDlRbAlloc(cell, allocInfo)
22413 RgSchCmnDlRbAllocInfo *allocInfo;
22416 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
22417 TRC2(rgSCHCmnDlRbAlloc);
22419 if (cellSch->dl.isDlFreqSel)
22421 printf("5GTF_ERROR DLFS SCH Enabled\n");
22422 cellSch->apisDlfs->rgSCHDlfsAllocRb(cell, allocInfo);
22426 rgSCHCmnNonDlfsRbAlloc(cell, allocInfo);
22434 * @brief Determines number of RBGs and RBG subset sizes for the given DL
22435 * bandwidth and rbgSize
22438 * Function : rgSCHCmnDlGetRbgInfo
22441 * Processing Steps:
22442 * - Fill-up rbgInfo data structure for given DL bandwidth and rbgSize
22444 * @param[in] U8 dlTotalBw
22445 * @param[in] U8 dlSubsetBw
22446 * @param[in] U8 maxRaType1SubsetBw
22447 * @param[in] U8 rbgSize
22448 * @param[out] RgSchBwRbgInfo *rbgInfo
22452 PUBLIC Void rgSCHCmnDlGetRbgInfo
22456 U8 maxRaType1SubsetBw,
22458 RgSchBwRbgInfo *rbgInfo
22461 PUBLIC Void rgSCHCmnDlGetRbgInfo(dlTotalBw, dlSubsetBw, maxRaType1SubsetBw,
22465 U8 maxRaType1SubsetBw;
22467 RgSchBwRbgInfo *rbgInfo;
22470 #ifdef RGSCH_SPS_UNUSED
22472 U8 lastRbgIdx = ((dlTotalBw + rbgSize - 1)/rbgSize) - 1;
22473 U8 currRbgSize = rbgSize;
22474 U8 subsetSizeIdx = 0;
22475 U8 subsetSize[RG_SCH_NUM_RATYPE1_SUBSETS] = {0};
22476 U8 lastRbgSize = rbgSize - (dlTotalBw - ((dlTotalBw/rbgSize) * rbgSize));
22477 U8 numRaType1Rbgs = (maxRaType1SubsetBw + rbgSize - 1)/rbgSize;
22480 /* Compute maximum number of SPS RBGs for the cell */
22481 rbgInfo->numRbgs = ((dlSubsetBw + rbgSize - 1)/rbgSize);
22483 #ifdef RGSCH_SPS_UNUSED
22484 /* Distribute RBGs across subsets except last RBG */
22485 for (;idx < numRaType1Rbgs - 1; ++idx)
22487 subsetSize[subsetSizeIdx] += currRbgSize;
22488 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22491 /* Computation for last RBG */
22492 if (idx == lastRbgIdx)
22494 currRbgSize = lastRbgSize;
22496 subsetSize[subsetSizeIdx] += currRbgSize;
22497 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22500 /* Update the computed sizes */
22501 #ifdef RGSCH_SPS_UNUSED
22502 rbgInfo->lastRbgSize = currRbgSize;
22504 rbgInfo->lastRbgSize = rbgSize -
22505 (dlSubsetBw - ((dlSubsetBw/rbgSize) * rbgSize));
22506 #ifdef RGSCH_SPS_UNUSED
22507 cmMemcpy((U8 *)rbgInfo->rbgSubsetSize, (U8 *) subsetSize, 4 * sizeof(U8));
22509 rbgInfo->numRbs = (rbgInfo->numRbgs * rbgSize > dlTotalBw) ?
22510 dlTotalBw:(rbgInfo->numRbgs * rbgSize);
22511 rbgInfo->rbgSize = rbgSize;
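/* Worked example (illustrative, not part of the code): for a 5 MHz cell with
 * dlTotalBw = 25 RBs and rbgSize P = 2, numRbgs = ceil(25/2) = 13 and the last RBG
 * carries only 25 - 12*2 = 1 RB. A standalone sketch of the ceiling arithmetic used
 * above:
 *
 *   static U8 numRbgsSketch(U8 bw, U8 p)
 *   {
 *      return (bw + p - 1) / p;        -- e.g. (25 + 2 - 1) / 2 = 13
 *   }
 */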
22515 * @brief Handles RB allocation for Resource allocation type 0
22519 * Function : rgSCHCmnDlRaType0Alloc
22521 * Invoking Module Processing:
22522 * - This function is invoked for DL RB allocation for resource allocation
22525 * Processing Steps:
22526 * - Determine the available positions in the rbgMask.
22527 * - Allocate RBGs in the available positions.
22528 * - Update RA Type 0, RA Type 1 and RA type 2 masks.
22530 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22531 * @param[in] U8 rbsReq
22532 * @param[in] RgSchBwRbgInfo *rbgInfo
22533 * @param[out] U8 *numAllocRbs
22534 * @param[out] RgSchDlSfAllocInfo *resAllocInfo
22535 * @param[in] Bool isPartialAlloc
22541 PUBLIC U8 rgSCHCmnDlRaType0Alloc
22543 RgSchDlSfAllocInfo *allocedInfo,
22545 RgSchBwRbgInfo *rbgInfo,
22547 RgSchDlSfAllocInfo *resAllocInfo,
22548 Bool isPartialAlloc
22551 PUBLIC U8 rgSCHCmnDlRaType0Alloc(allocedInfo, rbsReq, rbgInfo,
22552 numAllocRbs, resAllocInfo, isPartialAlloc)
22553 RgSchDlSfAllocInfo *allocedInfo;
22555 RgSchBwRbgInfo *rbgInfo;
22557 RgSchDlSfAllocInfo *resAllocInfo;
22558 Bool isPartialAlloc;
22561 /* Note: This function attempts only full allocation */
22562 U32 remNumRbs, rbgPosInRbgMask, ueRaType2Mask;
22563 U8 type2MaskIdx, cnt, rbIdx;
22565 U8 bestNumAvailRbs = 0;
22567 U8 numAllocRbgs = 0;
22568 U8 rbgSize = rbgInfo->rbgSize;
22569 U32 *rbgMask = &(resAllocInfo->raType0Mask);
22570 #ifdef RGSCH_SPS_UNUSED
22573 U32 *raType1Mask = resAllocInfo->raType1Mask;
22574 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22576 U32 *raType2Mask = resAllocInfo->raType2Mask;
22578 U32 allocedMask = allocedInfo->raType0Mask;
22580 maskSize = rbgInfo->numRbgs;
22583 RG_SCH_CMN_DL_COUNT_ONES(allocedMask, maskSize, &usedRbs);
22584 if (maskSize == usedRbs)
22586 /* All RBGs are allocated, including the last one */
22591 remNumRbs = (maskSize - usedRbs - 1) * rbgSize; /* vamsee: removed minus 1 */
22593 /* If last RBG is available, add last RBG size */
22594 if (!(allocedMask & (1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(maskSize - 1))))
22596 remNumRbs += rbgInfo->lastRbgSize;
22600 /* If complete allocation is needed, check if total requested RBs are available else
22601 * check the best available RBs */
22602 if (!isPartialAlloc)
22604 if (remNumRbs >= rbsReq)
22606 bestNumAvailRbs = rbsReq;
22611 bestNumAvailRbs = remNumRbs > rbsReq ? rbsReq : remNumRbs;
22614 /* Allocate for bestNumAvailRbs */
22615 if (bestNumAvailRbs)
22617 for (rbg = 0; rbg < maskSize - 1; ++rbg)
22619 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22620 if (!(allocedMask & rbgPosInRbgMask))
22622 /* Update RBG mask */
22623 *rbgMask |= rbgPosInRbgMask;
22625 /* Compute RB index of the first RB of the RBG allocated */
22626 rbIdx = rbg * rbgSize;
22628 for (cnt = 0; cnt < rbgSize; ++cnt)
22630 #ifdef RGSCH_SPS_UNUSED
22631 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22633 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22634 #ifdef RGSCH_SPS_UNUSED
22635 /* Update RBG mask for RA type 1 */
22636 raType1Mask[rbgSubset] |= ueRaType1Mask;
22637 raType1UsedRbs[rbgSubset]++;
22639 /* Update RA type 2 mask */
22640 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22643 *numAllocRbs += rbgSize;
22644 remNumRbs -= rbgSize;
22646 if (*numAllocRbs >= bestNumAvailRbs)
22652 /* If the last RBG is available and allocation is not complete, allocate the last RBG */
22654 if (*numAllocRbs < bestNumAvailRbs)
22656 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22657 *rbgMask |= rbgPosInRbgMask;
22658 *numAllocRbs += rbgInfo->lastRbgSize;
22660 /* Compute RB index of the first RB of the last RBG */
22661 rbIdx = ((rbgInfo->numRbgs - 1 ) * rbgSize ); /* removed minus 1 vamsee */
22663 for (cnt = 0; cnt < rbgInfo->lastRbgSize; ++cnt)
22665 #ifdef RGSCH_SPS_UNUSED
22666 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22668 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22669 #ifdef RGSCH_SPS_UNUSED
22670 /* Update RBG mask for RA type 1 */
22671 raType1Mask[rbgSubset] |= ueRaType1Mask;
22672 raType1UsedRbs[rbgSubset]++;
22674 /* Update RA type 2 mask */
22675 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22678 remNumRbs -= rbgInfo->lastRbgSize;
22681 /* Note: this should complete the allocation; the remaining count is not checked further here */
22685 RETVALUE(numAllocRbgs);
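/* Illustrative sketch (hedged, not the original implementation): RA type 0 allocation
 * walks the RBG bitmap, claims free RBGs by OR-ing in their bit positions and stops
 * once the requested number of RBs is covered. A simplified standalone version that
 * uses plain LSB-first bit positions instead of the RG_SCH_CMN_DL_GET_POS_FRM_LSB
 * macro, and ignores the smaller last RBG handled separately above:
 *
 *   static U8 raType0AllocSketch(U32 *rbgMask, U32 usedMask, U8 numRbgs,
 *                                U8 rbgSize, U8 rbsReq)
 *   {
 *      U8 rbg, allocRbs = 0;
 *      for (rbg = 0; (rbg < numRbgs) && (allocRbs < rbsReq); ++rbg)
 *      {
 *         if (!(usedMask & (1U << rbg)))
 *         {
 *            *rbgMask |= (1U << rbg);    -- claim this free RBG
 *            allocRbs += rbgSize;
 *         }
 *      }
 *      return allocRbs;
 *   }
 */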
22688 #ifdef RGSCH_SPS_UNUSED
22690 * @brief Handles RB allocation for Resource allocation type 1
22694 * Function : rgSCHCmnDlRaType1Alloc
22696 * Invoking Module Processing:
22697 * - This function is invoked for DL RB allocation for resource allocation
22700 * Processing Steps:
22701 * - Determine the available positions in the subsets.
22702 * - Allocate RB in the available subset.
22703 * - Update RA Type1, RA type 0 and RA type 2 masks.
22705 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22706 * @param[in] U8 rbsReq
22707 * @param[in] RgSchBwRbgInfo *rbgInfo
22708 * @param[in] U8 startRbgSubset
22709 * @param[in] U8 *allocRbgSubset
22710 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22711 * @param[in] Bool isPartialAlloc
22714 * Number of allocated RBs
22718 PUBLIC U8 rgSCHCmnDlRaType1Alloc
22720 RgSchDlSfAllocInfo *allocedInfo,
22722 RgSchBwRbgInfo *rbgInfo,
22724 U8 *allocRbgSubset,
22725 RgSchDlSfAllocInfo *resAllocInfo,
22726 Bool isPartialAlloc
22729 PUBLIC U8 rgSCHCmnDlRaType1Alloc(allocedInfo, rbsReq,rbgInfo,startRbgSubset,
22730 allocRbgSubset, resAllocInfo, isPartialAlloc)
22731 RgSchDlSfAllocInfo *allocedInfo;
22733 RgSchBwRbgInfo *rbgInfo;
22735 U8 *allocRbgSubset;
22736 RgSchDlSfAllocInfo *resAllocInfo;
22737 Bool isPartialAlloc;
22740 /* Note: This function attempts only full allocation */
22741 U8 *rbgSubsetSzArr;
22742 U8 type2MaskIdx, subsetIdx, rbIdx, rbInSubset, rbgInSubset;
22743 U8 offset, rbg, maskSize, bestSubsetIdx;
22745 U8 bestNumAvailRbs = 0;
22746 U8 numAllocRbs = 0;
22747 U32 ueRaType2Mask, ueRaType0Mask, rbPosInSubset;
22748 U32 remNumRbs, allocedMask;
22750 U8 rbgSize = rbgInfo->rbgSize;
22751 U8 rbgSubset = startRbgSubset;
22752 U32 *rbgMask = &resAllocInfo->raType0Mask;
22753 U32 *raType1Mask = resAllocInfo->raType1Mask;
22754 U32 *raType2Mask = resAllocInfo->raType2Mask;
22755 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22756 U32 *allocMask = allocedInfo->raType1Mask;
22758 /* Initialize the subset size Array */
22759 rbgSubsetSzArr = rbgInfo->rbgSubsetSize;
22761 /* Perform allocation for RA type 1 */
22762 for (subsetIdx = 0;subsetIdx < rbgSize; ++subsetIdx)
22764 allocedMask = allocMask[rbgSubset];
22765 maskSize = rbgSubsetSzArr[rbgSubset];
22767 /* Determine number of available RBs in the subset */
22768 usedRbs = allocedInfo->raType1UsedRbs[subsetIdx];
22769 remNumRbs = maskSize - usedRbs;
22771 if (remNumRbs >= rbsReq)
22773 bestNumAvailRbs = rbsReq;
22774 bestSubsetIdx = rbgSubset;
22777 else if (isPartialAlloc && (remNumRbs > bestNumAvailRbs))
22779 bestNumAvailRbs = remNumRbs;
22780 bestSubsetIdx = rbgSubset;
22783 rbgSubset = (rbgSubset + 1) % rbgSize;
22784 } /* End of for (each rbgsubset) */
22786 if (bestNumAvailRbs)
22788 /* Initialize alloced mask and subsetSize depending on the RBG
22789 * subset of allocation */
22791 maskSize = rbgSubsetSzArr[bestSubsetIdx];
22792 allocedMask = allocMask[bestSubsetIdx];
22793 RG_SCH_CMN_DL_GET_START_POS(allocedMask, maskSize,
22795 for (; startIdx < rbgSize; ++startIdx, ++startPos)
22797 for (rbInSubset = startPos; rbInSubset < maskSize;
22798 rbInSubset = rbInSubset + rbgSize)
22800 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
22801 if (!(allocedMask & rbPosInSubset))
22803 raType1Mask[bestSubsetIdx] |= rbPosInSubset;
22804 raType1UsedRbs[bestSubsetIdx]++;
22806 /* Compute RB index value for the RB being allocated */
22807 rbgInSubset = rbInSubset /rbgSize;
22808 offset = rbInSubset % rbgSize;
22809 rbg = (rbgInSubset * rbgSize) + bestSubsetIdx;
22810 rbIdx = (rbg * rbgSize) + offset;
22812 /* Update RBG mask for RA type 0 allocation */
22813 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22814 *rbgMask |= ueRaType0Mask;
22816 /* Update RA type 2 mask */
22817 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22818 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22820 /* Update the counters */
22823 if (numAllocRbs == bestNumAvailRbs)
22828 } /* End of for (each position in the subset mask) */
22829 if (numAllocRbs == bestNumAvailRbs)
22833 } /* End of for startIdx = 0 to rbgSize */
22835 *allocRbgSubset = bestSubsetIdx;
22836 } /* End of if (bestNumAvailRbs) */
22838 RETVALUE(numAllocRbs);
22842 * @brief Handles RB allocation for Resource allocation type 2
22846 * Function : rgSCHCmnDlRaType2Alloc
22848 * Invoking Module Processing:
22849 * - This function is invoked for DL RB allocation for resource allocation
22852 * Processing Steps:
22853 * - Determine the available positions in the mask
22854 * - Allocate best-fit consecutive RBs.
22855 * - Update RA Type2, RA type 1 and RA type 0 masks.
22857 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22858 * @param[in] U8 rbsReq
22859 * @param[in] RgSchBwRbgInfo *rbgInfo
22860 * @param[out] U8 *rbStart
22861 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22862 * @param[in] Bool isPartialAlloc
22865 * Number of allocated RBs
22869 PUBLIC U8 rgSCHCmnDlRaType2Alloc
22871 RgSchDlSfAllocInfo *allocedInfo,
22873 RgSchBwRbgInfo *rbgInfo,
22875 RgSchDlSfAllocInfo *resAllocInfo,
22876 Bool isPartialAlloc
22879 PUBLIC U8 rgSCHCmnDlRaType2Alloc(allocedInfo, rbsReq, rbgInfo, rbStart,
22880 resAllocInfo, isPartialAlloc)
22881 RgSchDlSfAllocInfo *allocedInfo;
22883 RgSchBwRbgInfo *rbgInfo;
22885 RgSchDlSfAllocInfo *resAllocInfo;
22886 Bool isPartialAlloc;
22889 U8 numAllocRbs = 0;
22891 U8 rbgSize = rbgInfo->rbgSize;
22892 U32 *rbgMask = &resAllocInfo->raType0Mask;
22893 #ifdef RGSCH_SPS_UNUSED
22894 U32 *raType1Mask = resAllocInfo->raType1Mask;
22896 U32 *raType2Mask = resAllocInfo->raType2Mask;
22897 #ifdef RGSCH_SPS_UNUSED
22898 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22900 U32 *allocedMask = allocedInfo->raType2Mask;
22902 /* Note: This function attempts only full allocation */
22903 rgSCHCmnDlGetBestFitHole(allocedMask, rbgInfo->numRbs,
22904 raType2Mask, rbsReq, rbStart, &numAllocRbs, isPartialAlloc);
22907 /* Update the allocation in RA type 0 and RA type 1 masks */
22908 U8 rbCnt = numAllocRbs;
22909 #ifdef RGSCH_SPS_UNUSED
22918 /* Update RBG mask for RA type 0 allocation */
22919 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22920 *rbgMask |= ueRaType0Mask;
22922 #ifdef RGSCH_SPS_UNUSED
22923 /* Update RBG mask for RA type 1 */
22924 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22925 raType1Mask[rbgSubset] |= ueRaType1Mask;
22926 raType1UsedRbs[rbgSubset]++;
22928 /* Update the counters */
22934 RETVALUE(numAllocRbs);
22938 * @brief Determines RA type 0 mask from given RB index.
22942 * Function : rgSCHCmnGetRaType0Mask
22945 * Processing Steps:
22946 * - Determine RA Type 0 mask for the given rbIdx and RBG size.
22948 * @param[in] U8 rbIdx
22949 * @param[in] U8 rbgSize
22950 * @return U32 RA type 0 mask
22953 PRIVATE U32 rgSCHCmnGetRaType0Mask
22959 PRIVATE U32 rgSCHCmnGetRaType0Mask(rbIdx, rbgSize)
22965 U32 rbgPosInRbgMask = 0;
22967 rbg = rbIdx/rbgSize;
22968 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22970 RETVALUE(rbgPosInRbgMask);
22973 #ifdef RGSCH_SPS_UNUSED
22975 * @brief Determines RA type 1 mask from given RB index.
22979 * Function : rgSCHCmnGetRaType1Mask
22982 * Processing Steps:
22983 * - Determine RA Type 1 mask for the given rbIdx and RBG size.
22985 * @param[in] U8 rbIdx
22986 * @param[in] U8 rbgSize
22987 * @param[out] U8 *type1Subset
22988 * @return U32 RA type 1 mask
22991 PRIVATE U32 rgSCHCmnGetRaType1Mask
22998 PRIVATE U32 rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, type1Subset)
23004 U8 rbg, rbgSubset, rbgInSubset, offset, rbInSubset;
23007 rbg = rbIdx/rbgSize;
23008 rbgSubset = rbg % rbgSize;
23009 rbgInSubset = rbg/rbgSize;
23010 offset = rbIdx % rbgSize;
23011 rbInSubset = rbgInSubset * rbgSize + offset;
23012 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
23014 *type1Subset = rbgSubset;
23015 RETVALUE(rbPosInSubset);
23017 #endif /* RGSCH_SPS_UNUSED */
23019 * @brief Determines RA type 2 mask from given RB index.
23023 * Function : rgSCHCmnGetRaType2Mask
23026 * Processing Steps:
23027 * - Determine RA Type 2 mask for given rbIdx and rbg size.
23029 * @param[in] U8 rbIdx
23030 * @param[out] U8 *maskIdx
23031 * @return U32 RA type 2 mask
23034 PRIVATE U32 rgSCHCmnGetRaType2Mask
23040 PRIVATE U32 rgSCHCmnGetRaType2Mask(rbIdx, maskIdx)
23047 *maskIdx = rbIdx / 32;
23048 rbPosInType2 = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbIdx % 32);
23050 RETVALUE(rbPosInType2);
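/* Worked example (illustrative): with 32-bit mask words, an RB index is split into a
 * word index and a bit position exactly as above, e.g. rbIdx = 37 gives
 * maskIdx = 37 / 32 = 1 and bit position 37 % 32 = 5 within that word (the actual bit
 * placement depends on the RG_SCH_CMN_DL_GET_POS_FRM_LSB macro). Sketch, assuming
 * LSB-first numbering:
 *
 *   maskIdx = rbIdx / 32;
 *   bitPos  = rbIdx % 32;
 *   raType2Mask[maskIdx] |= (1U << bitPos);
 */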
23054 * @brief Performs resource allocation for a non-SPS UE in SPS bandwidth
23058 * Function : rgSCHCmnAllocUeInSpsBw
23061 * Processing Steps:
23062 * - Determine allocation for the UE.
23063 * - Use resource allocation type 0, 1 and 2 for allocation
23064 * within maximum SPS bandwidth.
23066 * @param[in] RgSchDlSf *dlSf
23067 * @param[in] RgSchCellCb *cell
23068 * @param[in] RgSchUeCb *ue
23069 * @param[in] RgSchDlRbAlloc *rbAllocInfo
23070 * @param[in] Bool isPartialAlloc
23076 PUBLIC Bool rgSCHCmnAllocUeInSpsBw
23081 RgSchDlRbAlloc *rbAllocInfo,
23082 Bool isPartialAlloc
23085 PUBLIC Bool rgSCHCmnAllocUeInSpsBw(dlSf, cell, ue, rbAllocInfo, isPartialAlloc)
23089 RgSchDlRbAlloc *rbAllocInfo;
23090 Bool isPartialAlloc;
23093 U8 rbgSize = cell->rbgSize;
23094 U8 numAllocRbs = 0;
23095 U8 numAllocRbgs = 0;
23097 U8 idx, noLyr, iTbs;
23098 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
23099 RgSchDlSfAllocInfo *dlSfAlloc = &rbAllocInfo->dlSf->dlSfAllocInfo;
23100 RgSchBwRbgInfo *spsRbgInfo = &cell->spsBwRbgInfo;
23102 /* SPS_FIX : Check if this Hq proc is scheduled */
23103 if ((0 == rbAllocInfo->tbInfo[0].schdlngForTb) &&
23104 (0 == rbAllocInfo->tbInfo[1].schdlngForTb))
23109 /* Check if the requirement can be accommodated in SPS BW */
23110 if (dlSf->spsAllocdBw == spsRbgInfo->numRbs)
23112 /* SPS Bandwidth has been exhausted: no further allocations possible */
23115 if (!isPartialAlloc)
23117 if((dlSf->spsAllocdBw + rbAllocInfo->rbsReq) > spsRbgInfo->numRbs)
23123 /* Perform allocation for RA type 0 if rbsReq is multiple of RBG size (also
23124 * if RBG size = 1) */
23125 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23127 rbAllocInfo->rbsReq += (rbgSize - rbAllocInfo->rbsReq % rbgSize);
23128 numAllocRbgs = rgSCHCmnDlRaType0Alloc(dlSfAlloc,
23129 rbAllocInfo->rbsReq, spsRbgInfo, &numAllocRbs,
23130 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23132 #ifdef RGSCH_SPS_UNUSED
23133 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23135 /* If no RBS could be allocated, attempt RA TYPE 1 */
23137 numAllocRbs = rgSCHCmnDlRaType1Alloc(dlSfAlloc,
23138 rbAllocInfo->rbsReq, spsRbgInfo, (U8)dlSfAlloc->nxtRbgSubset,
23139 &rbAllocInfo->allocInfo.raType1.rbgSubset,
23140 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23144 dlSfAlloc->nxtRbgSubset =
23145 (rbAllocInfo->allocInfo.raType1.rbgSubset + 1 ) % rbgSize;
23149 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23151 numAllocRbs = rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23152 rbAllocInfo->rbsReq, spsRbgInfo,
23153 &rbStart, &rbAllocInfo->resAllocInfo, isPartialAlloc);
23160 if (!(rbAllocInfo->pdcch =
23161 rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi,\
23162 rbAllocInfo->dciFormat, FALSE)))
23164 /* Note: Returning TRUE since PDCCH might be available for another UE */
23168 /* Update Tb info for each scheduled TB */
23169 iTbs = rbAllocInfo->tbInfo[0].iTbs;
23170 noLyr = rbAllocInfo->tbInfo[0].noLyr;
23171 rbAllocInfo->tbInfo[0].bytesAlloc =
23172 rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;
23174 if (rbAllocInfo->tbInfo[1].schdlngForTb)
23176 iTbs = rbAllocInfo->tbInfo[1].iTbs;
23177 noLyr = rbAllocInfo->tbInfo[1].noLyr;
23178 rbAllocInfo->tbInfo[1].bytesAlloc =
23179 rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;
23182 /* Update rbAllocInfo with the allocation information */
23183 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23185 rbAllocInfo->allocInfo.raType0.dlAllocBitMask =
23186 rbAllocInfo->resAllocInfo.raType0Mask;
23187 rbAllocInfo->allocInfo.raType0.numDlAlloc = numAllocRbgs;
23189 #ifdef RGSCH_SPS_UNUSED
23190 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23192 rbAllocInfo->allocInfo.raType1.dlAllocBitMask =
23193 rbAllocInfo->resAllocInfo.raType1Mask[rbAllocInfo->allocInfo.raType1.rbgSubset];
23194 rbAllocInfo->allocInfo.raType1.numDlAlloc = numAllocRbs;
23195 rbAllocInfo->allocInfo.raType1.shift = 0;
23198 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23200 rbAllocInfo->allocInfo.raType2.isLocal = TRUE;
23201 rbAllocInfo->allocInfo.raType2.rbStart = rbStart;
23202 rbAllocInfo->allocInfo.raType2.numRb = numAllocRbs;
23205 rbAllocInfo->rbsAlloc = numAllocRbs;
23206 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
23208 /* Update allocation masks for RA types 0, 1 and 2 in DL SF */
23210 /* Update type 0 allocation mask */
23211 dlSfAlloc->raType0Mask |= rbAllocInfo->resAllocInfo.raType0Mask;
23212 #ifdef RGSCH_SPS_UNUSED
23213 /* Update type 1 allocation masks */
23214 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
23216 dlSfAlloc->raType1Mask[idx] |= rbAllocInfo->resAllocInfo.raType1Mask[idx];
23217 dlSfAlloc->raType1UsedRbs[idx] +=
23218 rbAllocInfo->resAllocInfo.raType1UsedRbs[idx];
23221 /* Update type 2 allocation masks */
23222 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
23224 dlSfAlloc->raType2Mask[idx] |= rbAllocInfo->resAllocInfo.raType2Mask[idx];
23227 dlSf->spsAllocdBw += numAllocRbs;
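/* Illustrative note (hedged): the bytesAlloc computations above follow the common
 * pattern in this file of reading the transport block size table indexed by
 * (number of layers - 1, iTbs, allocated PRBs - 1) and converting bits to bytes:
 *
 *   bytes = rgTbSzTbl[noLyr - 1][iTbs][numPrb - 1] / 8;
 */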
23231 /***********************************************************
23233 * Func : rgSCHCmnDlGetBestFitHole
23236 * Desc : Converts the best fit hole into allocation and returns the
23237 * allocation information.
23247 **********************************************************/
23249 PRIVATE Void rgSCHCmnDlGetBestFitHole
23253 U32 *crntAllocMask,
23257 Bool isPartialAlloc
23260 PRIVATE Void rgSCHCmnDlGetBestFitHole (allocMask, numMaskRbs,
23261 crntAllocMask, rbsReq, allocStart, allocNumRbs, isPartialAlloc)
23264 U32 *crntAllocMask;
23268 Bool isPartialAlloc;
23271 U8 maskSz = (numMaskRbs + 31)/32;
23272 U8 maxMaskPos = (numMaskRbs % 32);
23273 U8 maskIdx, maskPos;
23274 U8 numAvailRbs = 0;
23275 U8 bestAvailNumRbs = 0;
23276 S8 bestStartPos = -1;
23278 U32 tmpMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23279 U32 bestMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23281 *allocNumRbs = numAvailRbs;
23284 for (maskIdx = 0; maskIdx < maskSz; ++maskIdx)
23287 if (maskIdx == (maskSz - 1))
23289 if (numMaskRbs % 32)
23291 maxMaskPos = numMaskRbs % 32;
23294 for (maskPos = 0; maskPos < maxMaskPos; ++maskPos)
23296 if (!(allocMask[maskIdx] & (1 << (31 - maskPos))))
23298 tmpMask[maskIdx] |= (1 << (31 - maskPos));
23299 if (startPos == -1)
23301 startPos = maskIdx * 32 + maskPos;
23304 if (numAvailRbs == rbsReq)
23306 *allocStart = (U8)startPos;
23307 *allocNumRbs = rbsReq;
23313 if (numAvailRbs > bestAvailNumRbs)
23315 bestAvailNumRbs = numAvailRbs;
23316 bestStartPos = startPos;
23317 cmMemcpy((U8 *)bestMask, (U8 *) tmpMask, 4 * sizeof(U32));
23321 cmMemset((U8 *)tmpMask, 0, 4 * sizeof(U32));
23324 if (*allocNumRbs == rbsReq)
23330 if (*allocNumRbs == rbsReq)
23332 /* Convert the hole into allocation */
23333 cmMemcpy((U8 *)crntAllocMask, (U8 *) tmpMask, 4 * sizeof(U32));
23338 if (bestAvailNumRbs && isPartialAlloc)
23340 /* Partial allocation could have been done */
23341 *allocStart = (U8)bestStartPos;
23342 *allocNumRbs = bestAvailNumRbs;
23343 /* Convert the hole into allocation */
23344 cmMemcpy((U8 *)crntAllocMask, (U8 *) bestMask, 4 * sizeof(U32));
23350 #endif /* LTEMAC_SPS */
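/* Illustrative sketch (hedged, not the original implementation): the best-fit hole
 * search above scans the allocation bitmask for runs of free positions, returns early
 * if a run satisfies the full request and otherwise remembers the longest run seen.
 * A simplified single-word version, assuming numRbs <= 32 and that bit i set means
 * RB i is already allocated:
 *
 *   static U8 bestFitHoleSketch(U32 used, U8 numRbs, U8 rbsReq, U8 *start)
 *   {
 *      U8 i, runLen = 0, runStart = 0, bestLen = 0, bestStart = 0;
 *      for (i = 0; i < numRbs; ++i)
 *      {
 *         if (!(used & (1U << i)))
 *         {
 *            if (runLen == 0) runStart = i;
 *            if (++runLen == rbsReq) { *start = runStart; return rbsReq; }
 *         }
 *         else
 *         {
 *            if (runLen > bestLen) { bestLen = runLen; bestStart = runStart; }
 *            runLen = 0;
 *         }
 *      }
 *      if (runLen > bestLen) { bestLen = runLen; bestStart = runStart; }
 *      *start = bestStart;
 *      return bestLen;
 *   }
 */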
23352 /***************************************************************************
23354 * NON-DLFS Allocation functions
23356 * *************************************************************************/
23360 * @brief Function to find out code rate
23364 * Function : rgSCHCmnFindCodeRate
23366 * Processing Steps:
23368 * @param[in] RgSchCellCb *cell
23369 * @param[in] RgSchDlSf *dlSf
23370 * @param[in,out] RgSchDlRbAlloc *allocInfo
23374 PRIVATE Void rgSCHCmnFindCodeRate
23378 RgSchDlRbAlloc *allocInfo,
23382 PRIVATE Void rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,idx)
23385 RgSchDlRbAlloc *allocInfo;
23394 /* Adjust the iMcs and bytes allocated with respect to the adjusted
23395 RBs - here we find the iMcs by identifying the first highest
23396 number of bits compared to the original bytes allocated. */
23398 * @brief Adjust IMCS according to tbSize and ITBS
23402 * Function : rgSCHCmnNonDlfsPbchTbImcsAdj
23404 * Processing Steps:
23405 * - Adjust Imcs according to tbSize and ITBS.
23407 * @param[in,out] RgSchDlRbAlloc *allocInfo
23408 * @param[in] U8 *idx
23412 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj
23415 RgSchDlRbAlloc *allocInfo,
23420 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj(cell,allocInfo, idx, rbsReq)
23422 RgSchDlRbAlloc *allocInfo;
23432 RgSchDlSf *dlSf = allocInfo->dlSf;
23434 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
23435 noLyrs = allocInfo->tbInfo[idx].noLyr;
23437 if((allocInfo->raType == RG_SCH_CMN_RA_TYPE0))
23439 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + dlSf->lstRbgDfct), cell->rbgSize);
23440 noRbs = (noRbgs * cell->rbgSize) - dlSf->lstRbgDfct;
23444 noRbs = allocInfo->rbsReq;
23447 /* This check handles the case where tbs is zero and a reduction in MCS is not possible */
23448 if (allocInfo->rbsReq == 0 )
23452 origBytesReq = rgTbSzTbl[noLyrs - 1][tbs][rbsReq - 1]/8;
23454 /* Find the iTbs and iMcs by identifying the first highest
23455 number of bits compared to the original bytes allocated. */
23458 if(((rgTbSzTbl[noLyrs - 1][0][noRbs - 1])/8) < origBytesReq)
23460 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyrs - 1], tbs);
23461 while(((rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1])/8) > origBytesReq)
23470 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1]/8;
23471 allocInfo->tbInfo[idx].iTbs = tbs;
23472 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23477 /* Added function to adjust the TB size */
23479 * @brief Function to adjust the TB size for subframes 0 and 5 when
23480 * RB allocation adjustment by adding the extra required RBs was not possible
23484 * Function : rgSCHCmnNonDlfsPbchTbSizeAdj
23486 * Processing Steps:
23488 * @param[in,out] RgSchDlRbAlloc *allocInfo
23489 * @param[in] U8 numOvrlapgPbchRb
23490 * @param[in] U8 idx
23491 * @param[in] U8 pbchSsRsSym
23495 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj
23497 RgSchDlRbAlloc *allocInfo,
23498 U8 numOvrlapgPbchRb,
23504 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,idx,bytesReq)
23505 RgSchDlRbAlloc *allocInfo;
23506 U8 numOvrlapgPbchRb;
23512 U32 reducedTbs = 0;
23516 noLyrs = allocInfo->tbInfo[idx].noLyr;
23518 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
23520 reducedTbs = bytesReq - (((U32)numOvrlapgPbchRb * (U32)pbchSsRsSym * 6)/8);
23522 /* Find the iTbs and iMcs by identifying the first highest
23523 number of bits compared with the reduced bits, considering the bits that are
23524 reserved for PBCH/PSS/SSS */
23525 if(((rgTbSzTbl[noLyrs - 1][0][allocInfo->rbsReq - 1])/8) < reducedTbs)
23527 while(((rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1])/8) > reducedTbs)
23536 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1]/8;
23537 allocInfo->tbInfo[idx].iTbs = tbs;
23538 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23543 /* Added this function to find num of ovrlapping PBCH rb*/
23545 * @brief Function to find out how many additional rbs are available
23546 * in the entire bw which can be allocated to a UE
23549 * Function : rgSCHCmnFindNumAddtlRbsAvl
23551 * Processing Steps:
23552 * - Calculates the number of additional RBs available
23554 * @param[in] RgSchCellCb *cell
23555 * @param[in] RgSchDlSf *dlSf
23556 * @param[in,out] RgSchDlRbAlloc *allocInfo
23557 * @param[out] U8 addtlRbsAvl
23561 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl
23565 RgSchDlRbAlloc *allocInfo
23568 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl(cell,dlSf,allocInfo)
23571 RgSchDlRbAlloc *allocInfo;
23574 U8 addtlRbsAvl = 0;
23576 TRC2(rgSCHCmnFindNumAddtlRbsAvl)
23578 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23580 addtlRbsAvl = (((dlSf->type0End - dlSf->type2End + 1)*\
23581 cell->rbgSize) - dlSf->lstRbgDfct) - allocInfo->rbsReq;
23583 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23585 addtlRbsAvl = (dlSf->bw - dlSf->bwAlloced) - allocInfo->rbsReq;
23588 RETVALUE(addtlRbsAvl);
23591 /* Added this function to find num of ovrlapping PBCH rb*/
23593 * @brief Function to find out how many of the requested RBs are
23594 * falling in the center 6 RBs of the downlink bandwidth.
23597 * Function : rgSCHCmnFindNumPbchOvrlapRbs
23599 * Processing Steps:
23600 * - Calculates the number of overlapping RBs
23602 * @param[in] RgSchCellCb *cell
23603 * @param[in] RgSchDlSf *dlSf
23604 * @param[in,out] RgSchDlRbAlloc *allocInfo
23605 * @param[out] U8* numOvrlapgPbchRb
23609 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs
23613 RgSchDlRbAlloc *allocInfo,
23614 U8 *numOvrlapgPbchRb
23617 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,numOvrlapgPbchRb)
23620 RgSchDlRbAlloc *allocInfo;
23621 U8 *numOvrlapgPbchRb;
23624 *numOvrlapgPbchRb = 0;
23625 TRC2(rgSCHCmnFindNumPbchOvrlapRbs)
23626 /* Find whether we have already crossed the start boundary of the 6 PBCH RBs;
23627 * if so, find the number of RBs of this allocation that overlap
23628 * with them. */
23629 if(dlSf->bwAlloced <= (cell->pbchRbStart))
23631 /* We have not crossed the start boundary of the PBCH RBs. Now we need
23632 * to know, if we take this allocation, how many PBCH RBs
23633 * would overlap with it. */
23634 /* Find out the overlapping RBs in the centre 6 RBs */
23635 if((dlSf->bwAlloced + allocInfo->rbsReq) > cell->pbchRbStart)
23637 *numOvrlapgPbchRb = (dlSf->bwAlloced + allocInfo->rbsReq) - (cell->pbchRbStart);
23638 if(*numOvrlapgPbchRb > 6)
23639 *numOvrlapgPbchRb = 6;
23642 else if ((dlSf->bwAlloced > (cell->pbchRbStart)) &&
23643 (dlSf->bwAlloced < (cell->pbchRbEnd)))
23645 /* We have already crossed the start boundary of the PBCH RBs. We need to
23646 * find, if we take this allocation, how many of its RBs
23647 * will overlap with the PBCH RBs. */
23648 /* Find out the overlapping RBs in the centre 6 RBs */
23649 if(dlSf->bwAlloced + allocInfo->rbsReq < (cell->pbchRbEnd))
23651 /* Even if we take this allocation, we do not cross the
23652 * end boundary of the 6 PBCH RBs. */
23653 *numOvrlapgPbchRb = allocInfo->rbsReq;
23657 /* If we take this allocation, we cross the
23658 * end boundary of the 6 PBCH RBs. */
23659 *numOvrlapgPbchRb = (cell->pbchRbEnd) - dlSf->bwAlloced;
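/* Worked example (illustrative, with assumed values): suppose the 6 PBCH RBs span
 * pbchRbStart = 22 to pbchRbEnd = 28 in a 50 RB cell, bwAlloced = 20 and rbsReq = 5.
 * Since 20 + 5 = 25 crosses pbchRbStart, the overlap is 25 - 22 = 3 RBs; the count is
 * capped at 6 because at most the whole PBCH region can overlap. Sketch of the first
 * branch above:
 *
 *   if ((bwAlloced + rbsReq) > pbchRbStart)
 *   {
 *      numOvrlapgPbchRb = (bwAlloced + rbsReq) - pbchRbStart;
 *      if (numOvrlapgPbchRb > 6)
 *      {
 *         numOvrlapgPbchRb = 6;
 *      }
 *   }
 */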
23666 * @brief Performs RB allocation adjustment if the requested RBs are
23667 * falling in the center 6 RBs of the downlink bandwidth.
23670 * Function : rgSCHCmnNonDlfsPbchRbAllocAdj
23672 * Processing Steps:
23673 * - Allocate consecutively available RBs.
23675 * @param[in] RgSchCellCb *cell
23676 * @param[in,out] RgSchDlRbAlloc *allocInfo
23677 * @param[in] U8 pbchSsRsSym
23681 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj
23684 RgSchDlRbAlloc *allocInfo,
23689 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo,pbchSsRsSym)
23691 RgSchDlRbAlloc *allocInfo;
23696 RgSchDlSf *dlSf = allocInfo->dlSf;
23697 U8 numOvrlapgPbchRb = 0;
23698 U8 numOvrlapgAdtlPbchRb = 0;
23700 U8 addtlRbsReq = 0;
23701 U8 moreAddtlRbsReq = 0;
23702 U8 addtlRbsAdd = 0;
23703 U8 moreAddtlRbsAdd = 0;
23711 TRC2(rgSCHCmnNonDlfsPbchRbAllocAdj);
23714 origRbsReq = allocInfo->rbsReq;
23715 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23717 totSym = (cell->isCpDlExtend) ? RGSCH_TOT_NUM_SYM_EXTCP : RGSCH_TOT_NUM_SYM_NORCP;
23719 /* Additional RBs are allocated by considering the loss due to
23720 the reserved symbols for CFICH, PBCH, PSS, SSS and cell specific RS */
23722 divResult = (numOvrlapgPbchRb * pbchSsRsSym)/totSym;
23723 if((numOvrlapgPbchRb * pbchSsRsSym) % totSym)
23727 addtlRbsReq = divResult;
23729 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, addtlRbsReq, addtlRbsAdd)
23731 /* Now the RBs required is the originally requested RBs plus these additional
23732 * RBs to make up for PSS/SSS/BCCH. */
23733 allocInfo->rbsReq = allocInfo->rbsReq + addtlRbsAdd;
23735 /* Check whether the additional RBs we have taken also fall within the
23736 * PBCH RB range; if so, we need to account for
23737 * PSS/SSS/BCCH for these additional RBs too. */
23738 if(addtlRbsAdd && ((dlSf->bwAlloced + allocInfo->rbsReq - addtlRbsAdd) < (cell->pbchRbEnd)))
23740 if((dlSf->bwAlloced + allocInfo->rbsReq) <= (cell->pbchRbEnd))
23742 /* With the additional RBs taken into account, we are not crossing the
23743 * PBCH RB end boundary. Thus we only need to account for the
23744 * overlapping PBCH RBs among these additional RBs. */
23745 divResult = (addtlRbsAdd * pbchSsRsSym)/totSym;
23746 if((addtlRbsAdd * pbchSsRsSym) % totSym)
23751 moreAddtlRbsReq = divResult;
23753 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23755 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
23760 /* Here we have crossed the PBCH RB end boundary, so we need to take
23761 * into account the overlapping RBs for the additional RBs, which will be
23762 * a subset of addtlRbs. */
23763 numOvrlapgAdtlPbchRb = (cell->pbchRbEnd) - ((dlSf->bwAlloced + allocInfo->rbsReq) - addtlRbsAdd);
23765 divResult = (numOvrlapgAdtlPbchRb * pbchSsRsSym)/totSym;
23766 if((numOvrlapgAdtlPbchRb * pbchSsRsSym) % totSym)
23771 moreAddtlRbsReq = divResult;
23773 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23775 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
23778 if (isBcchPcch == TRUE)
23783 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23786 /* This case might be for Imcs value 6 and NPrb = 1 case - Not
23787 Adjusting either RBs or Imcs or Bytes Allocated */
23788 allocInfo->rbsReq = allocInfo->rbsReq - addtlRbsAdd - moreAddtlRbsAdd;
23790 else if(tbs && ((0 == addtlRbsAdd) && (moreAddtlRbsAdd == 0)))
23792 /* When the entire bandwidth is already occupied
23793 * and we don't have room to add additional RBs, then in order to decrease the
23794 * code rate we reduce the TB size: the presently calculated
23795 * TB size is reduced by the number of bytes that PBCH/PSS/SSS would occupy in the
23796 * overlapping RBs, and the nearest TB size below this deduced value is chosen. */
23798 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23800 noLyr = allocInfo->tbInfo[0].noLyr;
23801 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyr - 1], tbs);
23802 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23804 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,bytesReq);
23806 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23808 noLyr = allocInfo->tbInfo[1].noLyr;
23809 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23810 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,bytesReq);
23814 else if(tbs && ((addtlRbsAdd != addtlRbsReq) ||
23815 (addtlRbsAdd && (moreAddtlRbsReq != moreAddtlRbsAdd))))
23817 /* When we were not able to add the required number of
23818 * additional RBs, we adjust the iMcs based on the originally requested RBs.
23819 * Doing this compensates for the few extra RBs we did add, but in order
23820 * to compensate for the RBs we couldn't add, we again do the TB size adjustment. */
23822 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23824 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23826 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23829 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23830 numOvrlapgPbchRb = numOvrlapgPbchRb - (addtlRbsAdd + moreAddtlRbsAdd);
23832 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,allocInfo->tbInfo[0].bytesReq);
23834 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23836 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,allocInfo->tbInfo[1].bytesReq);
23842 /* We hit this code when we were able to add the required additional RBs,
23843 * hence we adjust the iMcs based on the originally requested RBs. */
23845 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23847 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23849 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23854 } /* end of rgSCHCmnNonDlfsPbchRbAllocAdj */
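/* Illustrative note (hedged): the extra-RB computation in the function above is a
 * ceiling division - the resource elements lost to PBCH/PSS/SSS and reference signals
 * are divided by the usable capacity per RB, and any remainder rounds the request up
 * by one RB. Sketch of that rounding step:
 *
 *   addtlRbsReq = (numOvrlapgPbchRb * pbchSsRsSym) / totSym;
 *   if ((numOvrlapgPbchRb * pbchSsRsSym) % totSym)
 *   {
 *      addtlRbsReq++;                 -- round up on any remainder
 *   }
 */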
23858 * @brief Performs RB allocation for frequency non-selective cell.
23862 * Function : rgSCHCmnNonDlfsCmnRbAlloc
23864 * Processing Steps:
23865 * - Allocate consecutively available RBs for BCCH/PCCH/RAR.
23867 * @param[in] RgSchCellCb *cell
23868 * @param[in, out] RgSchDlRbAlloc *allocInfo
23874 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc
23877 RgSchDlRbAlloc *allocInfo
23880 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc(cell, allocInfo)
23882 RgSchDlRbAlloc *allocInfo;
23888 U8 pbchSsRsSym = 0;
23891 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
23893 RgSchDlSf *dlSf = allocInfo->dlSf;
23896 U8 spsRbsAlloc = 0;
23897 RgSchDlSfAllocInfo *dlSfAlloc = &allocInfo->dlSf->dlSfAllocInfo;
23899 TRC2(rgSCHCmnNonDlfsCmnRbAlloc);
23901 allocInfo->tbInfo[0].noLyr = 1;
23904 /* Note: Initialize the masks to 0; this might not be needed since allocInfo
23905 * is initialized to 0 at the beginning of allocation */
23906 allocInfo->resAllocInfo.raType0Mask = 0;
23907 cmMemset((U8*)allocInfo->resAllocInfo.raType1Mask, 0,
23908 RG_SCH_NUM_RATYPE1_32BIT_MASK * sizeof (U32));
23909 cmMemset((U8*)allocInfo->resAllocInfo.raType2Mask, 0,
23910 RG_SCH_NUM_RATYPE2_32BIT_MASK * sizeof (U32));
23912 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
23913 (dlSf->bwAlloced == dlSf->bw))
23915 if(dlSf->bwAlloced == dlSf->bw)
23921 if (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced))
23924 if ((allocInfo->tbInfo[0].imcs < 29) && (dlSf->bwAlloced < dlSf->bw))
23926 if(allocInfo->tbInfo[0].imcs < 29)
23929 /* set the remaining RBs for the requested UE */
23930 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
23931 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23932 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[0][tbs][allocInfo->rbsReq - 1]/8;
23937 /* Attempt RA Type 2 allocation in SPS Bandwidth */
23938 if (dlSf->spsAllocdBw < cell->spsBwRbgInfo.numRbs)
23941 rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23942 allocInfo->rbsReq, &cell->spsBwRbgInfo, &rbStart,
23943 &allocInfo->resAllocInfo, FALSE);
23944 /* rbsAlloc assignment moved from line 16671 to here to avoid
23945 * compilation error. Recheck */
23946 dlSf->spsAllocdBw += spsRbsAlloc;
23949 #endif /* LTEMAC_SPS */
23957 /* Update allocation information */
23958 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
23959 if (allocInfo->pdcch == NULLP)
23963 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
23964 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
23965 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
23966 allocInfo->allocInfo.raType2.isLocal = TRUE;
23970 allocInfo->allocInfo.raType2.rbStart = rbStart;
23971 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
23972 allocInfo->rbsAlloc = allocInfo->rbsReq;
23983 if(!(dlSf->sfNum == 5))
23985 /* case for subframes 1 to 9 except 5 */
23987 allocInfo->allocInfo.raType2.rbStart = rbStart;
23989 /*Fix for ccpu00123918*/
23990 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
23995 pbchFrame = 1; /* case for subframe 5 */
23996 /* In subframe 5, symbols are reserved for PSS, SSS, CFICH
23997 and Cell Specific Reference Signals */
23998 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PSS_SSS_SYM) *
23999 RGSCH_NUM_SC_IN_RB + cell->numCellRSPerSf);
24005 /* In subframe 0, symbols are reserved for PSS, SSS, PBCH, CFICH
24006 and Cell Specific Reference Signals */
24007 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PBCH_SYM +
24008 RGSCH_NUM_PSS_SSS_SYM) * RGSCH_NUM_SC_IN_RB +
24009 cell->numCellRSPerSf);
24010 } /* end of outer else */
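/* Illustrative example (values assumed purely for illustration): with
 * currCfi = 1, RGSCH_NUM_PSS_SSS_SYM = 2, RGSCH_NUM_PBCH_SYM = 4,
 * RGSCH_NUM_SC_IN_RB = 12 and numCellRSPerSf = 16, subframe 0 would give
 * pbchSsRsSym = (1 + 4 + 2) * 12 + 16 = 100, while subframe 5 (no PBCH term)
 * would give (1 + 2) * 12 + 16 = 52. This overhead estimate is what
 * rgSCHCmnNonDlfsPbchRbAllocAdj() compensates for below. */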
24013 (((dlSf->bwAlloced + allocInfo->rbsReq) - cell->pbchRbStart) > 0)&&
24014 (dlSf->bwAlloced < cell->pbchRbEnd))
24016 if(allocInfo->tbInfo[0].imcs < 29)
24018 rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo, pbchSsRsSym, TRUE);
24030 /*Fix for ccpu00123918*/
24031 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24032 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24033 allocInfo->rbsAlloc = allocInfo->rbsReq;
24035 /* LTE_ADV_FLAG_REMOVED_START */
24037 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
24039 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
24040 allocInfo->allocInfo.raType2.rbStart, \
24041 allocInfo->allocInfo.raType2.numRb);
24046 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
24047 allocInfo->allocInfo.raType2.rbStart, \
24048 allocInfo->allocInfo.raType2.numRb);
24054 /* LTE_ADV_FLAG_REMOVED_END */
24055 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24062 /* Update type 0, 1 and 2 masks */
24063 dlSfAlloc->raType0Mask |= allocInfo->resAllocInfo.raType0Mask;
24064 #ifdef RGSCH_SPS_UNUSED
24065 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
24067 dlSfAlloc->raType1Mask[idx] |=
24068 allocInfo->resAllocInfo.raType1Mask[idx];
24069 dlSfAlloc->raType1UsedRbs[idx] +=
24070 allocInfo->resAllocInfo.raType1UsedRbs[idx];
24073 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
24075 dlSfAlloc->raType2Mask[idx] |=
24076 allocInfo->resAllocInfo.raType2Mask[idx];
24086 * @brief Performs RB allocation for frequency non-selective cell.
24090 * Function : rgSCHCmnNonDlfsCmnRbAllocRar
24092 * Processing Steps:
24093 * - Allocate consecutively available RBs for RAR.
24095 * @param[in] RgSchCellCb *cell
24096 * @param[in, out] RgSchDlRbAlloc *allocInfo
24102 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAllocRar
24105 RgSchDlRbAlloc *allocInfo
24108 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAllocRar(cell, allocInfo)
24110 RgSchDlRbAlloc *allocInfo;
24114 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
24115 RgSchDlSf *dlSf = allocInfo->dlSf;
24116 TRC2(rgSCHCmnNonDlfsCmnRbAllocRar);
24119 if(dlSf->bwAlloced == dlSf->bw)
24124 allocInfo->tbInfo[0].noLyr = 1;
24126 /* Update allocation information */
24127 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
24128 if (allocInfo->pdcch == NULLP)
24132 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
24133 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
24134 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
24135 allocInfo->allocInfo.raType2.isLocal = TRUE;
24137 /*Fix for ccpu00123918*/
24138 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24139 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24140 allocInfo->rbsAlloc = allocInfo->rbsReq;
24142 /* LTE_ADV_FLAG_REMOVED_END */
24143 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24146 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, NULLP, dlSf, 13, TFU_DCI_FORMAT_B1, FALSE);
24147 if (allocInfo->pdcch == NULLP)
24151 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
24152 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
24154 printf("5GTF_ERROR vrbg allocated > 25\n");
24158 allocInfo->tbInfo[0].cmnGrnt.vrbgStart = beamInfo->vrbgStart;
24159 allocInfo->tbInfo[0].cmnGrnt.numVrbg = allocInfo->vrbgReq;
24161 /* Update allocation information */
24162 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
24164 allocInfo->tbInfo[0].cmnGrnt.xPDSCHRange = 1;
24165 allocInfo->tbInfo[0].cmnGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
24166 allocInfo->tbInfo[0].cmnGrnt.vrbgStart, allocInfo->tbInfo[0].cmnGrnt.numVrbg);
24168 allocInfo->tbInfo[0].cmnGrnt.rbStrt = (allocInfo->tbInfo[0].cmnGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
24169 allocInfo->tbInfo[0].cmnGrnt.numRb = (allocInfo->tbInfo[0].cmnGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
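/* Illustrative example (values assumed for illustration only): with
 * vrbgStart = 2 and numVrbg = 3, and assuming MAX_5GTF_VRBG_SIZE = 4,
 * the grant above spans rbStrt = 8 and numRb = 12, i.e. PRBs 8..19;
 * rgSCHCmnCalcRiv() packs the (start, length) pair into the single
 * rbAssign field carried in the DCI. */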
24171 beamInfo->vrbgStart += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24172 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24173 allocInfo->tbInfo[0].cmnGrnt.rv = 0;
24174 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24177 printf("\n[%s],allocInfo->tbInfo[0].bytesAlloc:%u,vrbgReq:%u\n",
24178 __func__,allocInfo->tbInfo[0].bytesAlloc,allocInfo->vrbgReq);
24184 /* LTE_ADV_FLAG_REMOVED_START */
24187 * @brief To check if DL BW is available for non-DLFS allocation in SFR pools.
24191 * Function : rgSCHCmnNonDlfsSFRBwAvlbl
24193 * Processing Steps:
24194 * - Determine availability based on RA Type.
24196 * @param[in] RgSchCellCb *cell
24197 * @param[in] RgSchDlSf *dlSf
24198 * @param[in] RgSchDlRbAlloc *allocInfo
24205 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl
24208 RgSchSFRPoolInfo **sfrpoolInfo,
24210 RgSchDlRbAlloc *allocInfo,
24214 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl(cell, sfrpoolInfo, dlSf, allocInfo, isUeCellEdge)
24216 RgSchSFRPoolInfo **sfrpoolInfo;
24218 RgSchDlRbAlloc *allocInfo;
24226 RgSchSFRPoolInfo *sfrPool;
24227 RgSchSFRPoolInfo *sfrCEPool;
24231 RgSchSFRPoolInfo *poolWithMaxAvlblBw = NULLP;
24233 U32 addtnlPRBs = 0;
24235 if (dlSf->bw <= dlSf->bwAlloced)
24237 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
24238 "BW is fully allocated for subframe (%d) CRNTI:%d", dlSf->sfNum,allocInfo->rnti);
24242 if (dlSf->sfrTotalPoolInfo.ccBwFull == TRUE)
24244 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24245 "BW is fully allocated for CC Pool CRNTI:%d",allocInfo->rnti);
24249 if ((dlSf->sfrTotalPoolInfo.ceBwFull == TRUE) && (isUeCellEdge))
24251 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24252 "BW is fully allocated for CE Pool CRNTI:%d",allocInfo->rnti);
24256 /* We first check whether the scheduled UE is cell edge or cell centre and accordingly check the available
24257 bandwidth in its pool. If a cell centre UE doesn't have BW available in its pool, then it will check
24258 BW availability in the cell edge pool, but the other way around is NOT possible. */
24261 l = &dlSf->sfrTotalPoolInfo.cePool;
24265 l = &dlSf->sfrTotalPoolInfo.ccPool;
24268 n = cmLListFirst(l);
24272 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24274 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24276 /* MS_FIX for ccpu00123919 : Number of RBs in case of RETX should be same as that of initial transmission. */
24277 if(allocInfo->tbInfo[0].tbCb->txCntr)
24279 /* If RB assignment is being done for a RETX: if reqRbs is a multiple of rbgSize then ignore lstRbgDfct; if reqRbs is
24280 * not a multiple of rbgSize then check if lstRbgDfct exists */
24281 if (allocInfo->rbsReq % cell->rbgSize == 0)
24283 if ((sfrPool->type2End == dlSf->type2End) && dlSf->lstRbgDfct)
24285 /* In this scenario we are wasting the last RBG for this dlSf */
24286 sfrPool->type0End--;
24287 sfrPool->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24289 dlSf->lstRbgDfct = 0;
24291 /*ABHINAV To check if these variables need to be taken care of*/
24293 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24298 if (dlSf->lstRbgDfct)
24300 /* Check if type0 allocation can cater to this RETX requirement */
24301 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24307 if (sfrPool->type2End != dlSf->type2End) /*Search again for a pool which has the end RBG of the bandwidth*/
24315 /* cannot allocate same number of required RBs */
24321 /*rg002.301 ccpu00120391 MOD condition is modified appropriately to find if rbsReq is less than available RBs*/
24322 if(allocInfo->rbsReq <= (((sfrPool->type0End - sfrPool->type2End + 1)*\
24323 cell->rbgSize) - dlSf->lstRbgDfct))
24325 *sfrpoolInfo = sfrPool;
24330 if (sfrPool->bw <= sfrPool->bwAlloced + cell->rbgSize)
24332 n = cmLListNext(l);
24333 /* If the UE is cell centre then it simply checks the bandwidth available in the next pool.
24334 But if there are no more pools available, then the cell centre UE tries to look for bandwidth in the cell edge pool */
24336 if((!isUeCellEdge) && (!n->node))
24338 l = &dlSf->sfrTotalPoolInfo.cePool;
24339 n = cmLListFirst(l);
24345 /* MS_FIX: Number of RBs in case of RETX should be same as that of initial transmission */
24346 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24348 /*rg002.301 ccpu00120391 MOD setting the remaining RBs for the requested UE*/
24349 allocInfo->rbsReq = (((sfrPool->type0End - sfrPool->type2End + 1)*\
24350 cell->rbgSize) - dlSf->lstRbgDfct);
24351 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24352 noLyrs = allocInfo->tbInfo[0].noLyr;
24353 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24354 *sfrpoolInfo = sfrPool;
24359 n = cmLListNext(l);
24361 /* If the UE is cell centre then it simply checks the bandwidth available in the next pool.
24362 But if there are no more pools available, then the cell centre UE tries to look for bandwidth in the cell edge pool */
24363 if((!isUeCellEdge) && (!n->node))
24365 l = &dlSf->sfrTotalPoolInfo.cePool;
24366 n = cmLListFirst(l);
24372 // RETVALUE(FALSE);
24375 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24377 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24378 /* This is a case where a UE was CC and had more RBs allocated than are present in the CE pool.
24379 If this UE then becomes CE while a retx is ongoing, the CE pool BW is not sufficient for the retx */
24380 if ((isUeCellEdge) &&
24381 (allocInfo->tbInfo[0].tbCb->txCntr != 0))
24383 if(allocInfo->rbsReq > (sfrPool->bw - sfrPool->bwAlloced))
24385 /* Adjust CE BW such that Retx alloc is successful */
24386 /* Check if merging CE with adjacent CC pool will be sufficient to process Retx */
24388 /* If no Type 0 allocations are made from this pool */
24389 if (sfrPool->type0End == (((sfrPool->poolendRB + 1) / cell->rbgSize) - 1))
24391 if (sfrPool->adjCCPool &&
24392 (sfrPool->adjCCPool->type2Start == sfrPool->poolendRB + 1) &&
24393 (allocInfo->rbsReq <= ((sfrPool->bw - sfrPool->bwAlloced) +
24394 ((sfrPool->adjCCPool->bw - sfrPool->adjCCPool->bwAlloced)))))
24396 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24398 /* Adjusting CE Pool Info */
24399 sfrPool->bw += addtnlPRBs;
24400 sfrPool->type0End = ((sfrPool->poolendRB + addtnlPRBs + 1) /
24401 cell->rbgSize) - 1;
24403 /* Adjusting CC Pool Info */
24404 sfrPool->adjCCPool->type2Start += addtnlPRBs;
24405 sfrPool->adjCCPool->type2End = RGSCH_CEIL(sfrPool->adjCCPool->type2Start,
24407 sfrPool->adjCCPool->bw -= addtnlPRBs;
24408 *sfrpoolInfo = sfrPool;
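/* Illustrative example (assumed values): if rbsReq = 10 and the CE pool has
 * only 6 unallocated RBs, addtnlPRBs = 4 are borrowed from the adjacent CC
 * pool above: the CE pool bw and type0End grow to cover 4 more RBs, while
 * the CC pool's type2Start advances and its bw shrinks by the same 4 RBs,
 * so the RETX can be placed contiguously. */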
24415 /* Check if CC pool is one of the following:
24416 * 1. |CE| + |CC "CCPool2Exists" = TRUE|
24417 * 2. |CC "CCPool2Exists" = FALSE| + |CE| + |CC "CCPool2Exists" = TRUE|
24419 if(TRUE == sfrPool->CCPool2Exists)
24421 l1 = &dlSf->sfrTotalPoolInfo.cePool;
24422 n1 = cmLListFirst(l1);
24423 sfrCEPool = (RgSchSFRPoolInfo*)(n1->node);
24424 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced))
24426 *sfrpoolInfo = sfrCEPool;
24429 else if(allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24431 *sfrpoolInfo = sfrPool;
24434 /* Check if CE and CC boundary has unallocated prbs */
24435 else if ((sfrPool->poolstartRB == sfrPool->type2Start) &&
24436 (sfrCEPool->type0End == ((sfrCEPool->poolendRB + 1) / cell->rbgSize) - 1))
24438 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced) +
24439 (sfrPool->bw - sfrPool->bwAlloced))
24441 /* Checking if BW can be allocated partly from CE pool and partly
24444 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24445 /* Updating CE and CC type2 parameters based on the RBs allocated
24446 * from these pools*/
24447 sfrPool->type2Start -= addtnlPRBs;
24448 sfrPool->type2End = RGSCH_CEIL(sfrPool->type2Start, cell->rbgSize);
24449 sfrPool->bw += addtnlPRBs;
24450 if (addtnlPRBs == (sfrCEPool->bw - sfrCEPool->bwAlloced))
24452 sfrCEPool->bwAlloced = sfrCEPool->bw;
24453 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24457 sfrCEPool->bw -= addtnlPRBs;
24458 sfrCEPool->type0End = ((sfrCEPool->poolendRB + 1 - addtnlPRBs) / cell->rbgSize) - 1;
24460 *sfrpoolInfo = sfrPool;
24463 else if ( bwAvlbl <
24464 ((sfrCEPool->bw - sfrCEPool->bwAlloced) +
24465 (sfrPool->bw - sfrPool->bwAlloced)))
24467 /* All the Prbs from CE BW shall be allocated */
24468 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24470 sfrPool->type2Start = sfrCEPool->type2Start;
24471 sfrPool->bw += sfrCEPool->bw - sfrCEPool->bwAlloced;
24472 sfrCEPool->type2Start = sfrCEPool->poolendRB + 1;
24473 sfrCEPool->bwAlloced = sfrCEPool->bw;
24474 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24476 /* set the remaining RBs for the requested UE */
24477 allocInfo->rbsReq = (sfrPool->bw - sfrPool->bwAlloced);
24478 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24479 noLyrs = allocInfo->tbInfo[0].noLyr;
24480 allocInfo->tbInfo[0].bytesReq =
24481 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24482 *sfrpoolInfo = sfrPool;
24493 /* Checking if no. of RBs required can be allocated from
24495 * 1. If available return the SFR pool.
24496 * 2. Else update the RBs required parameter based on the
24497 * BW available in the pool
24498 * 3. Return FALSE if no B/W is available.
24500 if (allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24502 *sfrpoolInfo = sfrPool;
24507 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24509 if (bwAvlbl < sfrPool->bw - sfrPool->bwAlloced)
24513 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24515 bwAvlbl = sfrPool->bw - sfrPool->bwAlloced;
24516 poolWithMaxAvlblBw = sfrPool;
24518 n = cmLListNext(l);
24520 if ((isUeCellEdge == FALSE) && (n == NULLP))
24522 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24524 l = &dlSf->sfrTotalPoolInfo.cePool;
24525 n = cmLListFirst(l);
24535 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24539 dlSf->sfrTotalPoolInfo.ccBwFull = TRUE;
24545 /* set the remaining RBs for the requested UE */
24546 allocInfo->rbsReq = poolWithMaxAvlblBw->bw -
24547 poolWithMaxAvlblBw->bwAlloced;
24548 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24549 noLyrs = allocInfo->tbInfo[0].noLyr;
24550 allocInfo->tbInfo[0].bytesReq =
24551 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24552 *sfrpoolInfo = poolWithMaxAvlblBw;
24559 n = cmLListNext(l);
24561 if ((isUeCellEdge == FALSE) && (n == NULLP))
24563 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24565 l = &dlSf->sfrTotalPoolInfo.cePool;
24566 n = cmLListFirst(l);
24581 #endif /* end of ifndef LTE_TDD*/
24582 /* LTE_ADV_FLAG_REMOVED_END */
24585 * @brief To check if DL BW is available for non-DLFS allocation.
24589 * Function : rgSCHCmnNonDlfsBwAvlbl
24591 * Processing Steps:
24592 * - Determine availability based on RA Type.
24594 * @param[in] RgSchCellCb *cell
24595 * @param[in] RgSchDlSf *dlSf
24596 * @param[in] RgSchDlRbAlloc *allocInfo
24603 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl
24607 RgSchDlRbAlloc *allocInfo
24610 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl(cell, dlSf, allocInfo)
24613 RgSchDlRbAlloc *allocInfo;
24618 U8 ignoredDfctRbg = FALSE;
24620 TRC2(rgSCHCmnNonDlfsBwAvlbl);
24621 if (dlSf->bw <= dlSf->bwAlloced)
24623 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId, "(%d:%d)FAILED CRNTI:%d",
24624 dlSf->bw, dlSf->bwAlloced,allocInfo->rnti);
24627 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24629 /* Fix for ccpu00123919 : Number of RBs in case of RETX should be same as
24630 * that of initial transmission. */
24631 if(allocInfo->tbInfo[0].tbCb->txCntr)
24633 /* If RB assignment is being done for a RETX: if reqRbs is
24634 * a multiple of rbgSize then ignore lstRbgDfct; if reqRbs is
24635 * not a multiple of rbgSize then check if lstRbgDfct exists */
24636 if (allocInfo->rbsReq % cell->rbgSize == 0)
24638 if (dlSf->lstRbgDfct)
24640 /* In this scenario we are wasting the last RBG for this dlSf */
24643 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24644 /* Fix: MUE_PERTTI_DL */
24645 dlSf->lstRbgDfct = 0;
24646 ignoredDfctRbg = TRUE;
24652 if (dlSf->lstRbgDfct)
24654 /* Check if type0 allocation can cater to this RETX requirement */
24655 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24662 /* cannot allocate same number of required RBs */
24668 /* Condition is modified appropriately to find
24669 * if rbsReq is less than available RBs*/
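/* Illustrative example (assumed values): with type0End = 20, type2End = 5,
 * rbgSize = 3 and lstRbgDfct = 1, the check below allows up to
 * (20 - 5 + 1) * 3 - 1 = 47 RBs to be granted from the type-0 region. */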
24670 if(allocInfo->rbsReq <= (((dlSf->type0End - dlSf->type2End + 1)*\
24671 cell->rbgSize) - dlSf->lstRbgDfct))
24675 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24676 * allocation in TDD when requested RBs are more than available RBs*/
24679 /* MS_WORKAROUND for ccpu00122022 */
24680 if (dlSf->bw < dlSf->bwAlloced + cell->rbgSize)
24682 /* ccpu00132358- Re-assigning the values which were updated above
24683 * if it is RETX and Last RBG available*/
24684 if(ignoredDfctRbg == TRUE)
24687 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24688 dlSf->lstRbgDfct = 1;
24694 /* Fix: Number of RBs in case of RETX should be same as
24695 * that of initial transmission. */
24696 if(allocInfo->tbInfo[0].tbCb->txCntr == 0
24698 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24702 /* Setting the remaining RBs for the requested UE*/
24703 allocInfo->rbsReq = (((dlSf->type0End - dlSf->type2End + 1)*\
24704 cell->rbgSize) - dlSf->lstRbgDfct);
24705 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24706 noLyrs = allocInfo->tbInfo[0].noLyr;
24707 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24708 /* DwPts Scheduling Changes Start */
24710 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24712 allocInfo->tbInfo[0].bytesReq =
24713 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24716 /* DwPts Scheduling Changes End */
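/* Added note (descriptive): for a special subframe carrying data (DwPTS),
 * only roughly 3/4 of the normal RB capacity is assumed usable, so the TB
 * size above is re-read from the table with max(rbsReq * 3/4, 1) RBs. */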
24720 /* ccpu00132358- Re-assigning the values which were updated above
24721 * if it is RETX and Last RBG available*/
24722 if(ignoredDfctRbg == TRUE)
24725 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24726 dlSf->lstRbgDfct = 1;
24729 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "FAILED for CRNTI:%d",
24731 printf ("RB Alloc failed for LAA TB type 0\n");
24737 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24739 if (allocInfo->rbsReq <= (dlSf->bw - dlSf->bwAlloced))
24743 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24744 * allocation in TDD when requested RBs are more than available RBs*/
24747 /* Fix: Number of RBs in case of RETX should be same as
24748 * that of initial transmission. */
24749 if((allocInfo->tbInfo[0].tbCb->txCntr == 0)
24751 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24755 /* set the remaining RBs for the requested UE */
24756 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
24757 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24758 noLyrs = allocInfo->tbInfo[0].noLyr;
24759 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24760 /* DwPts Scheduling Changes Start */
24762 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24764 allocInfo->tbInfo[0].bytesReq =
24765 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24768 /* DwPts Scheduling Changes End */
24772 printf ("RB Alloc failed for LAA TB type 2\n");
24773 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24776 /* Fix: Number of RBs in case of RETX should be same as
24777 * that of initial transmission. */
24781 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24784 /* LTE_ADV_FLAG_REMOVED_START */
24787 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24791 * Function : rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24793 * Processing Steps:
24795 * @param[in] RgSchCellCb *cell
24796 * @param[in] RgSchDlSf *dlSf
24797 * @param[in] U8 rbStrt
24798 * @param[in] U8 numRb
24803 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24811 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
24820 RgSchSFRPoolInfo *sfrPool;
24821 TRC2(rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc);
24823 l = &dlSf->sfrTotalPoolInfo.ccPool;
24825 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24826 dlSf->bwAlloced += numRb;
24827 dlSf->type2Start += numRb;
24828 n = cmLListFirst(l);
24832 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24833 n = cmLListNext(l);
24835 /* If the pool contains some of the RBs allocated in this allocation, e.g. the pool is [30..50], pool->type2Start is 40 and dlSf->type2Start is 45, then update the variables in the pool */
24836 if((sfrPool->poolendRB >= dlSf->type2Start) && (sfrPool->type2Start < dlSf->type2Start))
24838 sfrPool->type2End = dlSf->type2End;
24839 sfrPool->bwAlloced = dlSf->type2Start - sfrPool->poolstartRB;
24840 sfrPool->type2Start = dlSf->type2Start;
24844 /* If the pool contains all RBs allocated in this allocation*/
24845 if(dlSf->type2Start > sfrPool->poolendRB)
24847 sfrPool->type2End = sfrPool->type0End + 1;
24848 sfrPool->bwAlloced = sfrPool->bw;
24849 sfrPool->type2Start = sfrPool->poolendRB + 1;
24854 if (l != &dlSf->sfrTotalPoolInfo.cePool)
24856 l = &dlSf->sfrTotalPoolInfo.cePool;
24857 n = cmLListFirst(l);
24867 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24871 * Function : rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24873 * Processing Steps:
24875 * @param[in] RgSchCellCb *cell
24876 * @param[in] RgSchDlSf *dlSf
24877 * @param[in] U8 rbStrt
24878 * @param[in] U8 numRb
24883 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24892 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc(cell, ue, dlSf, rbStrt, numRb)
24902 RgSchSFRPoolInfo *sfrCCPool1 = NULL;
24903 RgSchSFRPoolInfo *sfrCCPool2 = NULL;
24906 TRC2(rgSCHCmnNonDlfsUpdDSFRTyp2Alloc);
24907 /* Move the type2End pivot forward */
24910 l = &dlSf->sfrTotalPoolInfo.ccPool;
24911 n = cmLListFirst(l);
24914 sfrCCPool1 = (RgSchSFRPoolInfo*)(n->node);
24916 if (sfrCCPool1 == NULLP)
24918 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24919 "sfrCCPool1 is NULL for CRNTI:%d",ue->ueId);
24922 n = cmLListNext(l);
24925 sfrCCPool2 = (RgSchSFRPoolInfo*)(n->node);
24926 n = cmLListNext(l);
24928 if((sfrCCPool1) && (sfrCCPool2))
24930 /* Based on RNTP info, the CC user is assigned high power on a per-subframe basis */
24931 if(((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24932 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb)) ||
24933 ((dlSf->type2Start >= sfrCCPool2->pwrHiCCRange.startRb) &&
24934 (dlSf->type2Start + numRb < sfrCCPool2->pwrHiCCRange.endRb)))
24936 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24938 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24939 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24942 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24943 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
24950 if((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24951 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb))
24953 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24955 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24956 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24959 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24960 "rgSCHCmnBuildRntpInfo() function returned RFAILED CRNTI:%d",ue->ueId);
24966 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24968 dlSf->bwAlloced += numRb;
24969 /*MS_FIX for ccpu00123918*/
24970 dlSf->type2Start += numRb;
24974 #endif /* end of ifndef LTE_TDD*/
24975 /* LTE_ADV_FLAG_REMOVED_END */
24977 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24981 * Function : rgSCHCmnNonDlfsUpdTyp2Alloc
24983 * Processing Steps:
24985 * @param[in] RgSchCellCb *cell
24986 * @param[in] RgSchDlSf *dlSf
24987 * @param[in] U8 rbStrt
24988 * @param[in] U8 numRb
24993 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc
25001 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
25008 TRC2(rgSCHCmnNonDlfsUpdTyp2Alloc);
25009 /* Move the type2End pivot forward */
25010 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25011 //#ifndef LTEMAC_SPS
25012 dlSf->bwAlloced += numRb;
25013 /*Fix for ccpu00123918*/
25014 dlSf->type2Start += numRb;
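/* Illustrative example (assumed values): with rbgSize = 4, rbStrt = 8 and
 * numRb = 6, the pivot type2End becomes RGSCH_CEIL(14, 4) = 4 in RBG units
 * (assuming RGSCH_CEIL is ceiling division), while type2Start and bwAlloced
 * each advance by the 6 allocated RBs. */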
25020 * @brief To do DL allocation using TYPE0 RA.
25024 * Function : rgSCHCmnNonDlfsType0Alloc
25026 * Processing Steps:
25027 * - Perform TYPE0 allocation using the RBGs between
25028 * type0End and type2End.
25029 * - Build the allocation mask as per RBG positioning.
25030 * - Update the allocation parameters.
25032 * @param[in] RgSchCellCb *cell
25033 * @param[in] RgSchDlSf *dlSf
25034 * @param[in] RgSchDlRbAlloc *allocInfo
25039 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
25043 RgSchDlRbAlloc *allocInfo,
25047 PRIVATE Void rgSCHCmnNonDlfsType0Alloc(cell, dlSf, allocInfo, dlUe)
25050 RgSchDlRbAlloc *allocInfo;
25054 U32 dlAllocMsk = 0;
25055 U8 rbgFiller = dlSf->lstRbgDfct;
25056 U8 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25057 //U8 noRbgs = (allocInfo->rbsReq + rbgFiller)/ cell->rbgSize;
25061 U32 tb1BytesAlloc = 0;
25062 U32 tb2BytesAlloc = 0;
25063 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25065 TRC2(rgSCHCmnNonDlfsType0Alloc);
25066 //if(noRbgs == 0) noRbgs = 1; /* Not required as ceiling is used above*/
25068 /* Fix for ccpu00123919*/
25069 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25070 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25076 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
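/* Illustrative example (assumed values): with rbsReq = 10, rbgFiller
 * (lstRbgDfct) = 1 and rbgSize = 3, noRbgs = RGSCH_CEIL(11, 3) = 4 and
 * noRbs = 4 * 3 - 1 = 11, i.e. one RB more than requested; the UE-category
 * checks below trim this back if any limit is exceeded. */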
25079 /* Fix for ccpu00138701: Ceiling is used to derive the num of RBGs. Therefore,
25080 * after this operation, check that the max TB size and max RBs are not crossed;
25081 * if they are crossed then decrement the num of RBGs. */
25082 //if((noRbs + rbgFiller) % cell->rbgSize)
25083 if((noRbs > allocInfo->rbsReq) &&
25084 (allocInfo->rbsReq + rbgFiller) % cell->rbgSize)
25085 {/* considering ue category limitation
25086 * due to ceiling */
25089 if (rgSCHLaaIsLaaTB(allocInfo)== FALSE)
25092 if ((allocInfo->tbInfo[0].schdlngForTb) && (!allocInfo->tbInfo[0].tbCb->txCntr))
25094 iTbs = allocInfo->tbInfo[0].iTbs;
25095 noLyr = allocInfo->tbInfo[0].noLyr;
25096 tb1BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25099 if ((allocInfo->tbInfo[1].schdlngForTb) && (!allocInfo->tbInfo[1].tbCb->txCntr))
25101 iTbs = allocInfo->tbInfo[1].iTbs;
25102 noLyr = allocInfo->tbInfo[1].noLyr;
25103 tb2BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25107 /* Only check for a new TX; no need for a RETX */
25108 if (tb1BytesAlloc || tb2BytesAlloc)
25110 if (( ue->dl.aggTbBits >= dlUe->maxTbBits) ||
25111 (tb1BytesAlloc >= dlUe->maxTbSz/8) ||
25112 (tb2BytesAlloc >= dlUe->maxTbSz/8) ||
25113 (noRbs >= dlUe->maxRb))
25119 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
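/* Added note (descriptive): the checks above cap a fresh allocation to the
 * UE category limits, i.e. aggregate TB bits, per-TB size (maxTbSz is assumed
 * to be in bits, hence the /8 comparison against bytes) and maximum RBs,
 * after which noRbs is recomputed from the reduced noRbgs. */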
25123 /* type0End would have been initialized (during subframe init) to the bit position
25124 * (cell->noOfRbgs - 1), 0 being the most significant.
25125 * Build the dlAllocMsk for noRbgs at the appropriate position */
25126 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - dlSf->type0End));
25127 /* Move backwards the type0End pivot */
25128 dlSf->type0End -= noRbgs;
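/* Illustrative example (assumed values): with type0End = 12 and noRbgs = 3,
 * dlAllocMsk |= 0x7 << (31 - 12) = 0x00380000, which marks RBGs 10..12
 * (bit 31 corresponds to RBG 0), and type0End moves back to 9. */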
25129 /*Fix for ccpu00123919*/
25130 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25131 /* Update the bwAlloced field accordingly */
25132 //#ifndef LTEMAC_SPS /* ccpu00129474*/
25133 dlSf->bwAlloced += noRbs;
25135 /* Update Type0 Alloc Info */
25136 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25137 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25138 allocInfo->rbsAlloc = noRbs;
25140 /* Update Tb info for each scheduled TB */
25141 iTbs = allocInfo->tbInfo[0].iTbs;
25142 noLyr = allocInfo->tbInfo[0].noLyr;
25143 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25144 * RETX TB Size is same as Init TX TB Size */
25145 if (allocInfo->tbInfo[0].tbCb->txCntr)
25147 allocInfo->tbInfo[0].bytesAlloc =
25148 allocInfo->tbInfo[0].bytesReq;
25152 allocInfo->tbInfo[0].bytesAlloc =
25153 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25154 /* DwPts Scheduling Changes Start */
25156 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25158 allocInfo->tbInfo[0].bytesAlloc =
25159 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25162 /* DwPts Scheduling Changes End */
25165 if (allocInfo->tbInfo[1].schdlngForTb)
25167 iTbs = allocInfo->tbInfo[1].iTbs;
25168 noLyr = allocInfo->tbInfo[1].noLyr;
25169 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25170 * RETX TB Size is same as Init TX TB Size */
25171 if (allocInfo->tbInfo[1].tbCb->txCntr)
25173 allocInfo->tbInfo[1].bytesAlloc =
25174 allocInfo->tbInfo[1].bytesReq;
25178 allocInfo->tbInfo[1].bytesAlloc =
25179 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25180 /* DwPts Scheduling Changes Start */
25182 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25184 allocInfo->tbInfo[1].bytesAlloc =
25185 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25188 /* DwPts Scheduling Changes End */
25192 /* The last RBG, which can be smaller than the RBG size, is considered
25193 * only for the first-time allocation of a TYPE0 UE */
25194 dlSf->lstRbgDfct = 0;
25200 * @brief To prepare RNTP value from the PRB allocation (P-High -> 1 and P-Low -> 0)
25204 * Function : rgSCHCmnBuildRntpInfo
25206 * Processing Steps:
25208 * @param[in] U8 *rntpPtr
25209 * @param[in] U8 startRb
25210 * @param[in] U8 numRb
25215 PRIVATE S16 rgSCHCmnBuildRntpInfo
25224 PRIVATE S16 rgSCHCmnBuildRntpInfo(cell, rntpPtr, startRb, nmbRb, bw)
25232 U16 rbPtrStartIdx; /* Start index of the octet buffer to be filled */
25233 U16 rbPtrEndIdx; /* End index of the octet buffer to be filled */
25234 U16 rbBitLoc; /* Bit location to be set to 1 in the current byte */
25235 U16 nmbRbPerByte; /* PRBs to be set in the current byte (in case of multiple bytes) */
25237 TRC2(rgSCHCmnBuildRntpInfo);
25239 rbPtrStartIdx = (startRb)/8;
25240 rbPtrEndIdx = (startRb + nmbRb)/8;
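/* Illustrative example (assumed values): startRb = 5, nmbRb = 6 gives
 * rbPtrStartIdx = 0 and rbPtrEndIdx = 1; the loop below first sets bits
 * 5..7 of octet 0 (mask 0xE0) and then bits 0..2 of octet 1 (mask 0x07),
 * i.e. PRBs 5..10 are flagged as P-High in the RNTP bitmap. */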
25242 if (rntpPtr == NULLP)
25244 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
25245 "rgSCHCmnBuildRntpInfo():"
25246 "rntpPtr can't be NULLP (Memory Allocation Failed)");
25250 while(rbPtrStartIdx <= rbPtrEndIdx)
25252 rbBitLoc = (startRb)%8;
25254 /* case 1: startRb and endRb lie in the same byte */
25255 if (rbPtrStartIdx == rbPtrEndIdx)
25257 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25258 | (((1<<nmbRb)-1)<<rbBitLoc);
25261 /* case 2: startRb and endRb lie in different bytes */
25262 if (rbPtrStartIdx != rbPtrEndIdx)
25264 nmbRbPerByte = 8 - rbBitLoc;
25265 nmbRb = nmbRb - nmbRbPerByte;
25266 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25267 | (((1<<nmbRbPerByte)-1)<<rbBitLoc);
25268 startRb = startRb + nmbRbPerByte;
25274 /* dsfr_pal_fixes ** 21-March-2013 ** SKS ** Adding Debug logs */
25276 /* dsfr_pal_fixes ** 25-March-2013 ** SKS ** Adding Debug logs to print RNTP */
25283 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
25287 * Function : rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25289 * Processing Steps:
25291 * @param[in] RgSchCellCb *cell
25292 * @param[in] RgSchDlSf *dlSf
25293 * @param[in] U8 rbStrt
25294 * @param[in] U8 numRb
25299 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25304 RgSchSFRPoolInfo *sfrPool,
25309 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrPool, rbStrt, numRb)
25313 RgSchSFRPoolInfo *sfrPool;
25322 TRC2(rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc);
25323 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25324 sfrPool->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25327 dlSf->type2Start += numRb;
25328 dlSf->bwAlloced += numRb;
25330 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
25332 /* Based on RNTP info, the CC user is assigned high power on a per-subframe basis */
25333 if(FALSE == ue->lteAdvUeCb.rgrLteAdvUeCfg.isUeCellEdge)
25335 if((sfrPool->type2Start >= sfrPool->pwrHiCCRange.startRb) &&
25336 (sfrPool->type2Start + numRb < sfrPool->pwrHiCCRange.endRb))
25338 ue->lteAdvUeCb.isCCUePHigh = TRUE;
25340 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25341 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25344 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25345 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
25352 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25353 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25356 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25357 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
25362 sfrPool->type2Start += numRb;
25363 sfrPool->bwAlloced += numRb;
25370 * @brief To do DL allocation using TYPE0 RA.
25374 * Function : rgSCHCmnNonDlfsSFRPoolType0Alloc
25376 * Processing Steps:
25377 * - Perform TYPE0 allocation using the RBGs between type0End and type2End.
25378 * - Build the allocation mask as per RBG positioning.
25379 * - Update the allocation parameters.
25381 * @param[in] RgSchCellCb *cell
25382 * @param[in] RgSchDlSf *dlSf
25383 * @param[in] RgSchDlRbAlloc *allocInfo
25388 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc
25392 RgSchSFRPoolInfo *poolInfo,
25393 RgSchDlRbAlloc *allocInfo
25396 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, poolInfo, allocInfo)
25399 RgSchSFRPoolInfo *poolInfo;
25400 RgSchDlRbAlloc *allocInfo;
25403 U32 dlAllocMsk = 0;
25410 TRC2(rgSCHCmnNonDlfsSFRPoolType0Alloc);
25412 if (poolInfo->poolstartRB + poolInfo->bw == dlSf->bw)
25414 if (poolInfo->type0End == dlSf->bw/4)
25416 rbgFiller = dlSf->lstRbgDfct;
25417 /* The last RBG, which can be smaller than the RBG size, is considered
25418 * only for the first-time allocation of a TYPE0 UE */
25419 dlSf->lstRbgDfct = 0;
25423 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25425 /* Abhinav to-do start */
25426 /* MS_FIX for ccpu00123919*/
25427 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25428 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25434 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25436 /* Abhinav to-do end */
25440 /* type0End would have been initialized (during subframe init) to the bit position
25441 * (cell->noOfRbgs - 1), 0 being the most significant.
25442 * Build the dlAllocMsk for noRbgs at the appropriate position */
25443 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - poolInfo->type0End));
25444 /* Move backwards the type0End pivot */
25445 poolInfo->type0End -= noRbgs;
25446 /*MS_FIX for ccpu00123919*/
25447 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25448 /* Update the bwAlloced field accordingly */
25449 poolInfo->bwAlloced += noRbs + dlSf->lstRbgDfct;
25450 dlSf->bwAlloced += noRbs + dlSf->lstRbgDfct;
25452 /* Update Type0 Alloc Info */
25453 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25454 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25455 allocInfo->rbsAlloc = noRbs;
25457 /* Update Tb info for each scheduled TB */
25458 iTbs = allocInfo->tbInfo[0].iTbs;
25459 noLyr = allocInfo->tbInfo[0].noLyr;
25460 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25461 * RETX TB Size is same as Init TX TB Size */
25462 if (allocInfo->tbInfo[0].tbCb->txCntr)
25464 allocInfo->tbInfo[0].bytesAlloc =
25465 allocInfo->tbInfo[0].bytesReq;
25469 allocInfo->tbInfo[0].bytesAlloc =
25470 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25473 if (allocInfo->tbInfo[1].schdlngForTb)
25475 iTbs = allocInfo->tbInfo[1].iTbs;
25476 noLyr = allocInfo->tbInfo[1].noLyr;
25477 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25478 * RETX TB Size is same as Init TX TB Size */
25479 if (allocInfo->tbInfo[1].tbCb->txCntr)
25481 allocInfo->tbInfo[1].bytesAlloc =
25482 allocInfo->tbInfo[1].bytesReq;
25486 allocInfo->tbInfo[1].bytesAlloc =
25487 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25491 /* The last RBG, which can be smaller than the RBG size, is considered
25492 * only for the first-time allocation of a TYPE0 UE */
25493 dlSf->lstRbgDfct = 0;
25498 * @brief Computes RNTP Info for a subframe.
25502 * Function : rgSCHCmnNonDlfsDsfrRntpComp
25504 * Processing Steps:
25505 * - Computes RNTP info from individual pools.
25507 * @param[in] RgSchDlSf *dlSf
25513 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp
25519 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp(cell, dlSf)
25524 PRIVATE U16 samples = 0;
25526 U16 bwBytes = (dlSf->bw-1)/8;
25527 RgrLoadInfIndInfo *rgrLoadInf;
25531 TRC2(rgSCHCmnNonDlfsDsfrRntpComp);
25533 len = (dlSf->bw % 8 == 0) ? dlSf->bw/8 : dlSf->bw/8 + 1;
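/* Illustrative example (assumed value): for dlSf->bw = 50 PRBs,
 * len = 50/8 + 1 = 7 octets, matching the 0..bwBytes (= 6) aggregation
 * loop below. */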
25535 /* RNTP info is ORed every TTI and the sample is stored in cell control block */
25536 for(i = 0; i <= bwBytes; i++)
25538 cell->rntpAggrInfo.val[i] |= dlSf->rntpInfo.val[i];
25540 samples = samples + 1;
25541 /* After every 1000 ms, the RNTP info is sent to the application, to be forwarded to all neighbouring eNBs,
25542 informing them about the load indication for cell edge users */
25543 if(RG_SCH_MAX_RNTP_SAMPLES == samples)
25546 ret = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&rgrLoadInf,
25547 sizeof(RgrLoadInfIndInfo));
25550 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
25551 "allocate memory for sending LoadInfo");
25555 rgrLoadInf->u.rntpInfo.pres = cell->rntpAggrInfo.pres;
25556 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25557 rgrLoadInf->u.rntpInfo.len = len;
25559 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25560 rgrLoadInf->u.rntpInfo.val = cell->rntpAggrInfo.val;
25561 rgrLoadInf->cellId = cell->cellId;
25563 /* dsfr_pal_fixes ** 22-March-2013 ** SKS */
25564 rgrLoadInf->bw = dlSf->bw;
25565 rgrLoadInf->type = RGR_SFR;
25567 ret = rgSCHUtlRgrLoadInfInd(cell, rgrLoadInf);
25570 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsDsfrRntpComp():"
25571 "rgSCHUtlRgrLoadInfInd() returned RFAILED");
25574 cmMemset(cell->rntpAggrInfo.val,0,len);
25578 /* LTE_ADV_FLAG_REMOVED_END */
25580 /* LTE_ADV_FLAG_REMOVED_START */
25582 * @brief Performs RB allocation per UE from a pool.
25586 * Function : rgSCHCmnSFRNonDlfsUeRbAlloc
25588 * Processing Steps:
25589 * - Allocate consecutively available RBs.
25591 * @param[in] RgSchCellCb *cell
25592 * @param[in] RgSchUeCb *ue
25593 * @param[in] RgSchDlSf *dlSf
25594 * @param[out] U8 *isDlBwAvail
25601 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc
25609 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25617 RgSchDlRbAlloc *allocInfo;
25618 RgSchCmnDlUe *dlUe;
25620 RgSchSFRPoolInfo *sfrpoolInfo = NULLP;
25622 TRC2(rgSCHCmnSFRNonDlfsUeRbAlloc);
25624 isUECellEdge = RG_SCH_CMN_IS_UE_CELL_EDGE(ue);
25626 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25627 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25628 *isDlBwAvail = TRUE;
25630 /*Find which pool is available for this UE*/
25631 if (rgSCHCmnNonDlfsSFRBwAvlbl(cell, &sfrpoolInfo, dlSf, allocInfo, isUECellEdge) != TRUE)
25633 /* SFR_FIX - If this is a CE UE, there may still be BW available in the CC pool,
25634 so CC UEs will be scheduled */
25637 *isDlBwAvail = TRUE;
25641 *isDlBwAvail = FALSE;
25646 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX || dlUe->proc->tbInfo[1].isAckNackDtx)
25648 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25652 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25655 if (!(allocInfo->pdcch))
25657 /* Returning ROK since PDCCH might be available for another UE and further allocations could be done */
25662 allocInfo->rnti = ue->ueId;
25665 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
25667 allocInfo->allocInfo.raType2.isLocal = TRUE;
25668 /* rg004.201 patch - ccpu00109921 fix end */
25669 /* MS_FIX for ccpu00123918*/
25670 allocInfo->allocInfo.raType2.rbStart = (U8)sfrpoolInfo->type2Start;
25671 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
25672 /* rg007.201 - Changes for MIMO feature addition */
25673 /* rg008.201 - Removed dependency on MIMO compile-time flag */
25674 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrpoolInfo, \
25675 allocInfo->allocInfo.raType2.rbStart, \
25676 allocInfo->allocInfo.raType2.numRb);
25677 allocInfo->rbsAlloc = allocInfo->rbsReq;
25678 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25680 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
25682 rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, sfrpoolInfo, allocInfo);
25686 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,0);
25687 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
25689 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,1);
25694 #if defined(LTEMAC_SPS)
25695 /* Update the sub-frame with new allocation */
25696 dlSf->bwAlloced += allocInfo->rbsReq;
25701 /* LTE_ADV_FLAG_REMOVED_END */
25702 #endif /* LTE_TDD */
25705 * @brief Performs RB allocation per UE for frequency non-selective cell.
25709 * Function : rgSCHCmnNonDlfsUeRbAlloc
25711 * Processing Steps:
25712 * - Allocate consecutively available RBs.
25714 * @param[in] RgSchCellCb *cell
25715 * @param[in] RgSchUeCb *ue
25716 * @param[in] RgSchDlSf *dlSf
25717 * @param[out] U8 *isDlBwAvail
25724 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc
25732 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25739 RgSchDlRbAlloc *allocInfo;
25740 RgSchCmnDlUe *dlUe;
25744 TRC2(rgSCHCmnNonDlfsUeRbAlloc);
25747 RgSch5gtfUeCb *ue5gtfCb = &(ue->ue5gtfCb);
25748 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[ue5gtfCb->BeamId]);
25750 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25751 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25752 *isDlBwAvail = TRUE;
25754 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
25756 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25757 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
25759 printf("5GTF_ERROR vrbg allocated > 25\n");
25763 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX
25764 || dlUe->proc->tbInfo[1].isAckNackDtx)
25766 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25770 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25772 if (!(allocInfo->pdcch))
25774 /* Returning ROK since PDCCH might be available for another UE and
25775 * further allocations could be done */
25776 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25777 "5GTF_ERROR : PDCCH allocation failed :ue (%u)",
25779 printf("5GTF_ERROR PDCCH allocation failed\n");
25783 //maxPrb = RGSCH_MIN((allocInfo->vrbgReq * MAX_5GTF_VRBG_SIZE), ue5gtfCb->maxPrb);
25784 //maxPrb = RGSCH_MIN(maxPrb,
25785 //((beamInfo->totVrbgAvail - beamInfo->vrbgStart)* MAX_5GTF_VRBG_SIZE)));
25786 //TODO_SID Need to check for vrbg available after scheduling for same beam.
25787 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
25788 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
25789 //TODO_SID: Setting for max TP
25790 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
25791 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
25792 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
25793 allocInfo->tbInfo[0].tbCb->dlGrnt.SCID = 0;
25794 allocInfo->tbInfo[0].tbCb->dlGrnt.dciFormat = allocInfo->dciFormat;
25795 //Filling temporarily
25796 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
25797 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
25799 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25800 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25801 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25809 * @brief Performs RB allocation for CCCH SDU lists of a frequency non-selective cell.
25813 * Function : rgSCHCmnNonDlfsCcchSduAlloc
25815 * Processing Steps:
25816 * - For each element in the list, Call rgSCHCmnNonDlfsCcchSduRbAlloc().
25817 * - If allocation is successful, add the ueCb to scheduled list of CCCH
25819 * - else, add UeCb to non-scheduled list.
25821 * @param[in] RgSchCellCb *cell
25822 * @param[in, out] RgSchCmnCcchSduRbAlloc *allocInfo
25823 * @param[in] U8 isRetx
25828 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc
25831 RgSchCmnCcchSduRbAlloc *allocInfo,
25835 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc(cell, allocInfo, isRetx)
25837 RgSchCmnCcchSduRbAlloc *allocInfo;
25842 CmLListCp *ccchSduLst = NULLP;
25843 CmLListCp *schdCcchSduLst = NULLP;
25844 CmLListCp *nonSchdCcchSduLst = NULLP;
25845 CmLList *schdLnkNode = NULLP;
25846 CmLList *toBeSchdLnk = NULLP;
25847 RgSchDlSf *dlSf = allocInfo->ccchSduDlSf;
25848 RgSchUeCb *ueCb = NULLP;
25849 RgSchDlHqProcCb *hqP = NULLP;
25850 TRC2(rgSCHCmnNonDlfsCcchSduAlloc);
25854 /* Initialize re-transmitting lists */
25855 ccchSduLst = &(allocInfo->ccchSduRetxLst);
25856 schdCcchSduLst = &(allocInfo->schdCcchSduRetxLst);
25857 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduRetxLst);
25861 /* Initialize transmitting lists */
25862 ccchSduLst = &(allocInfo->ccchSduTxLst);
25863 schdCcchSduLst = &(allocInfo->schdCcchSduTxLst);
25864 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduTxLst);
25867 /* Perform allocations for the list */
25868 toBeSchdLnk = cmLListFirst(ccchSduLst);
25869 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
25871 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25872 ueCb = hqP->hqE->ue;
25873 schdLnkNode = &hqP->schdLstLnk;
25874 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25875 ret = rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf);
25878 /* Allocation failed: Add remaining CCCH SDU nodes to the non-scheduled
25879 * list and return */
25882 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25883 ueCb = hqP->hqE->ue;
25884 schdLnkNode = &hqP->schdLstLnk;
25885 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25886 cmLListAdd2Tail(nonSchdCcchSduLst, schdLnkNode);
25887 toBeSchdLnk = toBeSchdLnk->next;
25888 } while(toBeSchdLnk);
25892 /* Allocation successful: Add UE to the scheduled list */
25893 cmLListAdd2Tail(schdCcchSduLst, schdLnkNode);
25901 * @brief Performs RB allocation for CcchSdu for frequency non-selective cell.
25905 * Function : rgSCHCmnNonDlfsCcchSduRbAlloc
25907 * Processing Steps:
25909 * - Allocate consecutively available RBs
25911 * @param[in] RgSchCellCb *cell
25912 * @param[in] RgSchUeCb *ueCb
25913 * @param[in] RgSchDlSf *dlSf
25919 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc
25926 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf)
25932 RgSchDlRbAlloc *allocInfo;
25933 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
25935 TRC2(rgSCHCmnNonDlfsCcchSduRbAlloc);
25938 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb,cell);
25940 /* [ccpu00138802]-MOD- If BW is less than required, return failure;
25941 it will be allocated in the next TTI */
25943 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
25944 (dlSf->bwAlloced == dlSf->bw))
25946 if((dlSf->bwAlloced == dlSf->bw) ||
25947 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
25952 /* Retrieve PDCCH */
25953 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
25954 if (ueDl->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
25956 /* allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, dlSf, y, ueDl->cqi,
25957 * TFU_DCI_FORMAT_1A, TRUE);*/
25958 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, TRUE);
25962 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE);
25964 if (!(allocInfo->pdcch))
25966 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
25970 /* Update allocation information */
25971 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
25972 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
25973 allocInfo->allocInfo.raType2.isLocal = TRUE;
25975 /*Fix for ccpu00123918*/
25976 /* Push this harq process back to the free queue */
25977 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
25978 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
25979 allocInfo->rbsAlloc = allocInfo->rbsReq;
25980 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25981 /* Update the sub-frame with new allocation */
25983 /* LTE_ADV_FLAG_REMOVED_START */
25985 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
25987 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf,
25988 allocInfo->allocInfo.raType2.rbStart,
25989 allocInfo->allocInfo.raType2.numRb);
25992 #endif /* end of ifndef LTE_TDD*/
25994 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf,
25995 allocInfo->allocInfo.raType2.rbStart,
25996 allocInfo->allocInfo.raType2.numRb);
25999 /* LTE_ADV_FLAG_REMOVED_END */
26000 /* ccpu00131941 - bwAlloced is updated from SPS bandwidth */
26008 * @brief Performs RB allocation for Msg4 for frequency non-selective cell.
26012 * Function : rgSCHCmnNonDlfsMsg4RbAlloc
26014 * Processing Steps:
26016 * - Allocate consecutively available RBs
26018 * @param[in] RgSchCellCb *cell
26019 * @param[in] RgSchRaCb *raCb
26020 * @param[in] RgSchDlSf *dlSf
26026 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc
26033 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf)
26039 RgSchDlRbAlloc *allocInfo;
26040 TRC2(rgSCHCmnNonDlfsMsg4RbAlloc);
26043 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_RACB(raCb);
26046 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
26047 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
26049 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
26050 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
26052 printf("5GTF_ERROR vrbg allocated > 25\n");
26057 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
26058 (dlSf->bwAlloced == dlSf->bw))
26060 if((dlSf->bwAlloced == dlSf->bw) ||
26061 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
26068 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
26069 if (raCb->dlHqE->msg4Proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
26071 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, TRUE);
26075 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, FALSE);
26077 if (!(allocInfo->pdcch))
26079 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
26084 /* SR_RACH_STATS : MSG4 TX Failed */
26085 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
26087 /* Update allocation information */
26088 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
26089 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
26090 allocInfo->allocInfo.raType2.isLocal = TRUE;
26093 /*Fix for ccpu00123918*/
26094 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
26095 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
26096 /* LTE_ADV_FLAG_REMOVED_START */
26098 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
26100 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
26101 allocInfo->allocInfo.raType2.rbStart, \
26102 allocInfo->allocInfo.raType2.numRb);
26105 #endif /* end of ifndef LTE_TDD */
26107 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
26108 allocInfo->allocInfo.raType2.rbStart, \
26109 allocInfo->allocInfo.raType2.numRb);
26111 /* LTE_ADV_FLAG_REMOVED_END */
26113 allocInfo->rbsAlloc = allocInfo->rbsReq;
26114 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26118 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
26120 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
26121 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
26123 /* Update allocation information */
26124 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26126 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
26127 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
26128 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
26130 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
26131 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
26134 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26135 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26136 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26144 * @brief Performs RB allocation for Msg4 lists of frequency non-selective cell.
26148 * Function : rgSCHCmnNonDlfsMsg4Alloc
26150 * Processing Steps:
26151 * - For each element in the list, Call rgSCHCmnNonDlfsMsg4RbAlloc().
26152 * - If allocation is successful, add the raCb to scheduled list of MSG4.
26153 * - else, add RaCb to non-scheduled list.
26155 * @param[in] RgSchCellCb *cell
26156 * @param[in, out] RgSchCmnMsg4RbAlloc *allocInfo
26157 * @param[in] U8 isRetx
26162 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc
26165 RgSchCmnMsg4RbAlloc *allocInfo,
26169 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc(cell, allocInfo, isRetx)
26171 RgSchCmnMsg4RbAlloc *allocInfo;
26176 CmLListCp *msg4Lst = NULLP;
26177 CmLListCp *schdMsg4Lst = NULLP;
26178 CmLListCp *nonSchdMsg4Lst = NULLP;
26179 CmLList *schdLnkNode = NULLP;
26180 CmLList *toBeSchdLnk = NULLP;
26181 RgSchDlSf *dlSf = allocInfo->msg4DlSf;
26182 RgSchRaCb *raCb = NULLP;
26183 RgSchDlHqProcCb *hqP = NULLP;
26184 TRC2(rgSCHCmnNonDlfsMsg4Alloc);
26188 /* Initialize re-transmitting lists */
26189 msg4Lst = &(allocInfo->msg4RetxLst);
26190 schdMsg4Lst = &(allocInfo->schdMsg4RetxLst);
26191 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4RetxLst);
26195 /* Initialize transmitting lists */
26196 msg4Lst = &(allocInfo->msg4TxLst);
26197 schdMsg4Lst = &(allocInfo->schdMsg4TxLst);
26198 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4TxLst);
26201 /* Perform allocations for the list */
26202 toBeSchdLnk = cmLListFirst(msg4Lst);
26203 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26205 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26206 raCb = hqP->hqE->raCb;
26207 schdLnkNode = &hqP->schdLstLnk;
26208 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26209 ret = rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf);
26212 /* Allocation failed: Add remaining MSG4 nodes to non-scheduled
26213 * list and return */
26216 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26217 raCb = hqP->hqE->raCb;
26218 schdLnkNode = &hqP->schdLstLnk;
26219 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26220 cmLListAdd2Tail(nonSchdMsg4Lst, schdLnkNode);
26221 toBeSchdLnk = toBeSchdLnk->next;
26222 } while(toBeSchdLnk);
26226 /* Allocation successful: Add UE to the scheduled list */
26227 cmLListAdd2Tail(schdMsg4Lst, schdLnkNode);
26238 * @brief Performs RB allocation for the list of UEs of a frequency
26239 * non-selective cell.
26243 * Function : rgSCHCmnNonDlfsDedRbAlloc
26245 * Processing Steps:
26246 * - For each element in the list, Call rgSCHCmnNonDlfsUeRbAlloc().
26247 * - If allocation is successful, add the ueCb to scheduled list of UEs.
26248 * - else, add ueCb to non-scheduled list of UEs.
26250 * @param[in] RgSchCellCb *cell
26251 * @param[in, out] RgSchCmnUeRbAlloc *allocInfo
26252 * @param[in] CmLListCp *ueLst,
26253 * @param[in, out] CmLListCp *schdHqPLst,
26254 * @param[in, out] CmLListCp *nonSchdHqPLst
26259 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc
26262 RgSchCmnUeRbAlloc *allocInfo,
26264 CmLListCp *schdHqPLst,
26265 CmLListCp *nonSchdHqPLst
26268 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc(cell, allocInfo, ueLst,
26269 schdHqPLst, nonSchdHqPLst)
26271 RgSchCmnUeRbAlloc *allocInfo;
26273 CmLListCp *schdHqPLst;
26274 CmLListCp *nonSchdHqPLst;
26278 CmLList *schdLnkNode = NULLP;
26279 CmLList *toBeSchdLnk = NULLP;
26280 RgSchDlSf *dlSf = allocInfo->dedDlSf;
26281 RgSchUeCb *ue = NULLP;
26282 RgSchDlHqProcCb *hqP = NULLP;
26284 TRC2(rgSCHCmnNonDlfsDedRbAlloc);
26287 /* Perform allocations for the list */
26288 toBeSchdLnk = cmLListFirst(ueLst);
26289 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26291 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26293 schdLnkNode = &hqP->schdLstLnk;
26294 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26296 ret = rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, &isDlBwAvail);
26299 /* Allocation failed: Add remaining UEs to non-scheduled
26300 * list and return */
26303 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26305 schdLnkNode = &hqP->schdLstLnk;
26306 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26307 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
26308 toBeSchdLnk = toBeSchdLnk->next;
26309 } while(toBeSchdLnk);
26315 #if defined (TENB_STATS) && defined (RG_5GTF)
26316 cell->tenbStats->sch.dl5gtfRbAllocPass++;
26318 /* Allocation successful: Add UE to the scheduled list */
26319 cmLListAdd2Tail(schdHqPLst, schdLnkNode);
26323 #if defined (TENB_STATS) && defined (RG_5GTF)
26324 cell->tenbStats->sch.dl5gtfRbAllocFail++;
26326 /* Allocation failed : Add UE to the non-scheduled list */
26327 printf("5GTF_ERROR Dl rb alloc failed adding nonSchdHqPLst\n");
26328 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
26336 * @brief Handles RB allocation for frequency non-selective cell.
26340 * Function : rgSCHCmnNonDlfsRbAlloc
26342 * Invoking Module Processing:
26343 * - SCH shall invoke this for RB allocation if downlink frequency
26344 *   selective scheduling is disabled for the cell.
26345 * - MAX C/I, PFS or RR shall provide the required bytes, the required-RB
26346 *   estimate and the subframe for each allocation to be made to SCH.
26348 * Processing Steps:
26349 * - Allocate sequentially for common channels.
26350 * - For transmitting and re-transmitting UE list.
26352 * - Perform wide-band allocations for UE in increasing order of
26354 * - Determine Imcs for the allocation.
26355 * - Determine RA type.
26356 * - Determine DCI format.
26358 * @param[in] RgSchCellCb *cell
26359 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
26364 PUBLIC Void rgSCHCmnNonDlfsRbAlloc
26367 RgSchCmnDlRbAllocInfo *allocInfo
26370 PUBLIC Void rgSCHCmnNonDlfsRbAlloc(cell, allocInfo)
26372 RgSchCmnDlRbAllocInfo *allocInfo;
26376 RgSchDlRbAlloc *reqAllocInfo;
26377 TRC2(rgSCHCmnNonDlfsRbAlloc);
26379 /* Allocate for MSG4 retransmissions */
26380 if (allocInfo->msg4Alloc.msg4RetxLst.count)
26382 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc RetxLst\n");
26383 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), TRUE);
26386 /* Allocate for MSG4 transmissions */
26387 /* Assuming all the nodes in the list need allocations: rbsReq is valid */
26388 if (allocInfo->msg4Alloc.msg4TxLst.count)
26390 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc txLst\n");
26391 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), FALSE);
26394 /* Allocate for CCCH SDU (received after guard timer expiry)
26395 * retransmissions */
26396 if (allocInfo->ccchSduAlloc.ccchSduRetxLst.count)
26398 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26399 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), TRUE);
26403    /* Allocate for CCCH SDU (received after guard timer expiry) transmissions */
26404 if (allocInfo->ccchSduAlloc.ccchSduTxLst.count)
26406 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26407 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), FALSE);
26411 /* Allocate for Random access response */
26412 for (raRspCnt = 0; raRspCnt < RG_SCH_CMN_MAX_CMN_PDCCH; ++raRspCnt)
26414 /* Assuming that the requests will be filled in sequentially */
26415 reqAllocInfo = &(allocInfo->raRspAlloc[raRspCnt]);
26416 if (!reqAllocInfo->rbsReq)
26420 printf("5GTF_ERROR calling RAR rgSCHCmnNonDlfsCmnRbAlloc\n");
26421 // if ((rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo)) != ROK)
26422 if ((rgSCHCmnNonDlfsCmnRbAllocRar(cell, reqAllocInfo)) != ROK)
26428 /* Allocate for RETX+TX UEs */
26429 if(allocInfo->dedAlloc.txRetxHqPLst.count)
26431 printf("5GTF_ERROR TX RETX rgSCHCmnNonDlfsDedRbAlloc\n");
26432 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26433 &(allocInfo->dedAlloc.txRetxHqPLst),
26434 &(allocInfo->dedAlloc.schdTxRetxHqPLst),
26435 &(allocInfo->dedAlloc.nonSchdTxRetxHqPLst));
26438 if((allocInfo->dedAlloc.retxHqPLst.count))
26440 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26441 &(allocInfo->dedAlloc.retxHqPLst),
26442 &(allocInfo->dedAlloc.schdRetxHqPLst),
26443 &(allocInfo->dedAlloc.nonSchdRetxHqPLst));
26446 /* Allocate for transmitting UEs */
26447 if((allocInfo->dedAlloc.txHqPLst.count))
26449 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26450 &(allocInfo->dedAlloc.txHqPLst),
26451 &(allocInfo->dedAlloc.schdTxHqPLst),
26452 &(allocInfo->dedAlloc.nonSchdTxHqPLst));
26455 RgSchCmnCell *cmnCell = RG_SCH_CMN_GET_CELL(cell);
26456 if ((allocInfo->dedAlloc.txRetxHqPLst.count +
26457 allocInfo->dedAlloc.retxHqPLst.count +
26458 allocInfo->dedAlloc.txHqPLst.count) >
26459 cmnCell->dl.maxUePerDlSf)
26461 #ifndef ALIGN_64BIT
26462 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26463 " scheduler exceed maximumUePerDlSf(%u) tx-retx %ld retx %ld tx %ld\n",
26464 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26465 allocInfo->dedAlloc.retxHqPLst.count,
26466 allocInfo->dedAlloc.txHqPLst.count));
26468 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26469 " scheduler exceed maximumUePerDlSf(%u) tx-retx %d retx %d tx %d\n",
26470 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26471 allocInfo->dedAlloc.retxHqPLst.count,
26472 allocInfo->dedAlloc.txHqPLst.count));
26477 /* LTE_ADV_FLAG_REMOVED_START */
26478 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
26480 printf("5GTF_ERROR RETX rgSCHCmnNonDlfsDsfrRntpComp\n");
26481 rgSCHCmnNonDlfsDsfrRntpComp(cell, allocInfo->dedAlloc.dedDlSf);
26483 /* LTE_ADV_FLAG_REMOVED_END */
26484 #endif /* LTE_TDD */
26488 /***********************************************************
26490 * Func : rgSCHCmnCalcRiv
26492 * Desc : This function calculates the Resource Indication Value (RIV) for a contiguous RB allocation.
26498 * File : rg_sch_cmn.c
26500 **********************************************************/
26503 PUBLIC U32 rgSCHCmnCalcRiv
26510 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26517 PUBLIC U32 rgSCHCmnCalcRiv
26524 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26531 U8 numRbMinus1 = numRb - 1;
26534 TRC2(rgSCHCmnCalcRiv);
26536 if (numRbMinus1 <= bw/2)
26538 riv = bw * numRbMinus1 + rbStart;
26542 riv = bw * (bw - numRbMinus1) + (bw - rbStart - 1);
26545 } /* rgSCHCmnCalcRiv */
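/* Illustrative sketch (hypothetical; guard macro and function name are
 * assumptions, not built by default): a standalone copy of the RIV arithmetic
 * above, convenient for checking the formula in isolation. Worked example:
 * bw = 25, rbStart = 3, numRb = 4 gives numRb-1 = 3 <= 25/2, so
 * riv = 25*3 + 3 = 78. */
#ifdef RG_SCH_CMN_RIV_SKETCH
static U32 rgSchCmnCalcRivSketch(U8 bw, U8 rbStart, U8 numRb)
{
   U8 numRbMinus1 = numRb - 1;
   if (numRbMinus1 <= bw/2)
   {
      return ((U32)bw * numRbMinus1 + rbStart);
   }
   return ((U32)bw * (bw - numRbMinus1) + (bw - rbStart - 1));
}
#endif /* RG_SCH_CMN_RIV_SKETCH */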
26549 * @brief This function allocates and copies the RACH response scheduling
26550 * related information into cell control block.
26554 * Function: rgSCHCmnDlCpyRachInfo
26555 * Purpose: This function allocates and copies the RACH response
26556 * scheduling related information into cell control block
26557 * for each DL subframe.
26560 * Invoked by: Scheduler
26562 * @param[in] RgSchCellCb* cell
26563 * @param[in] RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES]
26564 * @param[in] U8 raArrSz
26569 PRIVATE S16 rgSCHCmnDlCpyRachInfo
26572 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
26576 PRIVATE S16 rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz)
26578 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES];
26582 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
26591 TRC2(rgSCHCmnDlCpyRachInfo);
26593 /* Allocate RACH response information for each DL
26594 * subframe in a radio frame */
26595 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cell->rachRspLst,
26596 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1] *
26597 sizeof(RgSchTddRachRspLst));
26603 for(sfnIdx=raArrSz-1; sfnIdx>=0; sfnIdx--)
26605 for(subfrmIdx=0; subfrmIdx < RGSCH_NUM_SUB_FRAMES; subfrmIdx++)
26607 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
26608 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
26613 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx],subfrmIdx);
26615 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26617 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddNumDlSubfrmTbl[ulDlCfgIdx],subfrmIdx);
26618 sfNum = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][subfrmIdx]-1;
26619 numRfs = cell->rachRspLst[sfNum].numRadiofrms;
26620       /* Update each DL subframe in which a RACH response can
26621       * be sent */
26624 cell->rachRspLst[sfNum].rachRsp[numRfs].sfnOffset =
26625 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset;
26626 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26628 cell->rachRspLst[sfNum].rachRsp[numRfs].\
26629 subframe[sfcount] =
26630 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].\
26633 cell->rachRspLst[sfNum].rachRsp[numRfs].numSubfrms =
26634 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26635 cell->rachRspLst[sfNum].numRadiofrms++;
26638       /* Copy the subframes to be deleted at this subframe */
26640 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
26643 cell->rachRspLst[sfNum].delInfo.sfnOffset =
26644 rachRspLst[sfnIdx][subfrmIdx].delInfo.sfnOffset;
26645 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26647 cell->rachRspLst[sfNum].delInfo.subframe[sfcount] =
26648 rachRspLst[sfnIdx][subfrmIdx].delInfo.subframe[sfcount];
26650 cell->rachRspLst[sfNum].delInfo.numSubfrms =
26651 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
26659 * @brief This function determines the iTbs based on the new CFI,
26660 * the CQI and the BLER-based delta iTbs
26664 * Function: rgSchCmnFetchItbs
26665 * Purpose: Fetch the new iTbs when CFI changes.
26667 * @param[in] RgSchCellCb *cell
26668 * @param[in] RgSchCmnDlUe *ueDl
26669 * @param[in] U8 cqi
26676 PRIVATE S32 rgSchCmnFetchItbs
26679 RgSchCmnDlUe *ueDl,
26687 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, subFrm, cqi, cfi, cwIdx, noLyr)
26689 RgSchCmnDlUe *ueDl;
26698 PRIVATE S32 rgSchCmnFetchItbs
26701 RgSchCmnDlUe *ueDl,
26708 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, cqi, cfi, cwIdx, noLyr)
26710 RgSchCmnDlUe *ueDl;
26719 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
26722 TRC2(rgSchCmnFetchItbs);
26725    /* Special handling for the special subframe when CFI is 3, as
26726    * the CFI in a special subframe can be at most 2 */
26727 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
26729 if((cellDl->currCfi == 3) ||
26730 ((cell->bwCfg.dlTotalBw <= 10) && (cellDl->currCfi == 1)))
26732 /* Use CFI 2 in this case */
26733 iTbs = (ueDl->laCb[cwIdx].deltaiTbs +
26734 ((*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][2]))[cqi])* 100)/100;
26736 RG_SCH_CHK_ITBS_RANGE(iTbs, RGSCH_NUM_ITBS - 1);
26740 iTbs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1];
26742 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
26744    else /* CFI changed: update with the new iTbs and reset the BLER delta */
26747 S32 tmpiTbs = (*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][cfi]))[cqi];
26749 iTbs = (ueDl->laCb[cwIdx].deltaiTbs + tmpiTbs*100)/100;
26751 RG_SCH_CHK_ITBS_RANGE(iTbs, tmpiTbs);
26753 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
26755 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1] = iTbs;
26757 ueDl->lastCfi = cfi;
26758 ueDl->laCb[cwIdx].deltaiTbs = 0;
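/* Illustrative sketch (hypothetical; guard macro and function name are
 * assumptions, not built by default): the delta-iTbs arithmetic used above.
 * deltaiTbs is kept in units of 1/100 iTbs, so the CQI-table iTbs is scaled by
 * 100 before the BLER correction is applied; the caller then range-checks the
 * result as above. Worked example: a table iTbs of 15 with deltaiTbs = -300
 * yields (-300 + 15*100)/100 = 12. */
#ifdef RG_SCH_CMN_ITBS_SKETCH
static S32 rgSchCmnItbsWithDeltaSketch(S32 deltaiTbs, U8 tblItbs)
{
   return ((deltaiTbs + (S32)tblItbs * 100) / 100);
}
#endif /* RG_SCH_CMN_ITBS_SKETCH */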
26765 * @brief This function determines the RBs and Bytes required for BO
26766 * transmission for UEs configured with TM 1/2/6/7.
26770 * Function: rgSCHCmnDlAllocTxRb1Tb1Cw
26771 * Purpose: Allocate TB1 on CW1.
26773 * Reference Parameter effBo is filled with alloced bytes.
26774 * Returns RFAILED if BO not satisfied at all.
26776 * Invoked by: rgSCHCmnDlAllocTxRbTM1/2/6/7
26778 * @param[in] RgSchCellCb *cell
26779 * @param[in] RgSchDlSf *subFrm
26780 * @param[in] RgSchUeCb *ue
26781 * @param[in] U32 bo
26782 * @param[out] U32 *effBo
26783 * @param[in] RgSchDlHqProcCb *proc
26784 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26789 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw
26796 RgSchDlHqProcCb *proc,
26797 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26800 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26806 RgSchDlHqProcCb *proc;
26807 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26810 RgSchDlRbAlloc *allocInfo;
26813 TRC2(rgSCHCmnDlAllocTxRb1Tb1Cw);
26816 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
26818 if (ue->ue5gtfCb.rank == 2)
26820 allocInfo->dciFormat = TFU_DCI_FORMAT_B2;
26824 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26827 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26828 allocInfo->raType);
26830 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
26831 bo, &numRb, effBo);
26832 if (ret == RFAILED)
26834 /* If allocation couldn't be made then return */
26837 /* Adding UE to RbAllocInfo TX Lst */
26838 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
26839 /* Fill UE alloc Info */
26840 allocInfo->rbsReq = numRb;
26841 allocInfo->dlSf = subFrm;
26843 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
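/* Worked example (illustrative): in the 5GTF path the requirement is also
 * expressed in VRB groups as vrbgReq = numRb / MAX_5GTF_VRBG_SIZE. Purely for
 * illustration, if the group size were 4, an allocation of 12 RBs would map to
 * a request of 3 VRB groups; the actual MAX_5GTF_VRBG_SIZE value is defined
 * elsewhere in this codebase. */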
26851 * @brief This function determines the RBs and Bytes required for BO
26852 * retransmission for UEs configured with TM 1/2/6/7.
26856 * Function: rgSCHCmnDlAllocRetxRb1Tb1Cw
26857 * Purpose: Allocate TB1 on CW1.
26859 * Reference Parameter effBo is filled with alloced bytes.
26860 * Returns RFAILED if BO not satisfied at all.
26862 * Invoked by: rgSCHCmnDlAllocRetxRbTM1/2/6/7
26864 * @param[in] RgSchCellCb *cell
26865 * @param[in] RgSchDlSf *subFrm
26866 * @param[in] RgSchUeCb *ue
26867 * @param[in] U32 bo
26868 * @param[out] U32 *effBo
26869 * @param[in] RgSchDlHqProcCb *proc
26870 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26875 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw
26882 RgSchDlHqProcCb *proc,
26883 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26886 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26892 RgSchDlHqProcCb *proc;
26893 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26896 RgSchDlRbAlloc *allocInfo;
26899 TRC2(rgSCHCmnDlAllocRetxRb1Tb1Cw);
26902 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
26905 /* 5GTF: RETX DCI format same as TX */
26906 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26907 &allocInfo->raType);
26910 /* Get the Allocation in terms of RBs that are required for
26911 * this retx of TB1 */
26912 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
26914 if (ret == RFAILED)
26916 /* Allocation couldn't be made for Retx */
26917 /* Fix : syed If TxRetx allocation failed then add the UE along with the proc
26918       * to the nonSchdTxRetxUeLst and let the specific scheduler take care of it during
26920 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
26923 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
26924 /* Fill UE alloc Info */
26925 allocInfo->rbsReq = numRb;
26926 allocInfo->dlSf = subFrm;
26928 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
26936 * @brief This function determines the RBs and Bytes required for BO
26937 * transmission for UEs configured with TM 1.
26941 * Function: rgSCHCmnDlAllocTxRbTM1
26944 * Reference Parameter effBo is filled with alloced bytes.
26945 * Returns RFAILED if BO not satisfied at all.
26947 * Invoked by: rgSCHCmnDlAllocTxRb
26949 * @param[in] RgSchCellCb *cell
26950 * @param[in] RgSchDlSf *subFrm
26951 * @param[in] RgSchUeCb *ue
26952 * @param[in] U32 bo
26953 * @param[out] U32 *effBo
26954 * @param[in] RgSchDlHqProcCb *proc
26955 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26960 PRIVATE Void rgSCHCmnDlAllocTxRbTM1
26967 RgSchDlHqProcCb *proc,
26968 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26971 PRIVATE Void rgSCHCmnDlAllocTxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26977 RgSchDlHqProcCb *proc;
26978 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26981 TRC2(rgSCHCmnDlAllocTxRbTM1);
26982 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
26988 * @brief This function determines the RBs and Bytes required for BO
26989 * retransmission for UEs configured with TM 1.
26993 * Function: rgSCHCmnDlAllocRetxRbTM1
26996 * Reference Parameter effBo is filled with alloced bytes.
26997 * Returns RFAILED if BO not satisfied at all.
26999 * Invoked by: rgSCHCmnDlAllocRetxRb
27001 * @param[in] RgSchCellCb *cell
27002 * @param[in] RgSchDlSf *subFrm
27003 * @param[in] RgSchUeCb *ue
27004 * @param[in] U32 bo
27005 * @param[out] U32 *effBo
27006 * @param[in] RgSchDlHqProcCb *proc
27007 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27012 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1
27019 RgSchDlHqProcCb *proc,
27020 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27023 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27029 RgSchDlHqProcCb *proc;
27030 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27033 TRC2(rgSCHCmnDlAllocRetxRbTM1);
27034 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27040 * @brief This function determines the RBs and Bytes required for BO
27041 * transmission for UEs configured with TM 2.
27045 * Function: rgSCHCmnDlAllocTxRbTM2
27048 * Reference Parameter effBo is filled with alloced bytes.
27049 * Returns RFAILED if BO not satisfied at all.
27051 * Invoked by: rgSCHCmnDlAllocTxRb
27053 * @param[in] RgSchCellCb *cell
27054 * @param[in] RgSchDlSf *subFrm
27055 * @param[in] RgSchUeCb *ue
27056 * @param[in] U32 bo
27057 * @param[out] U32 *effBo
27058 * @param[in] RgSchDlHqProcCb *proc
27059 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27064 PRIVATE Void rgSCHCmnDlAllocTxRbTM2
27071 RgSchDlHqProcCb *proc,
27072 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27075 PRIVATE Void rgSCHCmnDlAllocTxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27081 RgSchDlHqProcCb *proc;
27082 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27085 TRC2(rgSCHCmnDlAllocTxRbTM2);
27086 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27092 * @brief This function determines the RBs and Bytes required for BO
27093 * retransmission for UEs configured with TM 2.
27097 * Function: rgSCHCmnDlAllocRetxRbTM2
27100 * Reference Parameter effBo is filled with alloced bytes.
27101 * Returns RFAILED if BO not satisfied at all.
27103 * Invoked by: rgSCHCmnDlAllocRetxRb
27105 * @param[in] RgSchCellCb *cell
27106 * @param[in] RgSchDlSf *subFrm
27107 * @param[in] RgSchUeCb *ue
27108 * @param[in] U32 bo
27109 * @param[out] U32 *effBo
27110 * @param[in] RgSchDlHqProcCb *proc
27111 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27116 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2
27123 RgSchDlHqProcCb *proc,
27124 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27127 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27133 RgSchDlHqProcCb *proc;
27134 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27137 TRC2(rgSCHCmnDlAllocRetxRbTM2);
27138 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27144 * @brief This function determines the RBs and Bytes required for BO
27145 * transmission for UEs configured with TM 3.
27149 * Function: rgSCHCmnDlAllocTxRbTM3
27152 * Reference Parameter effBo is filled with alloced bytes.
27153 * Returns RFAILED if BO not satisfied at all.
27155 * Invoked by: rgSCHCmnDlAllocTxRb
27157 * @param[in] RgSchCellCb *cell
27158 * @param[in] RgSchDlSf *subFrm
27159 * @param[in] RgSchUeCb *ue
27160 * @param[in] U32 bo
27161 * @param[out] U32 *effBo
27162 * @param[in] RgSchDlHqProcCb *proc
27163 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27168 PRIVATE Void rgSCHCmnDlAllocTxRbTM3
27175 RgSchDlHqProcCb *proc,
27176 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27179 PRIVATE Void rgSCHCmnDlAllocTxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27185 RgSchDlHqProcCb *proc;
27186 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27190 TRC2(rgSCHCmnDlAllocTxRbTM3);
27192 /* Both TBs free for TX allocation */
27193 rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo,\
27194 proc, cellWdAllocInfo);
27201 * @brief This function determines the RBs and Bytes required for BO
27202 * retransmission for UEs configured with TM 3.
27206 * Function: rgSCHCmnDlAllocRetxRbTM3
27209 * Reference Parameter effBo is filled with alloced bytes.
27210 * Returns RFAILED if BO not satisfied at all.
27212 * Invoked by: rgSCHCmnDlAllocRetxRb
27214 * @param[in] RgSchCellCb *cell
27215 * @param[in] RgSchDlSf *subFrm
27216 * @param[in] RgSchUeCb *ue
27217 * @param[in] U32 bo
27218 * @param[out] U32 *effBo
27219 * @param[in] RgSchDlHqProcCb *proc
27220 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27225 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3
27232 RgSchDlHqProcCb *proc,
27233 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27236 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27242 RgSchDlHqProcCb *proc;
27243 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27247 TRC2(rgSCHCmnDlAllocRetxRbTM3);
27249 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
27250 (proc->tbInfo[1].state == HQ_TB_NACKED))
27253 printf ("RETX RB TM3 nack for both hqp %d cell %d \n", proc->procId, proc->hqE->cell->cellId);
27255 /* Both TBs require RETX allocation */
27256 rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo,\
27257 proc, cellWdAllocInfo);
27261       /* One of the TBs needs RETX allocation. The other TB may or may not
27262       * be available for new TX allocation. */
27263 rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo,\
27264 proc, cellWdAllocInfo);
27272 * @brief This function performs the DCI format selection in case of
27273 * Transmit Diversity scheme where there can be more
27274 * than 1 option for DCI format selection.
27278 * Function: rgSCHCmnSlctPdcchFrmt
27279 * Purpose: 1. If DLFS is enabled, then choose TM specific
27280 * DCI format for Transmit diversity. All the
27281 * TM Specific DCI Formats support Type0 and/or
27282 * Type1 resource allocation scheme. DLFS
27283 * supports only Type-0&1 Resource allocation.
27284 * 2. If DLFS is not enabled, select a DCI format
27285 * which is of smaller size. Since Non-DLFS
27286 * scheduler supports all Resource allocation
27287 * schemes, selection is based on efficiency.
27289 * Invoked by: DL UE Allocation by Common Scheduler.
27291 * @param[in] RgSchCellCb *cell
27292 * @param[in] RgSchUeCb *ue
27293 * @param[out] U8 *raType
27294 * @return TfuDciFormat
27298 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt
27305 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt(cell, ue, raType)
27311 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
27313 TRC2(rgSCHCmnSlctPdcchFrmt);
27315 /* ccpu00140894- Selective DCI Format and RA type should be selected only
27316 * after TX Mode transition is completed*/
27317 if ((cellSch->dl.isDlFreqSel) && (ue->txModeTransCmplt))
27319 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciRAType;
27320 RETVALUE(rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciFrmt);
27324 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciRAType;
27325 RETVALUE(rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciFrmt);
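/* Illustrative sketch (hypothetical; guard macro, type and function names are
 * assumptions, not built by default): the selection rule above reduced to its
 * inputs. A TM-specific DCI format and RA type are chosen only when DLFS is
 * enabled and the TX mode transition is complete; otherwise the preferred,
 * more compact format is used. The option structure stands in for the module's
 * rgSchCmnDciFrmtOptns table. */
#ifdef RG_SCH_CMN_DCI_SLCT_SKETCH
typedef struct rgSchCmnDciOptSketch
{
   TfuDciFormat spfcDciFrmt;    /* TM-specific DCI format           */
   U8           spfcDciRAType;  /* RA type paired with it           */
   TfuDciFormat prfrdDciFrmt;   /* preferred (smaller) DCI format   */
   U8           prfrdDciRAType; /* RA type paired with it           */
} RgSchCmnDciOptSketch;

static TfuDciFormat rgSchCmnSlctPdcchFrmtSketch
(
Bool                  isDlFreqSel,
Bool                  txModeTransCmplt,
RgSchCmnDciOptSketch *opt,
U8                   *raType
)
{
   if (isDlFreqSel && txModeTransCmplt)
   {
      *raType = opt->spfcDciRAType;
      return (opt->spfcDciFrmt);
   }
   *raType = opt->prfrdDciRAType;
   return (opt->prfrdDciFrmt);
}
#endif /* RG_SCH_CMN_DCI_SLCT_SKETCH */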
27331 * @brief This function handles Retx allocation in case of TM3 UEs
27332 * where both the TBs were NACKED previously.
27336 * Function: rgSCHCmnDlTM3RetxRetx
27337 * Purpose: If forceTD flag enabled
27338 * TD for TB1 on CW1.
27340 * DCI Frmt 2A and RA Type 0
27341 * RI layered SM of both TBs on 2 CWs
27342 * Add UE to cell Alloc Info.
27343 * Fill UE alloc Info.
27346 * Successful allocation is indicated by non-zero effBo value.
27348 * Invoked by: rgSCHCmnDlAllocRbTM3
27350 * @param[in] RgSchCellCb *cell
27351 * @param[in] RgSchDlSf *subFrm
27352 * @param[in] RgSchUeCb *ue
27353 * @param[in] U32 bo
27354 * @param[out] U32 *effBo
27355 * @param[in] RgSchDlHqProcCb *proc
27356 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27361 PRIVATE Void rgSCHCmnDlTM3RetxRetx
27368 RgSchDlHqProcCb *proc,
27369 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27372 PRIVATE Void rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27378 RgSchDlHqProcCb *proc;
27379 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27383 RgSchDlRbAlloc *allocInfo;
27390 TRC2(rgSCHCmnDlTM3RetxRetx);
27393 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27395 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27397 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
27398 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
27400 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27402 if (ret == RFAILED)
27404 /* Allocation couldn't be made for Retx */
27405 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
27408 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27409 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27410 #ifdef FOUR_TX_ANTENNA
27411    /* Chandra: For 4X4 MIMO RETX with noTxLyrs=3, CW0 should carry the 1-layer TB and CW1
27412    * the 2-layer TB as per Table 6.3.3.2-1 of 36.211 */
27413 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27416 proc->cwSwpEnabled = TRUE;
27419 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27420 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
27424 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27427 /* Adding UE to allocInfo RETX Lst */
27428 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27430 /* Fill UE alloc Info scratch pad */
27431 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27432 precInfo, noTxLyrs, subFrm);
27439 * @brief This function handles Retx allocation in case of TM4 UEs
27440 * where both the TBs were NACKED previously.
27444 * Function: rgSCHCmnDlTM4RetxRetx
27445 * Purpose: If forceTD flag enabled
27446 * TD for TB1 on CW1.
27448 * DCI Frmt 2 and RA Type 0
27450 * 1 layer SM of TB1 on CW1.
27452 * RI layered SM of both TBs on 2 CWs
27453 * Add UE to cell Alloc Info.
27454 * Fill UE alloc Info.
27457 * Successful allocation is indicated by non-zero effBo value.
27459 * Invoked by: rgSCHCmnDlAllocRbTM4
27461 * @param[in] RgSchCellCb *cell
27462 * @param[in] RgSchDlSf *subFrm
27463 * @param[in] RgSchUeCb *ue
27464 * @param[in] U32 bo
27465 * @param[out] U32 *effBo
27466 * @param[in] RgSchDlHqProcCb *proc
27467 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27472 PRIVATE Void rgSCHCmnDlTM4RetxRetx
27479 RgSchDlHqProcCb *proc,
27480 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27483 PRIVATE Void rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27489 RgSchDlHqProcCb *proc;
27490 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27494 RgSchDlRbAlloc *allocInfo;
27496 Bool swpFlg = FALSE;
27498 #ifdef FOUR_TX_ANTENNA
27503 TRC2(rgSCHCmnDlTM4RetxRetx);
27506 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27508    /* Irrespective of RI, schedule both CWs */
27509 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
27510 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
27512 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27514 if (ret == RFAILED)
27516 /* Allocation couldn't be made for Retx */
27517 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
27520 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27522 #ifdef FOUR_TX_ANTENNA
27523    /* Chandra: For 4X4 MIMO RETX with noTxLyrs=3, CW0 should carry the 1-layer TB and CW1
27524    * the 2-layer TB as per Table 6.3.3.2-1 of 36.211 */
27525 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27528 proc->cwSwpEnabled = TRUE;
27530 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27531 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
27535 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27538 /* Adding UE to allocInfo RETX Lst */
27539 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27541 /* Fill UE alloc Info scratch pad */
27542 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27543 precInfo, noTxLyrs, subFrm);
27551 * @brief This function determines Transmission attributes
27552 * incase of Spatial multiplexing for TX and RETX TBs.
27556 * Function: rgSCHCmnDlSMGetAttrForTxRetx
27557 * Purpose: 1. Reached here for a TM3/4 UE's HqP for which one of the TBs is
27558 * NACKED and the other TB is either NACKED or WAITING.
27559 * 2. Select the NACKED TB for RETX allocation.
27560 * 3. Allocation preference for RETX TB by mapping it to a better
27561 * CW (better in terms of efficiency).
27562 * 4. Determine the state of the other TB.
27563 * Determine whether the swap flag should be set.
27564 * Swap flag would be set if Retx TB is cross
27566 * 5. If UE has new data available for TX and if the other TB's state
27567 * is ACKED then set furtherScope as TRUE.
27569 * Invoked by: rgSCHCmnDlTM3[4]TxRetx
27571 * @param[in] RgSchUeCb *ue
27572 * @param[in] RgSchDlHqProcCb *proc
27573 * @param[out] RgSchDlHqTbCb **retxTb
27574 * @param[out] RgSchDlHqTbCb **txTb
27575 * @param[out] Bool *frthrScp
27576 * @param[out] Bool *swpFlg
27581 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx
27584 RgSchDlHqProcCb *proc,
27585 RgSchDlHqTbCb **retxTb,
27586 RgSchDlHqTbCb **txTb,
27591 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, frthrScp,\
27594 RgSchDlHqProcCb *proc;
27595 RgSchDlHqTbCb **retxTb;
27596 RgSchDlHqTbCb **txTb;
27601 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,proc->hqE->cell);
27602 RgSchDlRbAlloc *allocInfo;
27604 TRC2(rgSCHCmnDlSMGetAttrForTxRetx);
27606 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27608 *retxTb = &proc->tbInfo[0];
27609 *txTb = &proc->tbInfo[1];
27610 /* TENB_BRDCM_TM4- Currently disabling swapflag for TM3/TM4, since
27611 * HqFeedback processing does not consider a swapped hq feedback */
27612 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 1))
27615 proc->cwSwpEnabled = TRUE;
27617 if (proc->tbInfo[1].state == HQ_TB_ACKED)
27619 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27620 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
27625 *retxTb = &proc->tbInfo[1];
27626 *txTb = &proc->tbInfo[0];
27627 /* TENB_BRDCM_TM4 - Currently disabling swapflag for TM3/TM4, since
27628 * HqFeedback processing does not consider a swapped hq feedback */
27629 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 0))
27632 proc->cwSwpEnabled = TRUE;
27634 if (proc->tbInfo[0].state == HQ_TB_ACKED)
27636 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27637 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
27645 * @brief Determine Precoding information for TM3 2 TX Antenna.
27649 * Function: rgSCHCmnDlTM3PrecInf2
27652 * Invoked by: rgSCHCmnDlGetAttrForTM3
27654 * @param[in] RgSchUeCb *ue
27655 * @param[in] U8 numTxLyrs
27656 * @param[in] Bool bothCwEnbld
27661 PRIVATE U8 rgSCHCmnDlTM3PrecInf2
27669 PRIVATE U8 rgSCHCmnDlTM3PrecInf2(ue, numTxLyrs, bothCwEnbld)
27676 TRC2(rgSCHCmnDlTM3PrecInf2);
27683 * @brief Determine Precoding information for TM4 2 TX Antenna.
27687 * Function: rgSCHCmnDlTM4PrecInf2
27688 * Purpose: To determine a logic of deriving precoding index
27689 * information from 36.212 table 5.3.3.1.5-4
27691 * Invoked by: rgSCHCmnDlGetAttrForTM4
27693 * @param[in] RgSchUeCb *ue
27694 * @param[in] U8 numTxLyrs
27695 * @param[in] Bool bothCwEnbld
27700 PRIVATE U8 rgSCHCmnDlTM4PrecInf2
27708 PRIVATE U8 rgSCHCmnDlTM4PrecInf2(ue, numTxLyrs, bothCwEnbld)
27715 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27718 TRC2(rgSCHCmnDlTM4PrecInf2);
27720 if (ueDl->mimoInfo.ri == numTxLyrs)
27722 if (ueDl->mimoInfo.ri == 2)
27724 /* PrecInfo corresponding to 2 CW
27726 if (ue->mimoInfo.puschFdbkVld)
27732 precIdx = ueDl->mimoInfo.pmi - 1;
27737 /* PrecInfo corresponding to 1 CW
27739 if (ue->mimoInfo.puschFdbkVld)
27745 precIdx = ueDl->mimoInfo.pmi + 1;
27749 else if (ueDl->mimoInfo.ri > numTxLyrs)
27751 /* In case of choosing among the columns of a
27752 * precoding matrix, choose the column corresponding
27753 * to the MAX-CQI */
27754 if (ue->mimoInfo.puschFdbkVld)
27760 precIdx = (ueDl->mimoInfo.pmi- 1)* 2 + 1;
27763 else /* if RI < numTxLyrs */
27765 precIdx = (ueDl->mimoInfo.pmi < 2)? 0:1;
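/* Worked example (illustrative, based only on the branches visible above) for
 * the 2-antenna TM4 mapping: with RI == numTxLyrs == 2 and no valid PUSCH
 * feedback, a reported PMI of 2 gives precIdx = pmi - 1 = 1; with
 * RI == numTxLyrs == 1 the same PMI gives precIdx = pmi + 1 = 3; and when
 * RI < numTxLyrs, precIdx collapses to 0 or 1 depending on whether pmi < 2. */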
27772 * @brief Determine Precoding information for TM3 4 TX Antenna.
27776 * Function: rgSCHCmnDlTM3PrecInf4
27777 * Purpose: To determine a logic of deriving precoding index
27778 * information from 36.212 table 5.3.3.1.5A-2
27780 * Invoked by: rgSCHCmnDlGetAttrForTM3
27782 * @param[in] RgSchUeCb *ue
27783 * @param[in] U8 numTxLyrs
27784 * @param[in] Bool bothCwEnbld
27789 PRIVATE U8 rgSCHCmnDlTM3PrecInf4
27797 PRIVATE U8 rgSCHCmnDlTM3PrecInf4(ue, numTxLyrs, bothCwEnbld)
27806 TRC2(rgSCHCmnDlTM3PrecInf4);
27810 precIdx = numTxLyrs - 2;
27812    else /* single CW transmission */
27821 * @brief Determine Precoding information for TM4 4 TX Antenna.
27825 * Function: rgSCHCmnDlTM4PrecInf4
27826 * Purpose: To determine a logic of deriving precoding index
27827 * information from 36.212 table 5.3.3.1.5-5
27829 * Invoked by: rgSCHCmnDlGetAttrForTM4
27831 * @param[in] RgSchUeCb *ue
27832 * @param[in] U8 numTxLyrs
27833 * @param[in] Bool bothCwEnbld
27838 PRIVATE U8 rgSCHCmnDlTM4PrecInf4
27846 PRIVATE U8 rgSCHCmnDlTM4PrecInf4(cell, ue, numTxLyrs, bothCwEnbld)
27853 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27854 U8 precInfoBaseIdx, precIdx;
27856 TRC2(rgSCHCmnDlTM4PrecInf4);
27858 precInfoBaseIdx = (ue->mimoInfo.puschFdbkVld)? (16):
27859 (ueDl->mimoInfo.pmi);
27862 precIdx = precInfoBaseIdx + (numTxLyrs-2)*17;
27864    else /* single CW transmission */
27866 precInfoBaseIdx += 1;
27867 precIdx = precInfoBaseIdx + (numTxLyrs-1)*17;
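/* Worked example (illustrative) for the 4-antenna TM4 mapping above:
 * precInfoBaseIdx is 16 when PUSCH-based feedback is valid, otherwise the
 * reported PMI. With both codewords enabled, precIdx = base + (numTxLyrs-2)*17,
 * so pmi = 3 with no PUSCH feedback and 2 layers gives precIdx = 3. With a
 * single codeword, precIdx = (base+1) + (numTxLyrs-1)*17, so the same PMI with
 * 1 layer gives precIdx = 4. */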
27874 * @brief This function determines Transmission attributes
27875 * incase of TM3 scheduling.
27879 * Function: rgSCHCmnDlGetAttrForTM3
27880 * Purpose: Determine retx TB and tx TB based on TB states.
27881 * If forceTD enabled
27882 * perform only retx TB allocation.
27883 * If retxTB == TB2 then DCI Frmt = 2A, RA Type = 0.
27884 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
27886 * perform retxTB allocation on CW1.
27888 * Determine further Scope and Swap Flag attributes
27889 * assuming a 2 CW transmission of RetxTB and new Tx TB.
27890 * If no further scope for new TX allocation
27891 * Allocate only retx TB using 2 layers if
27892 * this TB was previously transmitted using 2 layers AND
27893 * number of Tx antenna ports == 4.
27894 * otherwise do single layer precoding.
27896 * Invoked by: rgSCHCmnDlTM3TxRetx
27898 * @param[in] RgSchUeCb *ue
27899 * @param[in] RgSchDlHqProcCb *proc
27900 * @param[out] U8 *numTxLyrs
27901 * @param[out] Bool *isTraDiv
27902 * @param[out] U8 *prcdngInf
27903 * @param[out] U8 *raType
27908 PRIVATE Void rgSCHCmnDlGetAttrForTM3
27912 RgSchDlHqProcCb *proc,
27914 TfuDciFormat *dciFrmt,
27916 RgSchDlHqTbCb **retxTb,
27917 RgSchDlHqTbCb **txTb,
27923 PRIVATE Void rgSCHCmnDlGetAttrForTM3(cell, ue, proc, numTxLyrs, dciFrmt,\
27924 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
27927 RgSchDlHqProcCb *proc;
27929 TfuDciFormat *dciFrmt;
27931 RgSchDlHqTbCb **retxTb;
27932 RgSchDlHqTbCb **txTb;
27938 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27941 TRC2(rgSCHCmnDlGetAttrForTM3);
27943 /* Avoiding Tx-Retx for LAA cell as firstSchedTime is associated with
27945 /* Integration_fix: SPS Proc shall always have only one Cw */
27947 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
27948 (ueDl->mimoInfo.forceTD))
27950 ||(TRUE == rgSCHLaaSCellEnabled(cell))
27954 if ((ueDl->mimoInfo.forceTD)
27956 || (TRUE == rgSCHLaaSCellEnabled(cell))
27961 /* Transmit Diversity. Format based on dlfsEnabled
27962 * No further scope */
27963 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27965 *retxTb = &proc->tbInfo[0];
27966 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
27970 *retxTb = &proc->tbInfo[1];
27971 *dciFrmt = TFU_DCI_FORMAT_2A;
27972 *raType = RG_SCH_CMN_RA_TYPE0;
27980 /* Determine the 2 TB transmission attributes */
27981 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
27985 /* Prefer allocation of RETX TB over 2 layers rather than combining
27986 * it with a new TX. */
27987 if ((ueDl->mimoInfo.ri == 2)
27988 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
27990 /* Allocate TB on CW1, using 2 Lyrs,
27991 * Format 2, precoding accordingly */
27997 *numTxLyrs= ((*retxTb)->numLyrs + ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)].noLyr);
27999 if((*retxTb)->tbIdx == 0 && ((*retxTb)->numLyrs == 2 ) && *numTxLyrs ==3)
28002 proc->cwSwpEnabled = TRUE;
28004 else if((*retxTb)->tbIdx == 1 && ((*retxTb)->numLyrs == 1) && *numTxLyrs ==3)
28007 proc->cwSwpEnabled = TRUE;
28011 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28012 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])\
28013 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28014 *dciFrmt = TFU_DCI_FORMAT_2A;
28015 *raType = RG_SCH_CMN_RA_TYPE0;
28017 else /* frthrScp == FALSE */
28019 if (cell->numTxAntPorts == 2)
28021 /* Transmit Diversity */
28023 if ((*retxTb)->tbIdx == 0)
28025 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28029 /* If retxTB is TB2 then use format 2A */
28030 *dciFrmt = TFU_DCI_FORMAT_2A;
28031 *raType = RG_SCH_CMN_RA_TYPE0;
28036 else /* NumAntPorts == 4 */
28038 if ((*retxTb)->numLyrs == 2)
28040 /* Allocate TB on CW1, using 2 Lyrs,
28041 * Format 2A, precoding accordingly */
28043 *dciFrmt = TFU_DCI_FORMAT_2A;
28044 *raType = RG_SCH_CMN_RA_TYPE0;
28045 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28046 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, *numTxLyrs, *frthrScp);
28051 /* Transmit Diversity */
28053 if ((*retxTb)->tbIdx == 0)
28055 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28059 /* If retxTB is TB2 then use format 2A */
28060 *dciFrmt = TFU_DCI_FORMAT_2A;
28061 *raType = RG_SCH_CMN_RA_TYPE0;
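/* Worked example (illustrative, based on the visible TM3 attribute logic
 * above): when a retransmitted TB is combined with a new TX on the other
 * codeword, numTxLyrs is the retx TB's layer count plus the layer count of the
 * codeword carrying the new TX. For the 3-layer cases (e.g. a 2-layer retx TB0
 * plus a 1-layer new TX), the codeword swap is enabled so that CW0 ends up
 * with the 1-layer TB and CW1 with the 2-layer TB, consistent with Table
 * 6.3.3.2-1 of 36.211. */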
28075 * @brief This function determines Transmission attributes
28076 * incase of TM4 scheduling.
28080 * Function: rgSCHCmnDlGetAttrForTM4
28081 * Purpose: Determine retx TB and tx TB based on TB states.
28082 * If forceTD enabled
28083 * perform only retx TB allocation.
28084 * If retxTB == TB2 then DCI Frmt = 2, RA Type = 0.
28085 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
28087 * perform retxTB allocation on CW1.
28089 * Determine further Scope and Swap Flag attributes
28090 * assuming a 2 CW transmission of RetxTB and new Tx TB.
28091 * If no further scope for new TX allocation
28092 * Allocate only retx TB using 2 layers if
28093 * this TB was previously transmitted using 2 layers AND
28094 * number of Tx antenna ports == 4.
28095 * otherwise do single layer precoding.
28097 * Invoked by: rgSCHCmnDlTM4TxRetx
28099 * @param[in] RgSchUeCb *ue
28100 * @param[in] RgSchDlHqProcCb *proc
28101 * @param[out] U8 *numTxLyrs
28102 * @param[out] Bool *isTraDiv
28103 * @param[out] U8 *prcdngInf
28104 * @param[out] U8 *raType
28109 PRIVATE Void rgSCHCmnDlGetAttrForTM4
28113 RgSchDlHqProcCb *proc,
28115 TfuDciFormat *dciFrmt,
28117 RgSchDlHqTbCb **retxTb,
28118 RgSchDlHqTbCb **txTb,
28124 PRIVATE Void rgSCHCmnDlGetAttrForTM4(cell, ue, proc, numTxLyrs, dciFrmt,\
28125 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
28128 RgSchDlHqProcCb *proc;
28130 TfuDciFormat *dciFrmt;
28132 RgSchDlHqTbCb **retxTb;
28133 RgSchDlHqTbCb **txTb;
28139 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28142 TRC2(rgSCHCmnDlGetAttrForTM4);
28145 /* Integration_fix: SPS Proc shall always have only one Cw */
28147 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28148 (ueDl->mimoInfo.forceTD))
28150 ||(TRUE == rgSCHLaaSCellEnabled(cell))
28154 if ((ueDl->mimoInfo.forceTD)
28156 || (TRUE == rgSCHLaaSCellEnabled(cell))
28161 /* Transmit Diversity. Format based on dlfsEnabled
28162 * No further scope */
28163 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28165 *retxTb = &proc->tbInfo[0];
28166 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28170 *retxTb = &proc->tbInfo[1];
28171 *dciFrmt = TFU_DCI_FORMAT_2;
28172 *raType = RG_SCH_CMN_RA_TYPE0;
28180 if (ueDl->mimoInfo.ri == 1)
28182 /* single layer precoding. Format 2.
28183 * No further scope */
28184 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28186 *retxTb = &proc->tbInfo[0];
28190 *retxTb = &proc->tbInfo[1];
28193 *dciFrmt = TFU_DCI_FORMAT_2;
28194 *raType = RG_SCH_CMN_RA_TYPE0;
28196 *prcdngInf = 0; /*When RI= 1*/
28200 /* Determine the 2 TB transmission attributes */
28201 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
28203 *dciFrmt = TFU_DCI_FORMAT_2;
28204 *raType = RG_SCH_CMN_RA_TYPE0;
28207 /* Prefer allocation of RETX TB over 2 layers rather than combining
28208 * it with a new TX. */
28209 if ((ueDl->mimoInfo.ri == 2)
28210 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
28212 /* Allocate TB on CW1, using 2 Lyrs,
28213 * Format 2, precoding accordingly */
28217 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28218 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])
28219 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28221 else /* frthrScp == FALSE */
28223 if (cell->numTxAntPorts == 2)
28225 /* single layer precoding. Format 2. */
28227 *prcdngInf = (getPrecInfoFunc[1][cell->numTxAntPorts/2 - 1])\
28228 (cell, ue, *numTxLyrs, *frthrScp);
28231 else /* NumAntPorts == 4 */
28233 if ((*retxTb)->numLyrs == 2)
28235 /* Allocate TB on CW1, using 2 Lyrs,
28236 * Format 2, precoding accordingly */
28238 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28239 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28240 (cell, ue, *numTxLyrs, *frthrScp);
28245 /* Allocate TB with 1 lyr precoding,
28246 * Format 2, precoding info accordingly */
28248 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28249 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28250 (cell, ue, *numTxLyrs, *frthrScp);
28261 * @brief This function handles Retx allocation in case of TM3 UEs
28262 * where previously one of the TBs was NACKED and the other
28263 * TB is either ACKED/WAITING.
28267 * Function: rgSCHCmnDlTM3TxRetx
28268 * Purpose: Determine the TX attributes for TM3 TxRetx Allocation.
28269 *         If further scope for new TX allocation on the other TB
28270 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28271 * Add UE to cell wide RetxTx List.
28273 * Perform only RETX alloc'n on CW1.
28274 * Add UE to cell wide Retx List.
28276 * effBo is set to a non-zero value if allocation is
28279 * Invoked by: rgSCHCmnDlAllocRbTM3
28281 * @param[in] RgSchCellCb *cell
28282 * @param[in] RgSchDlSf *subFrm
28283 * @param[in] RgSchUeCb *ue
28284 * @param[in] U32 bo
28285 * @param[out] U32 *effBo
28286 * @param[in] RgSchDlHqProcCb *proc
28287 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28292 PRIVATE Void rgSCHCmnDlTM3TxRetx
28299 RgSchDlHqProcCb *proc,
28300 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28303 PRIVATE Void rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28309 RgSchDlHqProcCb *proc;
28310 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28314 RgSchDlRbAlloc *allocInfo;
28316 RgSchDlHqTbCb *retxTb, *txTb;
28322 TRC2(rgSCHCmnDlTM3TxRetx);
28326 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28329 /* Determine the transmission attributes */
28330 rgSCHCmnDlGetAttrForTM3(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28331 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28332 &allocInfo->raType);
28337 printf ("TX RETX called from proc %d cell %d \n",proc->procId, cell->cellId);
28339 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28341 if (ret == RFAILED)
28343 /* Allocation couldn't be made for Retx */
28344 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28347 /* Adding UE to RbAllocInfo RETX-TX Lst */
28348 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
28352 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28353 numTxLyrs, &numRb, effBo);
28354 if (ret == RFAILED)
28356 /* Allocation couldn't be made for Retx */
28357 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28361 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28364 /* Adding UE to allocInfo RETX Lst */
28365 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
28368 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28369 prcdngInf, numTxLyrs, subFrm);
28376 * @brief This function handles Retx allocation in case of TM4 UEs
28377 * where previously one of the TBs was NACKED and the other
28378 * TB is either ACKED/WAITING.
28382 * Function: rgSCHCmnDlTM4TxRetx
28383 * Purpose: Determine the TX attributes for TM4 TxRetx Allocation.
28384 *         If further scope for new TX allocation on the other TB
28385 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28386 * Add UE to cell wide RetxTx List.
28388 * Perform only RETX alloc'n on CW1.
28389 * Add UE to cell wide Retx List.
28391 * effBo is set to a non-zero value if allocation is
28394 * Invoked by: rgSCHCmnDlAllocRbTM4
28396 * @param[in] RgSchCellCb *cell
28397 * @param[in] RgSchDlSf *subFrm
28398 * @param[in] RgSchUeCb *ue
28399 * @param[in] U32 bo
28400 * @param[out] U32 *effBo
28401 * @param[in] RgSchDlHqProcCb *proc
28402 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28407 PRIVATE Void rgSCHCmnDlTM4TxRetx
28414 RgSchDlHqProcCb *proc,
28415 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28418 PRIVATE Void rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28424 RgSchDlHqProcCb *proc;
28425 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28429 RgSchDlRbAlloc *allocInfo;
28431 RgSchDlHqTbCb *retxTb, *txTb;
28437 TRC2(rgSCHCmnDlTM4TxRetx);
28440 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28443 /* Determine the transmission attributes */
28444 rgSCHCmnDlGetAttrForTM4(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28445 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28446 &allocInfo->raType);
28450 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28452 if (ret == RFAILED)
28454 /* Fix : syed If TxRetx allocation failed then add the UE along
28455       * with the proc to the nonSchdTxRetxUeLst and let the specific scheduler
28456 * take care of it during finalization. */
28457 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28460 /* Adding UE to RbAllocInfo RETX-TX Lst */
28461 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
28465 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28466 numTxLyrs, &numRb, effBo);
28467 if (ret == RFAILED)
28469 /* Allocation couldn't be made for Retx */
28470 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28474 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28477 /* Adding UE to allocInfo RETX Lst */
28478 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
28481 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28482 prcdngInf, numTxLyrs, subFrm)
28489 * @brief This function handles fresh TX allocation in case of TM3 UEs
28490 *        where both the TBs are free for new transmission
28495 * Function: rgSCHCmnDlTM3TxTx
28496 * Purpose: Reached here for a TM3 UE's HqP's fresh allocation
28497 * where both the TBs are free for TX scheduling.
28498 * If forceTD flag is set
28499 * perform TD on CW1 with TB1.
28504 * RI layered precoding 2 TB on 2 CW.
28505 * Set precoding info.
28506 * Add UE to cellAllocInfo.
28507 * Fill ueAllocInfo.
28509 * effBo is set to a non-zero value if allocation is
28512 * Invoked by: rgSCHCmnDlAllocRbTM3
28514 * @param[in] RgSchCellCb *cell
28515 * @param[in] RgSchDlSf *subFrm
28516 * @param[in] RgSchUeCb *ue
28517 * @param[in] U32 bo
28518 * @param[out] U32 *effBo
28519 * @param[in] RgSchDlHqProcCb *proc
28520 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28525 PRIVATE Void rgSCHCmnDlTM3TxTx
28532 RgSchDlHqProcCb *proc,
28533 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28536 PRIVATE Void rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28542 RgSchDlHqProcCb *proc;
28543 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28546 RgSchCmnDlUe *ueDl;
28547 RgSchDlRbAlloc *allocInfo;
28554 TRC2(rgSCHCmnDlTM3TxTx);
28557 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28558 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28560 /* Integration_fix: SPS Proc shall always have only one Cw */
28562 #ifdef FOUR_TX_ANTENNA
28563 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28564        (ueDl->mimoInfo.forceTD) || proc->hasDcch) /* Chandra: avoid spatial multiplexing for DCCH */
28566 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28567 (ueDl->mimoInfo.forceTD))
28570 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
28573 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28574 &allocInfo->raType);
28575 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28576 bo, &numRb, effBo);
28577 if (ret == RFAILED)
28579 /* If allocation couldn't be made then return */
28583 precInfo = 0; /* TD */
28585 else /* Precoding */
28587 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
28588 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28590 /* Spatial Multiplexing using 2 CWs */
28591 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28592 if (ret == RFAILED)
28594 /* If allocation couldn't be made then return */
28597 noTxLyrs = ueDl->mimoInfo.ri;
28598 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28599 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, getPrecInfoFunc[0], precInfoAntIdx);
28600 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
28604 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28607 /* Adding UE to RbAllocInfo TX Lst */
28608 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28610    /* Fill UE allocInfo scratch pad */
28611 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28612 precInfo, noTxLyrs, subFrm);
28619 * @brief This function handles fresh TX allocation in case of TM4 UEs
28620 *        where both the TBs are free for new transmission
28625 * Function: rgSCHCmnDlTM4TxTx
28626 * Purpose: Reached here for a TM4 UE's HqP's fresh allocation
28627 * where both the TBs are free for TX scheduling.
28628 * If forceTD flag is set
28629 * perform TD on CW1 with TB1.
28635 * Single layer precoding of TB1 on CW1.
28636 * Set precoding info.
28638 * RI layered precoding 2 TB on 2 CW.
28639 * Set precoding info.
28640 * Add UE to cellAllocInfo.
28641 * Fill ueAllocInfo.
28643 * effBo is set to a non-zero value if allocation is
28646 * Invoked by: rgSCHCmnDlAllocRbTM4
28648 * @param[in] RgSchCellCb *cell
28649 * @param[in] RgSchDlSf *subFrm
28650 * @param[in] RgSchUeCb *ue
28651 * @param[in] U32 bo
28652 * @param[out] U32 *effBo
28653 * @param[in] RgSchDlHqProcCb *proc
28654 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28659 PRIVATE Void rgSCHCmnDlTM4TxTx
28666 RgSchDlHqProcCb *proc,
28667 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28670 PRIVATE Void rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28676 RgSchDlHqProcCb *proc;
28677 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28680 RgSchCmnDlUe *ueDl;
28681 RgSchDlRbAlloc *allocInfo;
28688 TRC2(rgSCHCmnDlTM4TxTx);
28691 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28692 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28694 /* Integration_fix: SPS Proc shall always have only one Cw */
28696 #ifdef FOUR_TX_ANTENNA
28697 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28698        (ueDl->mimoInfo.forceTD) || proc->hasDcch) /* Chandra: avoid spatial multiplexing for DCCH */
28700 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28701 (ueDl->mimoInfo.forceTD))
28704 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
28707 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28708 &allocInfo->raType);
28710 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28711 bo, &numRb, effBo);
28712 if (ret == RFAILED)
28714 /* If allocation couldn't be made then return */
28718 precInfo = 0; /* TD */
28720 else /* Precoding */
28722 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
28723 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28725 if (ueDl->mimoInfo.ri == 1)
28727 /* Single Layer SM using FORMAT 2 */
28728 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28729 bo, &numRb, effBo);
28730 if (ret == RFAILED)
28732 /* If allocation couldn't be made then return */
28736 precInfo = 0; /* PrecInfo as 0 for RI=1*/
28740 /* Spatial Multiplexing using 2 CWs */
28741 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28742 if (ret == RFAILED)
28744 /* If allocation couldn't be made then return */
28747 noTxLyrs = ueDl->mimoInfo.ri;
28748 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28749 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
28755 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28758 /* Adding UE to RbAllocInfo TX Lst */
28759 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28762    /* Fill UE allocInfo scratch pad */
28763 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28764 precInfo, noTxLyrs, subFrm);
28771 * @brief This function determines the RBs and Bytes required for BO
28772 * transmission for UEs configured with TM 4.
28776 * Function: rgSCHCmnDlAllocTxRbTM4
28777 * Purpose: Invokes the functionality particular to the
28778 * current state of the TBs of the "proc".
28780 * Reference Parameter effBo is filled with alloced bytes.
28781 * Returns RFAILED if BO not satisfied at all.
28783 * Invoked by: rgSCHCmnDlAllocTxRb
28785 * @param[in] RgSchCellCb *cell
28786 * @param[in] RgSchDlSf *subFrm
28787 * @param[in] RgSchUeCb *ue
28788 * @param[in] U32 bo
28789 * @param[out] U32 *effBo
28790 * @param[in] RgSchDlHqProcCb *proc
28791 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28796 PRIVATE Void rgSCHCmnDlAllocTxRbTM4
28803 RgSchDlHqProcCb *proc,
28804 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28807 PRIVATE Void rgSCHCmnDlAllocTxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28813 RgSchDlHqProcCb *proc;
28814 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28817 TRC2(rgSCHCmnDlAllocTxRbTM4);
28819 /* Both TBs free for TX allocation */
28820 rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo,\
28821 proc, cellWdAllocInfo);
28828 * @brief This function determines the RBs and Bytes required for BO
28829 * retransmission for UEs configured with TM 4.
28833 * Function: rgSCHCmnDlAllocRetxRbTM4
28834 * Purpose: Invokes the functionality particular to the
28835 * current state of the TBs of the "proc".
28837 * Reference Parameter effBo is filled with alloced bytes.
28838 * Returns RFAILED if BO not satisfied at all.
28840 * Invoked by: rgSCHCmnDlAllocRetxRb
28842 * @param[in] RgSchCellCb *cell
28843 * @param[in] RgSchDlSf *subFrm
28844 * @param[in] RgSchUeCb *ue
28845 * @param[in] U32 bo
28846 * @param[out] U32 *effBo
28847 * @param[in] RgSchDlHqProcCb *proc
28848 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28853 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4
28860 RgSchDlHqProcCb *proc,
28861 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28864 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28870 RgSchDlHqProcCb *proc;
28871 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28874 TRC2(rgSCHCmnDlAllocRetxRbTM4);
28876 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
28877 (proc->tbInfo[1].state == HQ_TB_NACKED))
28879 /* Both TBs require RETX allocation */
28880 rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo,\
28881 proc, cellWdAllocInfo);
28885       /* One of the TBs needs RETX allocation. The other TB may or may not
28886       * be available for new TX allocation. */
28887 rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo,\
28888 proc, cellWdAllocInfo);
28897 * @brief This function determines the RBs and Bytes required for BO
28898 * transmission for UEs configured with TM 5.
28902 * Function: rgSCHCmnDlAllocTxRbTM5
28905 * Reference Parameter effBo is filled with alloced bytes.
28906 * Returns RFAILED if BO not satisfied at all.
28908 * Invoked by: rgSCHCmnDlAllocTxRb
28910 * @param[in] RgSchCellCb *cell
28911 * @param[in] RgSchDlSf *subFrm
28912 * @param[in] RgSchUeCb *ue
28913 * @param[in] U32 bo
28914 * @param[out] U32 *effBo
28915 * @param[in] RgSchDlHqProcCb *proc
28916 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28921 PRIVATE Void rgSCHCmnDlAllocTxRbTM5
28928 RgSchDlHqProcCb *proc,
28929 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28932 PRIVATE Void rgSCHCmnDlAllocTxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28938 RgSchDlHqProcCb *proc;
28939 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28942 TRC2(rgSCHCmnDlAllocTxRbTM5);
28943 #if (ERRCLASS & ERRCLS_DEBUG)
28944 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
28951 * @brief This function determines the RBs and Bytes required for BO
28952 * retransmission for UEs configured with TM 5.
28956 * Function: rgSCHCmnDlAllocRetxRbTM5
28959 * Reference Parameter effBo is filled with alloced bytes.
28960 * Returns RFAILED if BO not satisfied at all.
28962 * Invoked by: rgSCHCmnDlAllocRetxRb
28964 * @param[in] RgSchCellCb *cell
28965 * @param[in] RgSchDlSf *subFrm
28966 * @param[in] RgSchUeCb *ue
28967 * @param[in] U32 bo
28968 * @param[out] U32 *effBo
28969 * @param[in] RgSchDlHqProcCb *proc
28970 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28975 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5
28982 RgSchDlHqProcCb *proc,
28983 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28986 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28992 RgSchDlHqProcCb *proc;
28993 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28996 TRC2(rgSCHCmnDlAllocRetxRbTM5);
28997 #if (ERRCLASS & ERRCLS_DEBUG)
28998 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
29006 * @brief This function determines the RBs and Bytes required for BO
29007 * transmission for UEs configured with TM 6.
29011 * Function: rgSCHCmnDlAllocTxRbTM6
29014 * Reference Parameter effBo is filled with alloced bytes.
29015 * Returns RFAILED if BO not satisfied at all.
29017 * Invoked by: rgSCHCmnDlAllocTxRb
29019 * @param[in] RgSchCellCb *cell
29020 * @param[in] RgSchDlSf *subFrm
29021 * @param[in] RgSchUeCb *ue
29022 * @param[in] U32 bo
29023 * @param[out] U32 *effBo
29024 * @param[in] RgSchDlHqProcCb *proc
29025 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29030 PRIVATE Void rgSCHCmnDlAllocTxRbTM6
29037 RgSchDlHqProcCb *proc,
29038 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29041 PRIVATE Void rgSCHCmnDlAllocTxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29047 RgSchDlHqProcCb *proc;
29048 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29051 RgSchDlRbAlloc *allocInfo;
29052 RgSchCmnDlUe *ueDl;
29056 TRC2(rgSCHCmnDlAllocTxRbTM6);
29059 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29060 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29062 if (ueDl->mimoInfo.forceTD)
29064 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29065 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29069 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29070 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29071 /* Fill precoding information for FORMAT 1B */
29072 /* The 4 least significant bits indicate the PMI.
29073 * Bit 4 (the 5th LSB) carries the PMI confirmation flag.
29075 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29076 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
29078 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
29079 bo, &numRb, effBo);
29080 if (ret == RFAILED)
29082 /* If allocation couldn't be made then return */
29087 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
29090 /* Adding UE to RbAllocInfo TX Lst */
29091 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
29093 /* Fill UE alloc Info */
29094 allocInfo->rbsReq = numRb;
29095 allocInfo->dlSf = subFrm;
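/* Illustrative sketch (not part of the scheduler code; hypothetical helper with
 * plain C types): how the Format 1B precoding field above is packed, assuming
 * the layout implied by the code (bits 0-3 = PMI, bit 4 = PUSCH feedback /
 * PMI confirmation flag). */
#if 0
static unsigned char packPrecIdxInfo(unsigned char pmi, unsigned char puschFdbkVld)
{
   unsigned char precIdxInfo = 0;
   precIdxInfo |= (unsigned char)((puschFdbkVld & 0x1U) << 4); /* confirmation flag in bit 4 */
   precIdxInfo |= (unsigned char)(pmi & 0xFU);                 /* PMI in bits 0-3 */
   return precIdxInfo;  /* e.g. pmi = 5, puschFdbkVld = 1 -> 0x15 */
}
#endif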
29101 * @brief This function determines the RBs and Bytes required for BO
29102 * retransmission for UEs configured with TM 6.
29106 * Function: rgSCHCmnDlAllocRetxRbTM6
29109 * Reference Parameter effBo is filled with allocated bytes.
29110 * Returns RFAILED if BO not satisfied at all.
29112 * Invoked by: rgSCHCmnDlAllocRetxRb
29114 * @param[in] RgSchCellCb *cell
29115 * @param[in] RgSchDlSf *subFrm
29116 * @param[in] RgSchUeCb *ue
29117 * @param[in] U32 bo
29118 * @param[out] U32 *effBo
29119 * @param[in] RgSchDlHqProcCb *proc
29120 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29125 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6
29132 RgSchDlHqProcCb *proc,
29133 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29136 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29142 RgSchDlHqProcCb *proc;
29143 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29146 RgSchDlRbAlloc *allocInfo;
29147 RgSchCmnDlUe *ueDl;
29151 TRC2(rgSCHCmnDlAllocRetxRbTM6);
29154 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29155 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29157 if (ueDl->mimoInfo.forceTD)
29159 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29160 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29164 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29165 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29166 /* Fill precoding information for FORMAT 1B */
29167 /* The 4 least significant bits indicate the PMI.
29168 * Bit 4 (the 5th LSB) carries the PMI confirmation flag.
29170 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29171 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
29174 /* Get the Allocation in terms of RBs that are required for
29175 * this retx of TB1 */
29176 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
29178 if (ret == RFAILED)
29180 /* Allocation couldn't be made for Retx */
29181 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
29184 /* Adding UE to allocInfo RETX Lst */
29185 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
29186 /* Fill UE alloc Info */
29187 allocInfo->rbsReq = numRb;
29188 allocInfo->dlSf = subFrm;
29194 * @brief This function determines the RBs and Bytes required for BO
29195 * transmission for UEs configured with TM 7.
29199 * Function: rgSCHCmnDlAllocTxRbTM7
29202 * Reference Parameter effBo is filled with allocated bytes.
29203 * Returns RFAILED if BO not satisfied at all.
29205 * Invoked by: rgSCHCmnDlAllocTxRb
29207 * @param[in] RgSchCellCb *cell
29208 * @param[in] RgSchDlSf *subFrm
29209 * @param[in] RgSchUeCb *ue
29210 * @param[in] U32 bo
29211 * @param[out] U32 *effBo
29212 * @param[in] RgSchDlHqProcCb *proc
29213 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29218 PRIVATE Void rgSCHCmnDlAllocTxRbTM7
29225 RgSchDlHqProcCb *proc,
29226 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29229 PRIVATE Void rgSCHCmnDlAllocTxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29235 RgSchDlHqProcCb *proc;
29236 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29239 TRC2(rgSCHCmnDlAllocTxRbTM7);
29240 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29246 * @brief This function determines the RBs and Bytes required for BO
29247 * retransmission for UEs configured with TM 7.
29251 * Function: rgSCHCmnDlAllocRetxRbTM7
29254 * Reference Parameter effBo is filled with allocated bytes.
29255 * Returns RFAILED if BO not satisfied at all.
29257 * Invoked by: rgSCHCmnDlAllocRetxRb
29259 * @param[in] RgSchCellCb *cell
29260 * @param[in] RgSchDlSf *subFrm
29261 * @param[in] RgSchUeCb *ue
29262 * @param[in] U32 bo
29263 * @param[out] U32 *effBo
29264 * @param[in] RgSchDlHqProcCb *proc
29265 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29270 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7
29277 RgSchDlHqProcCb *proc,
29278 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29281 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29287 RgSchDlHqProcCb *proc;
29288 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29291 TRC2(rgSCHCmnDlAllocRetxRbTM7);
29292 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29298 * @brief This function invokes the TM specific DL TX RB Allocation routine.
29302 * Function: rgSCHCmnDlAllocTxRb
29303 * Purpose: This function invokes the TM specific
29304 * DL TX RB Allocation routine.
29306 * Invoked by: Specific Schedulers
29308 * @param[in] RgSchCellCb *cell
29309 * @param[in] RgSchDlSf *subFrm
29310 * @param[in] RgSchUeCb *ue
29311 * @param[in] U32 bo
29312 * @param[out] U32 *effBo
29313 * @param[in] RgSchDlHqProcCb *proc
29314 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29319 PUBLIC S16 rgSCHCmnDlAllocTxRb
29326 RgSchDlHqProcCb *proc,
29327 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29330 PUBLIC S16 rgSCHCmnDlAllocTxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29336 RgSchDlHqProcCb *proc;
29337 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29340 U32 newSchBits = 0;
29341 U32 prevSchBits = 0;
29342 RgSchDlRbAlloc *allocInfo;
29344 TRC2(rgSCHCmnDlAllocTxRb);
29346 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29348 ue->dl.aggTbBits = 0;
29352 /* Calculate total bits previously allocated */
29353 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29354 if (allocInfo->tbInfo[0].schdlngForTb)
29356 prevSchBits += allocInfo->tbInfo[0].bytesReq;
29358 if (allocInfo->tbInfo[1].schdlngForTb)
29360 prevSchBits += allocInfo->tbInfo[1].bytesReq;
29363 /* Call TM specific RB allocation routine */
29364 (dlAllocTxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29365 proc, cellWdAllocInfo);
29369 /* Calculate total bits newly allocated */
29370 if (allocInfo->tbInfo[0].schdlngForTb)
29372 newSchBits += allocInfo->tbInfo[0].bytesReq;
29374 if (allocInfo->tbInfo[1].schdlngForTb)
29376 newSchBits += allocInfo->tbInfo[1].bytesReq;
29378 if (newSchBits > prevSchBits)
29380 ue->dl.aggTbBits += ((newSchBits - prevSchBits) * 8);
29381 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
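/* Illustrative sketch (hypothetical names, plain C types, two TMs shown): the
 * TM-specific allocators above are dispatched through a function-pointer table
 * indexed by (txMode - 1), and only the increase in scheduled bytes over the
 * previous allocation is added to the UE's aggregated TB bits. */
#if 0
typedef void (*DlAllocFp)(unsigned bo, unsigned *schBytes);

static void allocTmA(unsigned bo, unsigned *schBytes) { *schBytes = bo;     }
static void allocTmB(unsigned bo, unsigned *schBytes) { *schBytes = bo / 2; }

static const DlAllocFp dlAllocTbl[] = { allocTmA, allocTmB };

static unsigned long updAggTbBits(unsigned txMode, unsigned bo,
                                  unsigned prevSchBytes, unsigned long aggTbBits)
{
   unsigned newSchBytes = 0;
   dlAllocTbl[txMode - 1](bo, &newSchBytes);  /* TM-specific routine */
   if (newSchBytes > prevSchBytes)
   {
      aggTbBits += (unsigned long)(newSchBytes - prevSchBytes) * 8; /* delta in bits */
   }
   return aggTbBits;
}
#endif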
29388 /* DwPTS Scheduling Changes Start */
29391 * @brief Retransmit decision for TDD. Retx is avoided in below cases
29392 * 1) DL Sf -> Spl Sf
29393 * 2) DL SF -> DL SF 0
29397 * Function: rgSCHCmnRetxAvoidTdd
29398 * Purpose: Avoid allocating RETX for cases 1, 2
29400 * Invoked by: rgSCHCmnRetxAvoidTdd
29402 * @param[in] RgSchDlSf *curSf
29403 * @param[in] RgSchCellCb *cell
29404 * @param[in] RgSchDlHqProcCb *proc
29409 PUBLIC Bool rgSCHCmnRetxAvoidTdd
29413 RgSchDlHqProcCb *proc
29416 PUBLIC Bool rgSCHCmnRetxAvoidTdd(curSf, cell, proc)
29419 RgSchDlHqProcCb *proc;
29422 RgSchTddSfType txSfType = 0;
29424 TRC2(rgSCHCmnRetxAvoidTdd);
29426 /* Get the RBs of TB that will be retransmitted */
29427 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29429 txSfType = proc->tbInfo[0].sfType;
29431 #ifdef XEON_SPECIFIC_CHANGES
29432 #ifndef XEON_TDD_SPCL
29433 /* Avoid re-transmission on a Normal SF when the corresponding TB was transmitted on a SPCL SF */
29434 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
29441 if (proc->tbInfo[1].state == HQ_TB_NACKED)
29443 /* Select the TxSf with the highest num of possible REs
29444 * In ascending order -> 1) SPL SF 2) DL_SF_0 3) DL_SF */
29445 txSfType = RGSCH_MAX(txSfType, proc->tbInfo[1].sfType);
29447 #ifdef XEON_SPECIFIC_CHANGES
29448 #ifndef XEON_TDD_SPCL
29449 /* Avoid re-transmission on a Normal SF when the corresponding TB was transmitted on a SPCL SF */
29450 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
29458 if (txSfType > curSf->sfType)
29469 /* DwPTS Scheduling Changes End */
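/* Illustrative sketch (simplified enum, not the scheduler's types): the TDD
 * retx-avoidance decision above reduces to an ordinal comparison of subframe
 * types, where a larger value means more usable REs
 * (SPL SF < DL SF 0 < DL SF). Retx is avoided when the original TX used a
 * richer subframe than the candidate retx subframe. */
#if 0
enum SfKind { SPL_SF = 0, DL_SF_0 = 1, DL_SF = 2 };

static int retxAvoid(enum SfKind txSfType, enum SfKind curSfType)
{
   return (txSfType > curSfType);  /* non-zero -> skip retx in curSf */
}
#endif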
29472 * @brief Avoid allocating RETX in case of collision
29473 * with reserved resources for BCH/PSS/SSS occasions.
29477 * Function: rgSCHCmnRetxAllocAvoid
29478 * Purpose: Avoid allocating RETX in case of collision
29479 * with reserved resources for BCH/PSS/SSS occasions
29481 * Invoked by: rgSCHCmnDlAllocRetxRb
29483 * @param[in] RgSchDlSf *subFrm
29484 * @param[in] RgSchUeCb *ue
29485 * @param[in] RgSchDlHqProcCb *proc
29490 PUBLIC Bool rgSCHCmnRetxAllocAvoid
29494 RgSchDlHqProcCb *proc
29497 PUBLIC Bool rgSCHCmnRetxAllocAvoid(subFrm, cell, proc)
29500 RgSchDlHqProcCb *proc;
29505 TRC2(rgSCHCmnRetxAllocAvoid);
29507 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29509 reqRbs = proc->tbInfo[0].dlGrnt.numRb;
29513 reqRbs = proc->tbInfo[1].dlGrnt.numRb;
29515 /* Consider the dlGrnt.numRb of the Retransmitting proc->tbInfo
29516 * and current available RBs to determine if this RETX TB
29517 * will collide with the BCH/PSS/SSS occasion */
29518 if (subFrm->sfNum % 5 == 0)
29520 if ((subFrm->bwAssigned < cell->pbchRbEnd) &&
29521 (((subFrm->bwAssigned + reqRbs) - cell->pbchRbStart) > 0))
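/* Illustrative sketch (simplified, hypothetical helper): the test above is an
 * interval-overlap check between the candidate allocation
 * [bwAssigned, bwAssigned + reqRbs) and the reserved PBCH/PSS/SSS RB range
 * [pbchRbStart, pbchRbEnd), evaluated only on subframes 0 and 5. */
#if 0
static int collidesWithPbch(unsigned sfNum, unsigned bwAssigned, unsigned reqRbs,
                            unsigned pbchRbStart, unsigned pbchRbEnd)
{
   if ((sfNum % 5) != 0)
   {
      return 0;                                /* reserved REs only on SF 0/5   */
   }
   return (bwAssigned < pbchRbEnd) &&          /* allocation starts before end  */
          (bwAssigned + reqRbs > pbchRbStart); /* and reaches past region start */
}
#endif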
29533 * @brief This function invokes the TM specific DL RETX RB Allocation routine.
29537 * Function: rgSCHCmnDlAllocRetxRb
29538 * Purpose: This function invokes the TM specific
29539 * DL RETX RB Allocation routine.
29541 * Invoked by: Specific Schedulers
29543 * @param[in] RgSchCellCb *cell
29544 * @param[in] RgSchDlSf *subFrm
29545 * @param[in] RgSchUeCb *ue
29546 * @param[in] U32 bo
29547 * @param[out] U32 *effBo
29548 * @param[in] RgSchDlHqProcCb *proc
29549 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29554 PUBLIC S16 rgSCHCmnDlAllocRetxRb
29561 RgSchDlHqProcCb *proc,
29562 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29565 PUBLIC S16 rgSCHCmnDlAllocRetxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29571 RgSchDlHqProcCb *proc;
29572 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29575 U32 newSchBits = 0;
29576 RgSchDlRbAlloc *allocInfo;
29578 TRC2(rgSCHCmnDlAllocRetxRb);
29580 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29582 ue->dl.aggTbBits = 0;
29586 /* Check for DL BW exhaustion */
29587 if (subFrm->bw <= subFrm->bwAssigned)
29591 /* Call TM specific RB allocation routine */
29592 (dlAllocRetxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29593 proc, cellWdAllocInfo);
29597 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29598 /* Calculate total bits newly allocated */
29599 if (allocInfo->tbInfo[0].schdlngForTb)
29601 newSchBits += allocInfo->tbInfo[0].bytesReq;
29603 if (allocInfo->tbInfo[1].schdlngForTb)
29605 newSchBits += allocInfo->tbInfo[1].bytesReq;
29607 ue->dl.aggTbBits += (newSchBits * 8);
29608 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
29616 * @brief This function determines the RBs and Bytes required for
29617 * Transmission on 1 CW.
29621 * Function: rgSCHCmnDlAlloc1CwTxRb
29622 * Purpose: This function determines the RBs and Bytes required
29623 * for Transmission of DL SVC BO on 1 CW.
29624 * Also, takes care of SVC by SVC allocation by tracking
29625 * previous SVCs allocations.
29626 * Returns RFAILED if BO not satisfied at all.
29628 * Invoked by: DL UE Allocation
29630 * @param[in] RgSchCellCb *cell
29631 * @param[in] RgSchDlSf *subFrm
29632 * @param[in] RgSchUeCb *ue
29633 * @param[in] RgSchDlHqTbCb *tbInfo
29634 * @param[in] U32 bo
29635 * @param[out] U8 *numRb
29636 * @param[out] U32 *effBo
29641 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb
29646 RgSchDlHqTbCb *tbInfo,
29652 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, tbInfo, bo, numRb, effBo)
29656 RgSchDlHqTbCb *tbInfo;
29665 RgSchCmnDlUe *ueDl;
29666 RgSchDlRbAlloc *allocInfo;
29669 /* Correcting wrap around issue.
29670 * This change has been done at multiple places in this function.*/
29672 TRC2(rgSCHCmnDlAlloc1CwTxRb);
29675 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29676 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29677 oldReq = ueDl->outStndAlloc;
29680 //TODO_SID: Currently setting max Tb size wrt to 5GTF TM3
29681 iTbs = ue->ue5gtfCb.mcs;
29682 ueDl->maxTbSz = MAX_5GTF_TB_SIZE * ue->ue5gtfCb.rank;
29683 ueDl->maxRb = MAX_5GTF_PRBS;
29685 ueDl->outStndAlloc += bo;
29686 /* consider Cumulative amount of this BO and bytes so far allocated */
29687 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbSz/8);
29688 /* Get the number of REs needed for this bo. */
29689 //noRes = ((bo * 8 * 1024) / eff);
29691 /* Get the number of RBs needed for this transmission */
29692 /* Number of RBs = No of REs / No of REs per RB */
29693 //tempNumRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
29694 tempNumRb = MAX_5GTF_PRBS;
29695 tbSz = RGSCH_MIN(bo, (rgSch5gtfTbSzTbl[iTbs]/8) * ue->ue5gtfCb.rank);
29697 /* DwPts Scheduling Changes End */
29698 *effBo = RGSCH_MIN(tbSz - oldReq, reqBytes);
29701 //RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs);
29706 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbSz, \
29707 iTbs, imcs, tbInfo, ue->ue5gtfCb.rank);
29708 *numRb = (U8) tempNumRb;
29710 /* Update the subframe Allocated BW field */
29711 subFrm->bwAssigned = subFrm->bwAssigned + tempNumRb - allocInfo->rbsReq;
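/* Worked example (illustrative sketch with hypothetical numbers): the 5GTF
 * path above caps the cumulative allocation at (rgSch5gtfTbSzTbl[iTbs]/8) * rank
 * bytes and only grants the delta over what was already allocated. */
#if 0
static unsigned calcEffBo(unsigned oldReqBytes,  /* ueDl->outStndAlloc before this BO */
                          unsigned newBoBytes,   /* fresh BO for this service         */
                          unsigned tbCapBytes,   /* (tbl[iTbs]/8) * rank              */
                          unsigned reqBytes)     /* caller's requested bytes          */
{
   unsigned cum  = oldReqBytes + newBoBytes;              /* outStndAlloc        */
   unsigned tbSz = (cum < tbCapBytes) ? cum : tbCapBytes; /* RGSCH_MIN           */
   unsigned eff  = tbSz - oldReqBytes;                    /* newly granted bytes */
   return (eff < reqBytes) ? eff : reqBytes;
   /* e.g. oldReq = 10000, newBo = 4000, cap = 56364, req = 4000 -> effBo = 4000 */
}
#endif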
29718 * @brief This function is invoked in the event of any TB's allocation
29719 * being underutilized by the specific scheduler. Here we reduce iMcs
29720 * to increase redundancy and hence increase reception quality at UE.
29724 * Function: rgSCHCmnRdcImcsTxTb
29725 * Purpose: This function shall reduce the iMcs in accordance with
29726 * the total consumed bytes by the UE at allocation
29729 * Invoked by: UE DL Allocation finalization routine
29730 * of specific scheduler.
29732 * @param[in] RgSchDlRbAlloc *allocInfo
29733 * @param[in] U8 tbInfoIdx
29734 * @param[in] U32 cnsmdBytes
29739 PUBLIC Void rgSCHCmnRdcImcsTxTb
29741 RgSchDlRbAlloc *allocInfo,
29746 PUBLIC Void rgSCHCmnRdcImcsTxTb(allocInfo, tbInfoIdx, cnsmdBytes)
29747 RgSchDlRbAlloc *allocInfo;
29753 /*The below functionality is not needed.*/
29758 TRC2(rgSCHCmnRdcImcsTxTb);
29760 iTbs = allocInfo->tbInfo[tbInfoIdx].iTbs;
29761 noLyr = allocInfo->tbInfo[tbInfoIdx].noLyr;
29762 numRb = allocInfo->rbsAlloc;
29765 if ((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) == cnsmdBytes)
29770 /* Get iTbs as suitable for the consumed bytes */
29771 while((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) > cnsmdBytes)
29775 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].\
29776 tbCb->dlGrnt.iMcs);
29782 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].tbCb->dlGrnt.iMcs);
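/* Illustrative sketch (simplified 2-D table, hypothetical helper): the loop
 * above steps iTbs down until the TB size for the already-fixed RB count no
 * longer exceeds the bytes actually consumed, trading unused capacity for
 * extra redundancy. A lower-bound guard is added here for clarity. */
#if 0
static unsigned reduceItbs(const unsigned (*tbSzBits)[110], /* [iTbs][numRb-1], in bits */
                           unsigned iTbs, unsigned numRb, unsigned cnsmdBytes)
{
   while ((iTbs > 0) && ((tbSzBits[iTbs][numRb - 1] / 8) > cnsmdBytes))
   {
      iTbs--;        /* smaller TB -> lower code rate for the same RBs */
   }
   return iTbs;      /* caller then maps iTbs back to iMcs */
}
#endif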
29789 * @brief This function determines the RBs and Bytes required for
29790 * Transmission on 2 CWs.
29794 * Function: rgSCHCmnDlAlloc2CwTxRb
29795 * Purpose: This function determines the RBs and Bytes required
29796 * for Transmission of DL SVC BO on 2 CWs.
29797 * Also, takes care of SVC by SVC allocation by tracking
29798 * previous SVCs allocations.
29799 * Returns RFAILED if BO not satisfied at all.
29801 * Invoked by: TM3 and TM4 DL UE Allocation
29803 * @param[in] RgSchCellCb *cell
29804 * @param[in] RgSchDlSf *subFrm
29805 * @param[in] RgSchUeCb *ue
29806 * @param[in] RgSchDlHqProcCb *proc
29807 * @param[in] U32 bo
29808 * @param[out] U8 *numRb
29809 * @param[out] U32 *effBo
29814 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb
29819 RgSchDlHqProcCb *proc,
29825 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, numRbRef, effBo)
29829 RgSchDlHqProcCb *proc;
29841 RgSchCmnDlCell *cellDl;
29842 RgSchCmnDlUe *ueDl;
29843 RgSchDlRbAlloc *allocInfo;
29846 /* Fix: MUE_PERTTI_DL */
29848 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
29849 U8 cfi = cellSch->dl.currCfi;
29856 TRC2(rgSCHCmnDlAlloc2CwTxRb);
29859 cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
29860 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29861 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29862 oldReq = ueDl->outStndAlloc;
29865 if (ueDl->maxTbBits > ue->dl.aggTbBits)
29867 availBits = ueDl->maxTbBits - ue->dl.aggTbBits;
29869 /* check if we can further allocate to this UE */
29870 if ((ue->dl.aggTbBits >= ueDl->maxTbBits) ||
29871 (allocInfo->tbInfo[0].bytesReq >= ueDl->maxTbSz/8) ||
29872 (allocInfo->tbInfo[1].bytesReq >= ueDl->maxTbSz/8) ||
29873 (allocInfo->rbsReq >= ueDl->maxRb))
29875 RLOG_ARG0(L_DEBUG,DBG_CELLID,cell->cellId,
29876 "rgSCHCmnDlAllocRb(): UEs max allocation exceed");
29880 noLyr1 = ueDl->mimoInfo.cwInfo[0].noLyr;
29881 noLyr2 = ueDl->mimoInfo.cwInfo[1].noLyr;
29883 /* If there is no CFI change, continue to use the BLER based
29885 if (ueDl->lastCfi == cfi)
29887 iTbs1 = ueDl->mimoInfo.cwInfo[0].iTbs[noLyr1 - 1];
29888 iTbs2 = ueDl->mimoInfo.cwInfo[1].iTbs[noLyr2 - 1];
29892 U8 cqi = ueDl->mimoInfo.cwInfo[0].cqi;
29894 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 0, noLyr1);
29896 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 0, noLyr1);
29899 cqi = ueDl->mimoInfo.cwInfo[1].cqi;
29901 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 1, noLyr2);
29903 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 1, noLyr2);
29907 /*ccpu00131191 and ccpu00131317 - Fix for RRC Reconfig failure
29908 * issue for VoLTE call */
29909 //if ((proc->hasDcch) || (TRUE == rgSCHLaaSCellEnabled(cell)))
29929 else if(!cellSch->dl.isDlFreqSel)
29932 /* for Tdd reduce iTbs only for SF0. SF5 contains only
29933 * SSS and can be ignored */
29934 if (subFrm->sfNum == 0)
29936 (iTbs1 > 1)? (iTbs1 -= 1) : (iTbs1 = 0);
29937 (iTbs2 > 1)? (iTbs2 -= 1) : (iTbs2 = 0);
29939 /* For SF 3 and 8, CRC failures are seen in DL.
29940 Need to do a proper fix after the reply from
29942 #ifdef CA_PHY_BRDCM_61765
29943 if ((subFrm->sfNum == 3) || (subFrm->sfNum == 8))
29945 (iTbs1 > 2)? (iTbs1 -= 2) : (iTbs1 = 0);
29946 (iTbs2 > 2)? (iTbs2 -= 2) : (iTbs2 = 0);
29954 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
29956 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
29960 eff1 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr1 - 1][cfi]))[iTbs1];
29961 eff2 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr2 - 1][cfi]))[iTbs2];
29964 bo = RGSCH_MIN(bo,availBits/8);
29965 ueDl->outStndAlloc += bo;
29966 /* consider Cumulative amount of this BO and bytes so far allocated */
29967 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbBits/8);
29968 bo = RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff1)/(eff1+eff2)),
29970 RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff2)/(eff1+eff2)),
29971 (ueDl->maxTbSz)/8) +
29972 1; /* Add 1 to adjust the truncation at weighted averaging */
29973 /* Get the number of REs needed for this bo. */
29974 noRes = ((bo * 8 * 1024) / (eff1 + eff2));
29976 /* Get the number of RBs needed for this transmission */
29977 /* Number of RBs = No of REs / No of REs per RB */
29978 numRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
29979 /* Cannot exceed the maximum number of RBs per UE */
29980 if (numRb > ueDl->maxRb)
29982 numRb = ueDl->maxRb;
29987 if(RFAILED == rgSCHLaaCmn2CwAdjustPrb(allocInfo, boTmp, &numRb, ueDl, noLyr1, noLyr2, iTbs1, iTbs2))
29990 while ((numRb <= ueDl->maxRb) &&
29991 (rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1] <= ueDl->maxTbSz) &&
29992 (rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1] <= ueDl->maxTbSz) &&
29993 ((rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8 +
29994 rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8) <= bo))
30000 availBw = subFrm->bw - subFrm->bwAssigned;
30001 /* Cannot exceed the total number of RBs in the cell */
30002 if ((S16)(numRb - allocInfo->rbsReq) > availBw)
30004 numRb = availBw + allocInfo->rbsReq;
30006 tb1Sz = rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8;
30007 tb2Sz = rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8;
30008 /* DwPts Scheduling Changes Start */
30010 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
30012 /* Max Rb for Special Sf is approximated as 4/3 of maxRb */
30013 rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, (U8*)&numRb, ueDl->maxRb*4/3,
30014 &iTbs1, &iTbs2, noLyr1,
30015 noLyr2, &tb1Sz, &tb2Sz, cfi);
30016 /* Check for available Bw */
30017 if ((S16)numRb - allocInfo->rbsReq > availBw)
30019 numRb = availBw + allocInfo->rbsReq;
30020 tb1Sz = rgTbSzTbl[noLyr1-1][iTbs1][RGSCH_MAX(numRb*3/4,1)-1]/8;
30021 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs2][RGSCH_MAX(numRb*3/4,1)-1]/8;
30025 /* DwPts Scheduling Changes End */
30026 /* Update the subframe Allocated BW field */
30027 subFrm->bwAssigned = subFrm->bwAssigned + numRb - \
30030 *effBo = RGSCH_MIN((tb1Sz + tb2Sz) - oldReq, reqBytes);
30033 if (ROK != rgSCHLaaCmn2TBPrbCheck(allocInfo, tb1Sz, tb2Sz, boTmp, effBo, iTbs1, iTbs2, numRb, proc))
30039 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs1, imcs1);
30040 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs2, imcs2);
30041 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tb1Sz, \
30042 iTbs1, imcs1, &proc->tbInfo[0], noLyr1);
30043 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30044 iTbs2, imcs2, &proc->tbInfo[1], noLyr2);
30045 *numRbRef = (U8)numRb;
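/* Worked example (illustrative, hypothetical numbers): the BO split above is
 * weighted by the per-codeword efficiencies eff1/eff2 (treated here as bits
 * per 1024 REs, consistent with the noRes formula). With bo = 3000 bytes,
 * eff1 = 4000 and eff2 = 2000:
 *   boCw1 = 3000*4000/(4000+2000) = 2000 bytes,
 *   boCw2 = 3000*2000/(4000+2000) = 1000 bytes (plus 1 byte truncation adjust),
 *   noRes = (3000*8*1024)/(4000+2000) = 4096 REs,
 *   numRb = RGSCH_CEIL(4096, noResPerRb[cfi]), e.g. ceil(4096/120) = 35 RBs,
 * after which numRb is clipped to ueDl->maxRb and the available bandwidth. */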
30053 * @brief This function determines the RBs and Bytes required for
30054 * Transmission & Retransmission on 2 CWs.
30058 * Function: rgSCHCmnDlAlloc2CwTxRetxRb
30059 * Purpose: This function determines the RBs and Bytes required
30060 * for Transmission & Retransmission on 2 CWs. Allocate
30061 * RETX TB on a better CW and restrict new TX TB by
30063 * Returns RFAILED if BO not satisfied at all.
30065 * Invoked by: TM3 and TM4 DL UE Allocation
30067 * @param[in] RgSchCellCb *cell
30068 * @param[in] RgSchDlSf *subFrm
30069 * @param[in] RgSchUeCb *ue
30070 * @param[in] RgSchDlHqTbCb *reTxTb
30071 * @param[in] RgSchDlHqTbCb *txTb
30072 * @param[out] U8 *numRb
30073 * @param[out] U32 *effBo
30078 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb
30083 RgSchDlHqTbCb *reTxTb,
30084 RgSchDlHqTbCb *txTb,
30089 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, reTxTb, txTb, numRb,\
30094 RgSchDlHqTbCb *reTxTb;
30095 RgSchDlHqTbCb *txTb;
30100 RgSchCmnDlUe *ueDl;
30101 RgSchDlRbAlloc *allocInfo;
30105 RgSchCmnDlUeCwInfo *otherCw;
30107 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
30108 U8 cfi = cellDl->currCfi;
30111 TRC2(rgSCHCmnDlAlloc2CwTxRetxRb);
30113 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
30114 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30115 otherCw = &ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)];
30118 /* Fix for ccpu00123919: In case of RETX TB scheduling, avoid recomputation of RB
30119 * and Tbs. Set all parameters the same as Init TX except RV (only for NACKED) and
30121 availBw = subFrm->bw - subFrm->bwAssigned;
30122 *numRb = reTxTb->dlGrnt.numRb;
30124 #ifdef XEON_TDD_SPCL
30125 *numRb = (reTxTb->initTxNumRbs);
30126 if(reTxTb->sfType == RG_SCH_SPL_SF_DATA && subFrm->sfType != RG_SCH_SPL_SF_DATA)
30128 *numRb = (reTxTb->initTxNumRbs*3/4);
30132 RLOG1(L_ERROR," Number of RBs [%d] are less than or equal to 3",*numRb);
30138 if ((S16)*numRb > availBw)
30142 /* Update the subframe Allocated BW field */
30143 subFrm->bwAssigned += *numRb;
30144 noLyr2 = otherCw->noLyr;
30145 RG_SCH_CMN_GET_MCS_FOR_RETX(reTxTb, imcs1);
30147 /* If there is no CFI change, continue to use the BLER based
30149 if (ueDl->lastCfi == cfi)
30151 iTbs = otherCw->iTbs[noLyr2-1];
30156 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, otherCw->cqi, cfi,
30157 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
30159 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, otherCw->cqi, cfi,
30160 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
30163 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs][*numRb-1]/8;
30164 /* DwPts Scheduling Changes Start */
30167 /* DwPts Scheduling Changes End */
30168 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs2);
30170 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], reTxTb->tbSz, \
30171 0, imcs1, reTxTb, reTxTb->numLyrs);
30173 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30174 iTbs, imcs2, txTb, noLyr2);
30176 *effBo = reTxTb->tbSz + tb2Sz;
30183 * @brief This function determines the RBs and Bytes required for BO
30184 * Retransmission on 2 CWs.
30188 * Function: rgSCHCmnDlAlloc2CwRetxRb
30189 * Purpose: This function determines the RBs and Bytes required
30190 * for BO Retransmission on 2 CWs. Allocate larger TB
30191 * on a better CW and check if the smaller TB can be
30192 * accommodated on the other CW.
30193 * Returns RFAILED if BO not satisfied at all.
30195 * Invoked by: Common Scheduler
30197 * @param[in] RgSchCellCb *cell
30198 * @param[in] RgSchDlSf *subFrm
30199 * @param[in] RgSchUeCb *ue
30200 * @param[in] RgSchDlHqProcCb *proc
30201 * @param[out] U8 *numRb
30202 * @param[out] Bool *swpFlg
30203 * @param[out] U32 *effBo
30208 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb
30213 RgSchDlHqProcCb *proc,
30219 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc,\
30220 numRb, swpFlg, effBo)
30224 RgSchDlHqProcCb *proc;
30230 RgSchDlRbAlloc *allocInfo;
30233 RgSchDlHqTbCb *lrgTbInfo, *othrTbInfo;
30235 TRC2(rgSCHCmnDlAlloc2CwRetxRb);
30237 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30240 /* Fix for ccpu00123919: In case of RETX TB scheduling, avoid recomputation of RB
30241 * and Tbs. Set all parameters the same as Init TX except RV (only for NACKED) and
30243 lrgTbInfo = &proc->tbInfo[0];
30244 othrTbInfo = &proc->tbInfo[1];
30245 *numRb = lrgTbInfo->dlGrnt.numRb;
30246 #ifdef XEON_TDD_SPCL
30247 if((lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA || othrTbInfo->sfType == RG_SCH_SPL_SF_DATA))
30249 if(lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA)
30251 *numRb = (lrgTbInfo->initTxNumRbs);
30255 *numRb = (othrTbInfo->initTxNumRbs);
30258 if(subFrm->sfType != RG_SCH_SPL_SF_DATA)
30260 *numRb = (*numRb)*3/4;
30265 RLOG1(L_ERROR," Number of RBs [%d] are less than or equal to 3",*numRb);
30270 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30274 /* Update the subframe Allocated BW field */
30275 subFrm->bwAssigned += *numRb;
30276 RG_SCH_CMN_GET_MCS_FOR_RETX(lrgTbInfo, imcs1);
30277 RG_SCH_CMN_GET_MCS_FOR_RETX(othrTbInfo, imcs2);
30278 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], lrgTbInfo->tbSz, \
30279 0, imcs1, lrgTbInfo, lrgTbInfo->numLyrs);
30280 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], othrTbInfo->tbSz, \
30281 0, imcs2, othrTbInfo, othrTbInfo->numLyrs);
30282 *effBo = lrgTbInfo->tbSz + othrTbInfo->tbSz;
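/* Worked example (illustrative): under XEON_TDD_SPCL, a TB first transmitted
 * on a special subframe but retransmitted on a normal subframe reuses 3/4 of
 * its initial RB count, e.g. initTxNumRbs = 40 on a SPL SF retransmitted on a
 * DL SF uses 40*3/4 = 30 RBs. The retx is skipped when too few RBs remain
 * (the log above fires for counts of 3 or fewer) or when the scaled RB count
 * exceeds the remaining subframe bandwidth. */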
30291 * @brief This function determines the RBs and Bytes required for BO
30292 * Retransmission on 1 CW.
30296 * Function: rgSCHCmnDlAlloc1CwRetxRb
30297 * Purpose: This function determines the RBs and Bytes required
30298 * for BO Retransmission on 1 CW, the first CW.
30299 * Returns RFAILED if BO not satisfied at all.
30301 * Invoked by: Common Scheduler
30303 * @param[in] RgSchCellCb *cell
30304 * @param[in] RgSchDlSf *subFrm
30305 * @param[in] RgSchUeCb *ue
30306 * @param[in] RgSchDlHqTbCb *tbInfo
30307 * @param[in] U8 noLyr
30308 * @param[out] U8 *numRb
30309 * @param[out] U32 *effBo
30314 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb
30319 RgSchDlHqTbCb *tbInfo,
30325 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, tbInfo, noLyr,\
30330 RgSchDlHqTbCb *tbInfo;
30336 RgSchDlRbAlloc *allocInfo;
30339 TRC2(rgSCHCmnDlAlloc1CwRetxRb);
30341 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30344 /* Fix for ccpu00123919: In case of RETX TB scheduling, avoid recomputation of RB
30345 * and Tbs. Set all parameters the same as Init TX except RV (only for NACKED) and
30347 *numRb = tbInfo->dlGrnt.numRb;
30348 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30352 /* Update the subframe Allocated BW field */
30353 subFrm->bwAssigned += *numRb;
30354 imcs = tbInfo->dlGrnt.iMcs;
30355 allocInfo->dciFormat = tbInfo->dlGrnt.dciFormat;
30356 /* Fix: For a RETX TB the iTbs is irrelevant, hence setting 0 */
30357 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbInfo->tbSz, \
30358 0, imcs, tbInfo, tbInfo->numLyrs);
30359 *effBo = tbInfo->tbSz;
30367 * @brief This function is called to handle Release PDCCH feedback for SPS UE
30371 * Function: rgSCHCmnDlRelPdcchFbk
30372 * Purpose: Invokes SPS module to handle release PDCCH feedback
30376 * @param[in] RgSchCellCb *cell
30377 * @param[in] RgSchUeCb *ue
30378 * @param[in] Bool isAck
30383 PUBLIC Void rgSCHCmnDlRelPdcchFbk
30390 PUBLIC Void rgSCHCmnDlRelPdcchFbk(cell, ue, isAck)
30397 TRC2(rgSCHCmnDlRelPdcchFbk);
30398 rgSCHCmnSpsDlRelPdcchFbk(cell, ue, isAck);
30405 * @brief This function is invoked to handle Ack processing for a HARQ proc.
30409 * Function: rgSCHCmnDlProcAck
30410 * Purpose: DTX processing for HARQ proc
30414 * @param[in] RgSchCellCb *cell
30415 * @param[in] RgSchDlHqProcCb *hqP
30420 PUBLIC Void rgSCHCmnDlProcAck
30423 RgSchDlHqProcCb *hqP
30426 PUBLIC Void rgSCHCmnDlProcAck(cell, hqP)
30428 RgSchDlHqProcCb *hqP;
30432 TRC2(rgSCHCmnDlProcAck);
30434 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
30436 /* Invoke SPS module if SPS service was scheduled for this HARQ proc */
30437 rgSCHCmnSpsDlProcAck(cell, hqP);
30441 #ifdef RGSCH_SPS_STATS
30442 extern U32 rgSchStatCrntiCeRcvCnt;
30445 * @brief This function is invoked to handle CRNTI CE reception for an UE
30449 * Function: rgSCHCmnHdlCrntiCE
30450 * Purpose: Handle CRNTI CE reception
30454 * @param[in] RgSchCellCb *cell
30455 * @param[in] RgSchDlHqProcCb *hqP
30460 PUBLIC Void rgSCHCmnHdlCrntiCE
30466 PUBLIC Void rgSCHCmnHdlCrntiCE(cell, ue)
30472 TRC2(rgSCHCmnHdlCrntiCE);
30473 #ifdef RGSCH_SPS_STATS
30474 rgSchStatCrntiCeRcvCnt++;
30477 /* When UL sync loss happened due to TA timer expiry, the UE is moved to the
30478 PDCCH order inactivity list. But when a CRNTI CE is received in msg3 from the UE,
30479 the UE is not moved into the active state, due to which RRC Reconfiguration is
30481 So here we move the UE to the active list whenever we receive the CRNTI CE and
30483 /* CR ccpu00144525 */
30484 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
30486 /* Activate this UE if it was inactive */
30487 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30488 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30491 /* Handling is same as reception of UE RESET for both DL and UL */
30492 if (ue->dl.dlSpsCfg.isDlSpsEnabled)
30494 rgSCHCmnSpsDlUeReset(cell, ue);
30496 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30498 rgSCHCmnSpsUlUeReset(cell, ue);
30506 * @brief This function is called to handle relInd from MAC for a UE
30510 * Function: rgSCHCmnUlSpsRelInd
30511 * Purpose: Invokes SPS module to handle UL SPS release for a UE
30513 * Invoked by: SCH_UTL
30515 * @param[in] RgSchCellCb *cell
30516 * @param[in] RgSchUeCb *ue
30517 * @param[in] Bool isExplRel
30522 PUBLIC Void rgSCHCmnUlSpsRelInd
30529 PUBLIC Void rgSCHCmnUlSpsRelInd(cell, ue, isExplRel)
30536 TRC2(rgSCHCmnUlSpsRelInd);
30537 rgSCHCmnSpsUlProcRelInd(cell, ue, isExplRel);
30540 } /* end of rgSCHCmnUlSpsRelInd */
30543 * @brief This function is called to handle SPS Activate Ind from MAC for a UE
30547 * Function: rgSCHCmnUlSpsActInd
30548 * Purpose: Invokes SPS module to handle UL SPS activate for a UE
30550 * Invoked by: SCH_UTL
30552 * @param[in] RgSchCellCb *cell
30553 * @param[in] RgSchUeCb *ue
30558 PUBLIC Void rgSCHCmnUlSpsActInd
30565 PUBLIC Void rgSCHCmnUlSpsActInd(cell, ue,spsSduSize)
30572 TRC2(rgSCHCmnUlSpsActInd);
30574 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30576 rgSCHCmnSpsUlProcActInd(cell, ue,spsSduSize);
30580 } /* end of rgSCHCmnUlSpsActInd */
30583 * @brief This function is called to handle CRC in UL for UEs
30584 * undergoing SPS release
30588 * Function: rgSCHCmnUlCrcInd
30589 * Purpose: Invokes SPS module to handle CRC in UL for SPS UE
30591 * Invoked by: SCH_UTL
30593 * @param[in] RgSchCellCb *cell
30594 * @param[in] RgSchUeCb *ue
30595 * @param[in] CmLteTimingInfo crcTime
30600 PUBLIC Void rgSCHCmnUlCrcInd
30604 CmLteTimingInfo crcTime
30607 PUBLIC Void rgSCHCmnUlCrcInd(cell, ue, crcTime)
30610 CmLteTimingInfo crcTime;
30614 TRC2(rgSCHCmnUlCrcInd);
30615 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30617 rgSCHCmnSpsUlProcCrcInd(cell, ue, crcTime);
30621 } /* end of rgSCHCmnUlCrcInd */
30624 * @brief This function is called to handle CRC failure in UL
30628 * Function: rgSCHCmnUlCrcFailInd
30629 * Purpose: Invokes SPS module to handle CRC failure in UL for SPS UE
30631 * Invoked by: SCH_UTL
30633 * @param[in] RgSchCellCb *cell
30634 * @param[in] RgSchUeCb *ue
30635 * @param[in] CmLteTimingInfo crcTime
30640 PUBLIC Void rgSCHCmnUlCrcFailInd
30644 CmLteTimingInfo crcTime
30647 PUBLIC Void rgSCHCmnUlCrcFailInd(cell, ue, crcTime)
30650 CmLteTimingInfo crcTime;
30654 TRC2(rgSCHCmnUlCrcFailInd);
30655 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30657 rgSCHCmnSpsUlProcDtxInd(cell, ue, crcTime);
30661 } /* end of rgSCHCmnUlCrcFailInd */
30663 #endif /* LTEMAC_SPS */
30666 * @brief BCH, BCCH, PCCH Downlink Scheduling Handler.
30670 * Function: rgSCHCmnDlBcchPcchAlloc
30671 * Purpose: This function calls common scheduler APIs to
30672 * schedule for BCCH/PCCH.
30673 * It then invokes Allocator for actual RB
30674 * allocations. It processes on the actual resources allocated
30675 * against requested to the allocator module.
30677 * Invoked by: Common Scheduler
30679 * @param[in] RgSchCellCb *cell
30683 PRIVATE Void rgSCHCmnDlBcchPcchAlloc
30688 PRIVATE Void rgSCHCmnDlBcchPcchAlloc(cell)
30693 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
30695 #ifdef LTEMAC_HDFDD
30696 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
30698 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
30701 RgInfSfAlloc *nextsfAlloc = &(cell->sfAllocArr[nextSfIdx]);
30702 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30703 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
30705 TRC2(rgSCHCmnDlBcchPcchAlloc);
30708 /*Reset the bitmask for BCCH/PCCH*/
30709 rgSCHUtlResetSfAlloc(nextsfAlloc,TRUE,FALSE);
30710 #ifndef DISABLE_MIB_SIB /* Not sending MIB and SIB to CL */
30712 rgSCHChkNUpdSiCfg(cell);
30713 rgSCHSelectSi(cell);
30716 /*Perform the scheduling for BCCH,PCCH*/
30717 rgSCHCmnDlBcchPcch(cell, allocInfo, nextsfAlloc);
30719 /* Call common allocator for RB Allocation */
30720 rgSCHBcchPcchDlRbAlloc(cell, allocInfo);
30722 /* Finalize the allocations for requested against allocated */
30723 rgSCHCmnDlBcchPcchFnlz(cell, allocInfo);
30724 #endif /* DISABLE_MIB_SIB */
30729 * @brief Handles RB allocation for BCCH/PCCH for downlink.
30733 * Function : rgSCHBcchPcchDlRbAlloc
30735 * Invoking Module Processing:
30736 * - This function is invoked for DL RB allocation of BCCH/PCCH
30738 * Processing Steps:
30739 * - If cell is frequency selective,
30740 * - Call rgSCHDlfsBcchPcchAllocRb().
30742 * - Do the processing
30744 * @param[in] RgSchCellCb *cell
30745 * @param[in] RgSchDlRbAllocInfo *allocInfo
30750 PRIVATE Void rgSCHBcchPcchDlRbAlloc
30753 RgSchCmnDlRbAllocInfo *allocInfo
30756 PRIVATE Void rgSCHBcchPcchDlRbAlloc(cell, allocInfo)
30758 RgSchCmnDlRbAllocInfo *allocInfo;
30761 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30763 TRC2(rgSCHBcchPcchDlRbAlloc);
30766 if (cellSch->dl.isDlFreqSel)
30768 cellSch->apisDlfs->rgSCHDlfsBcchPcchAllocRb(cell, allocInfo);
30772 rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo);
30779 * @brief Handles RB allocation for BCCH,PCCH for frequency
30780 * non-selective cell.
30784 * Function : rgSCHCmnNonDlfsBcchPcchRbAlloc
30786 * Invoking Module Processing:
30787 * - SCH shall invoke this if downlink frequency selective is disabled for
30788 * the cell for RB allocation.
30789 * - MAX C/I/PFS/RR shall provide the requiredBytes, required RBs
30790 * estimate and subframe for each allocation to be made to SCH.
30792 * Processing Steps:
30793 * - Allocate sequentially for BCCH,PCCH common channels.
30795 * @param[in] RgSchCellCb *cell
30796 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
30801 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc
30804 RgSchCmnDlRbAllocInfo *allocInfo
30807 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo)
30809 RgSchCmnDlRbAllocInfo *allocInfo;
30812 RgSchDlRbAlloc *reqAllocInfo;
30814 TRC2(rgSCHCmnNonDlfsBcchPcchRbAlloc);
30817 /* Allocate for PCCH */
30818 reqAllocInfo = &(allocInfo->pcchAlloc);
30819 if (reqAllocInfo->rbsReq)
30821 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30823 /* Allocate for BCCH on DLSCH */
30824 reqAllocInfo = &(allocInfo->bcchAlloc);
30825 if (reqAllocInfo->rbsReq)
30827 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30835 * @brief This function implements the handling to check and
30836 * update the SI cfg at the start of the modification period.
30840 * Function: rgSCHChkNUpdSiCfg
30841 * Purpose: This function implements handling for update of SI Cfg
30842 * at the start of modification period.
30844 * Invoked by: Scheduler
30846 * @param[in] RgSchCellCb* cell
30852 PRIVATE Void rgSCHChkNUpdSiCfg
30857 PRIVATE Void rgSCHChkNUpdSiCfg(cell)
30861 CmLteTimingInfo pdSchTmInfo;
30863 TRC2(rgSCHChkNUpdSiCfg);
30866 pdSchTmInfo = cell->crntTime;
30867 #ifdef LTEMAC_HDFDD
30868 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
30869 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
30870 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
30872 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA);
30876 /* Updating the SIB1 for Warning SI message immediately after it is received
30877 * from application. No need to wait for next modification period.
30879 if((pdSchTmInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
30880 && (RGSCH_SIB1_TX_SF_NUM == (pdSchTmInfo.subframe % RGSCH_NUM_SUB_FRAMES)))
30882 /*Check whether SIB1 with PWS has been updated*/
30883 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_PWS_UPD)
30885 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30886 cell->siCb.newSiInfo.sib1Info.sib1);
30887 cell->siCb.crntSiInfo.sib1Info.mcs =
30888 cell->siCb.newSiInfo.sib1Info.mcs;
30889 cell->siCb.crntSiInfo.sib1Info.nPrb =
30890 cell->siCb.newSiInfo.sib1Info.nPrb;
30891 cell->siCb.crntSiInfo.sib1Info.msgLen =
30892 cell->siCb.newSiInfo.sib1Info.msgLen;
30893 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_PWS_UPD;
30897 /*Check if this SFN and SF No marks the start of next modification
30898 period. If current SFN,SF No doesn't mark the start of next
30899 modification period, then return. */
30900 if(!((pdSchTmInfo.sfn % cell->siCfg.modPrd == 0)
30901 && (0 == pdSchTmInfo.subframe)))
30902 /*if(!((((pdSchTmInfo.hSfn * 1024) + pdSchTmInfo.sfn) % cell->siCfg.modPrd == 0)
30903 && (0 == pdSchTmInfo.subframe)))*/
30908 /*Check whether MIB has been updated*/
30909 if(cell->siCb.siBitMask & RGSCH_SI_MIB_UPD)
30911 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.mib,
30912 cell->siCb.newSiInfo.mib);
30913 cell->siCb.siBitMask &= ~RGSCH_SI_MIB_UPD;
30916 /*Check whether SIB1 has been updated*/
30917 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_UPD)
30919 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30920 cell->siCb.newSiInfo.sib1Info.sib1);
30921 cell->siCb.crntSiInfo.sib1Info.mcs = cell->siCb.newSiInfo.sib1Info.mcs;
30922 cell->siCb.crntSiInfo.sib1Info.nPrb = cell->siCb.newSiInfo.sib1Info.nPrb;
30923 cell->siCb.crntSiInfo.sib1Info.msgLen =
30924 cell->siCb.newSiInfo.sib1Info.msgLen;
30925 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_UPD;
30928 /*Check whether SIs have been updated*/
30929 if(cell->siCb.siBitMask & RGSCH_SI_SI_UPD)
30933 /*Check if the SI cfg has been modified and if numSi has
30934 been changed; if yes, then we need to update the
30935 pointers for all the SIs */
30936 if((cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD) &&
30937 (cell->siCfg.numSi != cell->siCb.newSiCfg.numSi))
30939 for(idx = 0;idx < cell->siCb.newSiCfg.numSi;idx++)
30941 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
30942 cell->siCb.newSiInfo.siInfo[idx].si);
30943 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
30944 cell->siCb.siArray[idx].isWarningSi = FALSE;
30946 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
30947 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
30948 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
30951 /*If numSi has been reduced then we need to free the
30952 pointers at the indexes in crntSiInfo which haven't
30953 been exercised. If numSi has increased then nothing
30954 additional is required as the above handling has taken
30956 if(cell->siCfg.numSi > cell->siCb.newSiCfg.numSi)
30958 for(idx = cell->siCb.newSiCfg.numSi;
30959 idx < cell->siCfg.numSi;idx++)
30961 RGSCH_FREE_MSG(cell->siCb.crntSiInfo.siInfo[idx].si);
30962 cell->siCb.siArray[idx].si = NULLP;
30968 /*numSi has not been updated, we just need to update the
30969 pointers for the SIs which are set to NON NULLP */
30970 /*ccpu00118260 - Correct Update of SIB2 */
30971 for(idx = 0;idx < cell->siCfg.numSi;idx++)
30973 if(NULLP != cell->siCb.newSiInfo.siInfo[idx].si)
30975 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
30976 cell->siCb.newSiInfo.siInfo[idx].si);
30978 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
30979 cell->siCb.siArray[idx].isWarningSi = FALSE;
30980 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
30981 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
30982 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
30986 cell->siCb.siBitMask &= ~RGSCH_SI_SI_UPD;
30989 /*Check whether SI cfg have been updated*/
30990 if(cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD)
30992 cell->siCfg = cell->siCb.newSiCfg;
30993 cell->siCb.siBitMask &= ~RGSCH_SI_SICFG_UPD;
31001 * @brief This function implements the selection of the SI
31002 * that is to be scheduled.
31006 * Function: rgSCHSelectSi
31007 * Purpose: This function implements the selection of SI
31008 * that is to be scheduled.
31010 * Invoked by: Scheduler
31012 * @param[in] RgSchCellCb* cell
31018 PRIVATE Void rgSCHSelectSi
31023 PRIVATE Void rgSCHSelectSi(cell)
31027 CmLteTimingInfo crntTmInfo;
31032 TRC2(rgSCHSelectSi);
31035 crntTmInfo = cell->crntTime;
31036 #ifdef LTEMAC_HDFDD
31037 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31038 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31039 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31041 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA);
31044 siWinSize = cell->siCfg.siWinSize;
31046 /* Select SI only once at the starting of the new window */
31047 if(cell->siCb.inWindow)
31049 if ((crntTmInfo.sfn % cell->siCfg.minPeriodicity) == 0 &&
31050 crntTmInfo.subframe == 0)
31052 /* Reinit inWindow at the beginning of every SI window */
31053 cell->siCb.inWindow = siWinSize - 1;
31057 cell->siCb.inWindow--;
31061 else /* New window. Re-init the winSize counter with the window length */
31063 if((cell->siCb.siArray[cell->siCb.siCtx.siId - 1].isWarningSi == TRUE)&&
31064 (cell->siCb.siCtx.retxCntRem != 0))
31066 rgSCHUtlFreeWarningSiPdu(cell);
31067 cell->siCb.siCtx.warningSiFlag = FALSE;
31070 cell->siCb.inWindow = siWinSize - 1;
31073 x = rgSCHCmnGetSiSetId(crntTmInfo.sfn, crntTmInfo.subframe,
31074 cell->siCfg.minPeriodicity);
31076 /* Window Id within a SI set. This window Id directly maps to a
31078 windowId = (((crntTmInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) +
31079 crntTmInfo.subframe) - (x * (cell->siCfg.minPeriodicity * 10)))
31082 if(windowId >= RGR_MAX_NUM_SI)
31085 /* Update the siCtx if there is a valid SI and its periodicity
31087 if (NULLP != cell->siCb.siArray[windowId].si)
31089 /* Warning SI Periodicity is same as SIB2 Periodicity */
31090 if(((cell->siCb.siArray[windowId].isWarningSi == FALSE) &&
31091 (x % (cell->siCfg.siPeriodicity[windowId]
31092 /cell->siCfg.minPeriodicity) == 0)) ||
31093 ((cell->siCb.siArray[windowId].isWarningSi == TRUE) &&
31094 (x % (cell->siCfg.siPeriodicity[0]
31095 /cell->siCfg.minPeriodicity) == 0)))
31097 cell->siCb.siCtx.siId = windowId+1;
31098 cell->siCb.siCtx.retxCntRem = cell->siCfg.retxCnt;
31099 cell->siCb.siCtx.warningSiFlag = cell->siCb.siArray[windowId].
31101 cell->siCb.siCtx.timeToTx.sfn = crntTmInfo.sfn;
31102 cell->siCb.siCtx.timeToTx.subframe = crntTmInfo.subframe;
31104 RG_SCH_ADD_TO_CRNT_TIME(cell->siCb.siCtx.timeToTx,
31105 cell->siCb.siCtx.maxTimeToTx, (siWinSize - 1))
31109 {/* Update the siCtx with invalid si Id */
31110 cell->siCb.siCtx.siId = 0;
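/* Worked example (illustrative; assumes RGSCH_NUM_SUB_FRAMES_5G = 10 and that
 * the elided divisor in the windowId computation above is the SI window size,
 * siWinSize, as the surrounding code suggests): with minPeriodicity = 8
 * (an 80-subframe SI set) and siWinSize = 5, a tick at (sfn = 9, sf = 3) gives
 *   x        = ((9*10) + 3) / 80       = 1   (SI set id),
 *   windowId = ((9*10) + 3 - 1*80) / 5 = 2,
 * so the SI mapped to window 2 of set 1 is considered for scheduling, provided
 * its periodicity check against the set id passes as coded above. */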
31118 * @brief This function implements scheduler DL allocation for
31123 * Function: rgSCHDlSiSched
31124 * Purpose: This function implements scheduler for DL allocation
31127 * Invoked by: Scheduler
31129 * @param[in] RgSchCellCb* cell
31135 PRIVATE Void rgSCHDlSiSched
31138 RgSchCmnDlRbAllocInfo *allocInfo,
31139 RgInfSfAlloc *subfrmAlloc
31142 PRIVATE Void rgSCHDlSiSched(cell, allocInfo, subfrmAlloc)
31144 RgSchCmnDlRbAllocInfo *allocInfo;
31145 RgInfSfAlloc *subfrmAlloc;
31148 CmLteTimingInfo crntTimInfo;
31154 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31155 /* DwPTS Scheduling Changes Start */
31158 U8 cfi = cellDl->currCfi;
31160 /* DwPTS Scheduling Changes End */
31162 TRC2(rgSCHDlSiSched);
31165 crntTimInfo = cell->crntTime;
31166 #ifdef LTEMAC_HDFDD
31167 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31168 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31169 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31171 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA);
31174 /* Compute the subframe for which allocation is being made.
31175 Essentially, we need pointer to the dl frame for this subframe */
31176 sf = rgSCHUtlSubFrmGet(cell, crntTimInfo);
31178 /*Check if scheduling of MIB is required */
31180 /* Since we are adding the MIB repetition logic for EMTC UEs, check whether
31181 * emtcEnable is set. If enabled, the MIB would be repeated as part of the EMTC
31182 * feature; otherwise it is scheduled at (n,0) */
31183 if(0 == cell->emtcEnable)
31186 if((crntTimInfo.sfn % RGSCH_MIB_PERIODICITY == 0)
31187 && (RGSCH_MIB_TX_SF_NUM == crntTimInfo.subframe))
31190 U8 sfnOctet, mibOct2 = 0;
31192 /*If MIB has not been yet setup by Application, return*/
31193 if(NULLP == cell->siCb.crntSiInfo.mib)
31196 SFndLenMsg(cell->siCb.crntSiInfo.mib, &mibLen);
31197 sf->bch.tbSize = mibLen;
31198 /*Fill the interface information */
31199 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, NULLD, NULLD);
31201 /*Set the bits of MIB to reflect SFN */
31202 /*First get the most significant 8 bits of SFN */
31203 sfnOctet = (U8)(crntTimInfo.sfn >> 2);
31204 /*Get the first two octets of MIB, and then update them
31205 using the SFN octet value obtained above.*/
31206 if(ROK != SExamMsg((Data *)(&mibOct1),
31207 cell->siCb.crntSiInfo.mib, 0))
31210 if(ROK != SExamMsg((Data *)(&mibOct2),
31211 cell->siCb.crntSiInfo.mib, 1))
31214 /* ccpu00114572- Fix for improper way of MIB Octet setting for SFN */
31215 mibOct1 = (mibOct1 & 0xFC) | (sfnOctet >> 6);
31216 mibOct2 = (mibOct2 & 0x03) | (sfnOctet << 2);
31217 /* ccpu00114572- Fix ends*/
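/* Worked example (illustrative): the MIB carries the 8 MSBs of the 10-bit SFN,
 * split across the first two MIB octets. For sfn = 677 (0b10 1010 0101):
 * sfnOctet = 677 >> 2 = 0xA9; then
 *   mibOct1 = (mibOct1 & 0xFC) | (0xA9 >> 6)  -> low 2 bits get 0b10,
 *   mibOct2 = (mibOct2 & 0x03) | (0xA9 << 2)  -> high 6 bits get 0b101001,
 * leaving the remaining MIB fields in those octets untouched. */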
31219 /*Now, replace the two octets in MIB */
31220 if(ROK != SRepMsg((Data)(mibOct1),
31221 cell->siCb.crntSiInfo.mib, 0))
31224 if(ROK != SRepMsg((Data)(mibOct2),
31225 cell->siCb.crntSiInfo.mib, 1))
31228 /*Copy the MIB msg buff into interface buffer */
31229 SCpyMsgMsg(cell->siCb.crntSiInfo.mib,
31230 rgSchCb[cell->instIdx].rgSchInit.region,
31231 rgSchCb[cell->instIdx].rgSchInit.pool,
31232 &subfrmAlloc->cmnLcInfo.bchInfo.pdu);
31233 /* Added Dl TB count for MIB message transmission
31234 * This counter is incremented 4 times to consider
31235 * the retransmission at the PHY level on PBCH channel*/
31237 cell->dlUlTbCnt.tbTransDlTotalCnt += RG_SCH_MIB_CNT;
31244 allocInfo->bcchAlloc.schdFirst = FALSE;
31245 /*Check if scheduling of SIB1 is required.
31246 Check of (crntTimInfo.sfn % RGSCH_SIB1_PERIODICITY == 0)
31247 is not required here since the below check takes care
31248 of SFNs applicable for this one too.*/
31249 if((crntTimInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
31250 && (RGSCH_SIB1_TX_SF_NUM == crntTimInfo.subframe))
31252 /*If SIB1 has not been yet setup by Application, return*/
31253 if(NULLP == (cell->siCb.crntSiInfo.sib1Info.sib1))
31258 allocInfo->bcchAlloc.schdFirst = TRUE;
31259 mcs = cell->siCb.crntSiInfo.sib1Info.mcs;
31260 nPrb = cell->siCb.crntSiInfo.sib1Info.nPrb;
31261 msgLen = cell->siCb.crntSiInfo.sib1Info.msgLen;
31265 /*Check if scheduling of SI can be performed.*/
31266 Bool invalid = FALSE;
31268 if(cell->siCb.siCtx.siId == 0)
31271 /*Check if the Si-Window for the current Si-Context is completed*/
31272 invalid = rgSCHCmnChkPastWin(crntTimInfo, cell->siCb.siCtx.maxTimeToTx);
31275 /* LTE_ADV_FLAG_REMOVED_START */
31276 if(cell->siCb.siCtx.retxCntRem)
31278 RGSCHLOGERROR(cell->instIdx,ERRCLS_INT_PAR,ERG011,(ErrVal)cell->siCb.siCtx.siId,
31279 "rgSCHDlSiSched(): SI not scheduled and window expired");
31281 /* LTE_ADV_FLAG_REMOVED_END */
31282 if(cell->siCb.siCtx.warningSiFlag == TRUE)
31284 rgSCHUtlFreeWarningSiPdu(cell);
31285 cell->siCb.siCtx.warningSiFlag = FALSE;
31290 /*Check the timinginfo of the current SI-Context to see if its
31291 transmission can be scheduled. */
31292 if(FALSE == (rgSCHCmnChkInWin(crntTimInfo,
31293 cell->siCb.siCtx.timeToTx,
31294 cell->siCb.siCtx.maxTimeToTx)))
31299 /*Check if retransmission count has become 0*/
31300 if(0 == cell->siCb.siCtx.retxCntRem)
31305 /* LTE_ADV_FLAG_REMOVED_START */
31306 /* Check if ABS is enabled/configured */
31307 if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
31309 /* The pattern type is RGR_ABS_MUTE, then eNB need to blank the subframe */
31310 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
31312 /* Determine next scheduling subframe is ABS or not */
31313 if(RG_SCH_ABS_ENABLED_ABS_SF == (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern
31314 [((crntTimInfo.sfn*RGSCH_NUM_SUB_FRAMES) + crntTimInfo.subframe) % RGR_ABS_PATTERN_LEN]))
31316 /* Skip the SI scheduling to next tti */
31321 /* LTE_ADV_FLAG_REMOVED_END */
31323 /*Schedule the transmission of the current SI-Context */
31324 /*Find out the messg length for the SI message */
31325 /* warningSiFlag is to differentiate between Warning SI
31327 if((rgSCHUtlGetMcsAndNPrb(cell, &nPrb, &mcs, &msgLen)) != ROK)
31332 cell->siCb.siCtx.i = RGSCH_CALC_SF_DIFF(crntTimInfo,
31333 cell->siCb.siCtx.timeToTx);
31337 /*Get the number of rb required */
31338 /*rgSCHCmnClcRbAllocForFxdTb(cell, msgLen, cellDl->ccchCqi, &rb);*/
31339 if(cellDl->bitsPerRb==0)
31341 while ((rgTbSzTbl[0][0][rb]) < (U32) (msgLen*8))
31349 rb = RGSCH_CEIL((msgLen*8), cellDl->bitsPerRb);
31351 /* DwPTS Scheduling Changes Start */
31353 if (sf->sfType == RG_SCH_SPL_SF_DATA)
31355 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
31357 /* Calculate the less RE's because of DwPTS */
31358 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
31360 /* Increase number of RBs in Spl SF to compensate for lost REs */
31361 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
31364 /* DwPTS Scheduling Changes End */
31365 /*ccpu00115595- end*/
31366 /* Additional check to see if required RBs
31367 * exceeds the available */
31368 if (rb > sf->bw - sf->bwAssigned)
31370 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHDlSiSched(): "
31371 "BW allocation failed CRNTI:%d",RGSCH_SI_RNTI);
31375 /* Update the subframe Allocated BW field */
31376 sf->bwAssigned = sf->bwAssigned + rb;
31378 /*Fill the parameters in allocInfo */
31379 allocInfo->bcchAlloc.rnti = RGSCH_SI_RNTI;
31380 allocInfo->bcchAlloc.dlSf = sf;
31381 allocInfo->bcchAlloc.rbsReq = rb;
31382 /*ccpu00116710- MCS is not getting assigned */
31383 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
31385 /* ccpu00117510 - ADD - Assignment of nPrb and other information */
31386 allocInfo->bcchAlloc.nPrb = nPrb;
31387 allocInfo->bcchAlloc.tbInfo[0].bytesReq = msgLen;
31388 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
31391 #endif /*RGR_SI_SCH*/
31394 /* ccpu00117452 - MOD - Changed macro name from
31395 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
31396 #ifdef RGR_CQI_REPT
31398 * @brief This function Updates the DL CQI for the UE.
31402 * Function: rgSCHCmnUeDlPwrCtColltCqiRept
31403 * Purpose: Manages PUSH N CQI reporting
31404 * Step 1: Store the CQI in collation array
31405 * Step 2: Increment the tracking count
31406 * Step 3: Check if it is time to send the report
31407 * Step 4: If yes, send StaInd to RRM
31408 * Step 4.1: Fill StaInd for sending collated N CQI reports
31409 * Step 4.2: Call utility function (rgSCHUtlRgrStaInd) to send rpts to RRM
31410 * Step 4.2.1: If sending was not successful, return RFAILED
31411 * Step 4.2.2: If sending was successful, return ROK
31412 * Step 5: If no, return
31413 * Invoked by: rgSCHCmnDlCqiInd
31415 * @param[in] RgSchCellCb *cell
31416 * @param[in] RgSchUeCb *ue
31417 * @param[in] RgrUeCqiRept *ueCqiRpt
31422 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept
31426 RgrUeCqiRept *ueCqiRpt
31429 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, ueCqiRpt)
31432 RgrUeCqiRept *ueCqiRpt;
31435 U8 *cqiCount = NULLP;
31437 RgrStaIndInfo *staInfo = NULLP;
31439 TRC2(rgSCHCmnUeDlPwrCtColltCqiRept)
31441 /* Step 1: Store the CQI in collation array */
31442 /* Step 2: Increment the tracking count */
31443 cqiCount = &(ue->schCqiInfo.cqiCount);
31444 ue->schCqiInfo.cqiRept[(*cqiCount)++] =
31448 /* Step 3: Check if it is time to send the report */
31449 if(RG_SCH_CQIR_IS_TIMTOSEND_CQIREPT(ue))
31451 /* Step 4: if yes, Send StaInd to RRM */
31452 retVal = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&staInfo,
31453 sizeof(RgrStaIndInfo));
31456 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
31457 "allocate memory for sending StaInd CRNTI:%d",ue->ueId);
31461 /* Step 4.1: Fill StaInd for sending collated N CQI reports */
31464 extern U32 gCqiReptToAppCount;
31465 gCqiReptToAppCount++;
31470 retVal = rgSCHUtlFillSndStaInd(cell, ue, staInfo,
31471 ue->cqiReptCfgInfo.numColltdCqiRept);
31477 } /* End of rgSCHCmnUeDlPwrCtColltCqiRept */
31479 #endif /* End of RGR_CQI_REPT */
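/* Illustrative sketch (hypothetical helper with plain C types; assumes the
 * configured report count never exceeds the buffer size): the push-N collation
 * above buffers each reported CQI and emits one status indication to RRM only
 * when the configured count is reached, after which the counter restarts. */
#if 0
#define MAX_COLLTD_CQI 16

typedef struct {
   unsigned char cqi[MAX_COLLTD_CQI];
   unsigned char count;
   unsigned char numColltdCqiRept;   /* N configured by RRM */
} CqiCollation;

static int collectCqi(CqiCollation *c, unsigned char newCqi,
                      int (*sendToRrm)(const unsigned char *cqi, unsigned char n))
{
   c->cqi[c->count++] = newCqi;          /* Step 1 & 2: store and count   */
   if (c->count < c->numColltdCqiRept)   /* Step 3: time to send?         */
   {
      return 0;                          /* Step 5: not yet               */
   }
   c->count = 0;                         /* restart collation window      */
   return sendToRrm(c->cqi, c->numColltdCqiRept); /* Step 4: report to RRM */
}
#endif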
31482 * @brief This function checks for the retransmisson
31483 * for a DTX scenario.
31490 * @param[in] RgSchCellCb *cell
31491 * @param[in] RgSchUeCb *ue
31497 PUBLIC Void rgSCHCmnChkRetxAllowDtx
31501 RgSchDlHqProcCb *proc,
31505 PUBLIC Void rgSCHCmnChkRetxAllowDtx(cell, ueCb, proc, reTxAllwd)
31508 RgSchDlHqProcCb *proc;
31512 TRC3(rgSCHCmnChkRetxAllowDtx)
31517 if ((proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX))
31519 *reTxAllwd = FALSE;
31526 * @brief API for calculating the SI Set Id
31530 * Function: rgSCHCmnGetSiSetId
31532 * This API is used for calculating the SI Set Id, as shown below
31534 * siSetId = 0 siSetId = 1
31535 * |******************|******************|---------------->
31536 * (0,0) (8,0) (16,0) (SFN, SF)
31539 * @param[in] U16 sfn
31541 * @return U16 siSetId
31544 PUBLIC U16 rgSCHCmnGetSiSetId
31551 PUBLIC U16 rgSCHCmnGetSiSetId(sfn, sf, minPeriodicity)
31554 U16 minPeriodicity;
31557 /* 80 is the minimum SI periodicity in sf. Also
31558 * all other SI periodicities are multiples of 80 */
31559 RETVALUE (((sfn * RGSCH_NUM_SUB_FRAMES_5G) + sf) / (minPeriodicity * 10));
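/* Worked example (illustrative, assuming RGSCH_NUM_SUB_FRAMES_5G = 10): with
 * minPeriodicity = 8 (an 80-subframe SI set, matching the diagram above),
 *   (sfn = 7,  sf = 9) -> (70 + 9)/80  = 0, still in set 0;
 *   (sfn = 8,  sf = 0) -> (80 + 0)/80  = 1, first tick of set 1;
 *   (sfn = 16, sf = 0) -> (160 + 0)/80 = 2. */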
31563 * @brief API for calculating the DwPts Rb, Itbs and tbSz
31567 * Function: rgSCHCmnCalcDwPtsTbSz
31569 * @param[in] RgSchCellCb *cell
31570 * @param[in] U32 bo
31571 * @param[in/out] U8 *rb
31572 * @param[in/out] U8 *iTbs
31573 * @param[in] U8 lyr
31574 * @param[in] U8 cfi
31578 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz
31588 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz(cell, bo, rb, iTbs, lyr, cfi)
31598 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31599 U32 numRE = *rb * cellDl->noResPerRb[cfi];
31600 U32 numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);
31602 TRC2(rgSCHCmnCalcDwPtsTbSz);
31604 /* DwPts Rb cannot exceed the cell Bw */
31605 numDwPtsRb = RGSCH_MIN(numDwPtsRb, cellDl->maxDlBwPerUe);
31607 /* Adjust the iTbs for optimum usage of the DwPts region.
31608 * Using the same iTbs adjustment will not work for all
31609 * special subframe configurations and iTbs levels. Hence use the
31610 * static iTbs Delta table for adjusting the iTbs */
31611 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs);
31615 while(rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1] < bo*8 &&
31616 numDwPtsRb < cellDl->maxDlBwPerUe)
31621 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
31625 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
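/* Worked example (illustrative, hypothetical numbers): the DwPTS conversion
 * above first translates the normal-SF RB demand into REs and back into DwPTS
 * RBs, then looks up the TB size at 3/4 of that count. With *rb = 20,
 * noResPerRb[cfi] = 120 and numReDwPts[cfi] = 96:
 *   numRE      = 20 * 120             = 2400,
 *   numDwPtsRb = RGSCH_CEIL(2400, 96) = 25,
 *   TB size    = rgTbSzTbl[lyr-1][*iTbs][max(25*3/4,1)-1], i.e. the 18-RB entry,
 * with numDwPtsRb grown further while the TB is still smaller than the BO. */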
 * @brief API for calculating the DwPTS RB, iTbs and tbSz
 *
 *     Function: rgSCHCmnCalcDwPtsTbSz2Cw
 *
 *  @param[in]      RgSchCellCb  *cell
 *  @param[in]      U32          bo
 *  @param[in/out]  U8           *rb
 *  @param[in]      U8           maxRb
 *  @param[in/out]  U8           *iTbs1
 *  @param[in/out]  U8           *iTbs2
 *  @param[in]      U8           lyr1
 *  @param[in]      U8           lyr2
 *  @return[in/out] U32          *tb1Sz
 *  @return[in/out] U32          *tb2Sz
 *  @param[in]      U8           cfi
PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw
PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, rb, maxRb, iTbs1, iTbs2,
                                      lyr1, lyr2, tb1Sz, tb2Sz, cfi)
   RgSchCmnDlCell *cellDl     = RG_SCH_CMN_GET_DL_CELL(cell);
   U32             numRE      = *rb * cellDl->noResPerRb[cfi];
   U32             numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);

   TRC2(rgSCHCmnCalcDwPtsTbSz2Cw);

   /* DwPTS RBs cannot exceed the cell BW */
   numDwPtsRb = RGSCH_MIN(numDwPtsRb, maxRb);

   /* Adjust the iTbs for optimum usage of the DwPTS region.
    * Using the same iTbs adjustment will not work for all
    * special subframe configurations and iTbs levels. Hence use the
    * static iTbs delta table for adjusting the iTbs. */
   RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs1);
   RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs2);

   while((rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1] +
          rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1]) < bo*8 &&
         numDwPtsRb < maxRb)

   *tb1Sz = rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
   *tb2Sz = rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
 * @brief Updates the GBR LCGs when datInd is received from MAC
 *
 *     Function: rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
 *     Purpose:  This function updates the GBR LCGs
 *               when datInd is received from MAC.
 *
 *  @param[in]  RgSchCellCb    *cell
 *  @param[in]  RgSchUeCb      *ue
 *  @param[in]  RgInfUeDatInd  *datInd
PUBLIC Void rgSCHCmnUpdUeDataIndLcg
RgInfUeDatInd  *datInd
PUBLIC Void rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
RgInfUeDatInd  *datInd;
   RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
   Inst          inst    = cell->instIdx;

   TRC2(rgSCHCmnUpdUeDataIndLcg);

   for (idx = 0; (idx < RGINF_MAX_LCG_PER_UE - 1); idx++)
      if (datInd->lcgInfo[idx].bytesRcvd != 0)
         U8  lcgId     = datInd->lcgInfo[idx].lcgId;
         U32 bytesRcvd = datInd->lcgInfo[idx].bytesRcvd;

         if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
            RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
            if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
               if(bytesRcvd > cmnLcg->effGbr)
                  bytesRcvd -= cmnLcg->effGbr;
                  cmnLcg->effDeltaMbr = (cmnLcg->effDeltaMbr > bytesRcvd) ? \
                                        (cmnLcg->effDeltaMbr - bytesRcvd) : (0);
                  cmnLcg->effGbr = 0;
               cmnLcg->effGbr -= bytesRcvd;
               /* To keep BS updated with the amount of data received for the GBR */
               cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
                                    (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
               cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr+cmnLcg->effDeltaMbr);
            else if(lcgId != 0)
               ue->ul.effAmbr = (ue->ul.effAmbr > datInd->lcgInfo[idx].bytesRcvd) ? \
                                (ue->ul.effAmbr - datInd->lcgInfo[idx].bytesRcvd) : (0);
               cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
                                    (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
               cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
               ue->ul.nonGbrLcgBs = (ue->ul.nonGbrLcgBs > datInd->lcgInfo[idx].bytesRcvd) ? \
                                    (ue->ul.nonGbrLcgBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
            ue->ul.nonLcg0Bs = (ue->ul.nonLcg0Bs > datInd->lcgInfo[idx].bytesRcvd) ? \
                               (ue->ul.nonLcg0Bs - datInd->lcgInfo[idx].bytesRcvd) : (0);

   if(TRUE == ue->isEmtcUe)
      if (cellSch->apisEmtcUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
         RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));

   if (cellSch->apisUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
      RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));
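   /* Illustrative sketch (compiled out, not part of the module): the
    * saturating bookkeeping used above. Every allowance tracked per LCG or
    * per UE (effGbr, effDeltaMbr, effAmbr, reportedBs, nonGbrLcgBs, nonLcg0Bs)
    * is reduced by the bytes received but never allowed to wrap below zero,
    * so the scheduler's view of the remaining allowance cannot underflow.
    * The helper name below is hypothetical. */
#if 0
static U32 sketchSatSub(U32 allowance, U32 bytesRcvd)
{
   return ((allowance > bytesRcvd) ? (allowance - bytesRcvd) : 0);
}

/* Usage mirroring the GBR branch above:
 *    overshoot   = sketchSatSub(bytesRcvd, effGbr);     -- bytes beyond the GBR
 *    effDeltaMbr = sketchSatSub(effDeltaMbr, overshoot);
 *    effGbr      = sketchSatSub(effGbr, bytesRcvd);
 */
#endif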
/** @brief This function initializes DL allocation lists and prepares
 *
 * Function: rgSCHCmnInitRbAlloc
 *
 * @param [in] RgSchCellCb *cell
PRIVATE Void rgSCHCmnInitRbAlloc
PRIVATE Void rgSCHCmnInitRbAlloc (cell)
   RgSchCmnCell    *cellSch = RG_SCH_CMN_GET_CELL(cell);
   CmLteTimingInfo  frm;

   TRC2(rgSCHCmnInitRbAlloc);

   /* Initializing the RgSchCmnDlRbAllocInfo structure */
   rgSCHCmnInitDlRbAllocInfo(&cellSch->allocInfo);

   frm = cellSch->dl.time;
   dlSf = rgSCHUtlSubFrmGet(cell, frm);

   dlSf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
   dlSf->numUePerGrp  = cell->cell5gtfCb.uePerGrpPerTti;
   for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
      dlSf->sfBeamInfo[idx].totVrbgAllocated = 0;
      dlSf->sfBeamInfo[idx].totVrbgRequired  = 0;
      dlSf->sfBeamInfo[idx].vrbgStart        = 0;
   dlSf->remUeCnt = cellSch->dl.maxUePerDlSf;

   /* Updating the subframe information in RBAllocInfo */
   cellSch->allocInfo.dedAlloc.dedDlSf   = dlSf;
   cellSch->allocInfo.msg4Alloc.msg4DlSf = dlSf;

   /* LTE_ADV_FLAG_REMOVED_START */
   /* Determine whether the next scheduling subframe is ABS or not */
   if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
   {
      cell->lteAdvCb.absPatternDlIdx =
         ((frm.sfn*RGSCH_NUM_SUB_FRAMES_5G) + frm.subframe) % RGR_ABS_PATTERN_LEN;
      cell->lteAdvCb.absDlSfInfo = (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern[
                                    cell->lteAdvCb.absPatternDlIdx]);
   }
   else
   {
      cell->lteAdvCb.absDlSfInfo = RG_SCH_ABS_DISABLED;
   }
   /* LTE_ADV_FLAG_REMOVED_END */

   cellSch->allocInfo.ccchSduAlloc.ccchSduDlSf = dlSf;

   /* Update subframe-wide allocation information with SPS allocation */
   rgSCHCmnSpsDlUpdDlSfAllocWithSps(cell, frm, dlSf);
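   /* Worked example (illustrative only): taking RGR_ABS_PATTERN_LEN as 40 and
    * RGSCH_NUM_SUB_FRAMES_5G as 10 purely for illustration, the scheduling
    * time (sfn, subframe) = (12, 7) maps to
    *    ((12 * 10) + 7) % 40 = 127 % 40 = 7,
    * so bit 7 of the configured absPattern[] decides whether this subframe is
    * treated as an Almost Blank Subframe. */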
 * @brief Sends the TX mode change indication to RRM
 *
 *     Function: rgSCHCmnSendTxModeInd(cell, ue, newTxMode)
 *     Purpose:  This function sends the TX mode change
 *               indication to RRM
 *
 *  @param[in]  RgSchCellCb *cell
 *  @param[in]  RgSchUeCb   *ue
 *  @param[in]  U8          newTxMode
PRIVATE Void rgSCHCmnSendTxModeInd
PRIVATE Void rgSCHCmnSendTxModeInd(cell, ue, newTxMode)
   RgmTransModeInd *txModeChgInd;
   RgSchCmnDlUe    *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);

   TRC2(rgSCHCmnSendTxModeInd);

   if(!(ueDl->mimoInfo.forceTD & RG_SCH_CMN_TD_TXMODE_RECFG))
      if(SGetSBuf(cell->rgmSap->sapCfg.sapPst.region,
                  cell->rgmSap->sapCfg.sapPst.pool, (Data**)&txModeChgInd,
                  sizeof(RgmTransModeInd)) != ROK)
      RG_SCH_FILL_RGM_TRANSMODE_IND(ue->ueId, cell->cellId, newTxMode, txModeChgInd);
      RgUiRgmChangeTransModeInd(&(cell->rgmSap->sapCfg.sapPst),
                                cell->rgmSap->sapCfg.suId, txModeChgInd);

   ue->mimoInfo.txModUpChgFactor   = 0;
   ue->mimoInfo.txModDownChgFactor = 0;
   ueDl->laCb[0].deltaiTbs = 0;
 * @brief Checks & updates the TM mode change thresholds based on the
 *        reported and previous iTbs values
 *
 *     Function: rgSchCheckAndTriggerModeChange(cell, ue, reportediTbs, previTbs, maxiTbs)
 *     Purpose:  This function updates and checks the thresholds for a TM mode change
 *
 *  @param[in]  RgSchCellCb *cell
 *  @param[in]  RgSchUeCb   *ue
 *  @param[in]  U8          iTbs
PUBLIC Void rgSchCheckAndTriggerModeChange
PUBLIC Void rgSchCheckAndTriggerModeChange(cell, ue, reportediTbs, previTbs, maxiTbs)
   RgrTxMode  txMode;       /*!< UE's transmission mode */
   RgrTxMode  modTxMode;    /*!< UE's transmission mode */

   TRC2(rgSchCheckAndTriggerModeChange);

   txMode = ue->mimoInfo.txMode;

   /* Check for step down */
   /* Step down only when TM4 is configured */
   if(RGR_UE_TM_4 == txMode)
      if((previTbs <= reportediTbs) && ((reportediTbs - previTbs) >= RG_SCH_MODE_CHNG_STEPDOWN_CHECK_FACTOR))
         ue->mimoInfo.txModDownChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
         ue->mimoInfo.txModDownChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
      ue->mimoInfo.txModDownChgFactor =
         RGSCH_MAX(ue->mimoInfo.txModDownChgFactor, -(RG_SCH_MODE_CHNG_STEPDOWN_THRSHD));

      if(ue->mimoInfo.txModDownChgFactor >= RG_SCH_MODE_CHNG_STEPDOWN_THRSHD)
         /* Trigger mode step down */
         modTxMode = RGR_UE_TM_3;
         rgSCHCmnSendTxModeInd(cell, ue, modTxMode);

   /* Check for step up */
   /* Step up only when TM3 is configured; the maximum possible mode is TM4 */
   if(RGR_UE_TM_3 == txMode)
      if((previTbs > reportediTbs) || (maxiTbs == previTbs))
         ue->mimoInfo.txModUpChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
         ue->mimoInfo.txModUpChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
      ue->mimoInfo.txModUpChgFactor =
         RGSCH_MAX(ue->mimoInfo.txModUpChgFactor, -(RG_SCH_MODE_CHNG_STEPUP_THRSHD));

      /* Check if a TM step up needs to be triggered */
      if(ue->mimoInfo.txModUpChgFactor >= RG_SCH_MODE_CHNG_STEPUP_THRSHD)
         /* Trigger mode change */
         modTxMode = RGR_UE_TM_4;
         rgSCHCmnSendTxModeInd(cell, ue, modTxMode);
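   /* Illustrative sketch (compiled out, not part of the module): the up/down
    * hysteresis counter used above, in isolation. A favourable sample adds a
    * step-up weight, an unfavourable one subtracts a step-down weight, the
    * counter is floored at minus the threshold, and a mode change is
    * triggered only once the counter reaches the threshold (the real code
    * then clears the counters inside rgSCHCmnSendTxModeInd). All names below
    * are hypothetical. */
#if 0
static Bool sketchHystStep(S16 *ctr, Bool favourable, S16 up, S16 down, S16 thr)
{
   *ctr = favourable ? (S16)(*ctr + up) : (S16)(*ctr - down);
   if (*ctr < -thr)
   {
      *ctr = -thr;          /* floor: bad samples cannot build unbounded debt */
   }
   if (*ctr >= thr)
   {
      return (TRUE);        /* threshold crossed: trigger the TM change       */
   }
   return (FALSE);
}
#endif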
 * @brief Checks whether CSG UEs have priority in the DL at the current time
 *
 *     Function: rgSCHCmnIsDlCsgPrio (cell)
 *     Purpose:  This function returns whether CSG UEs
 *               have priority at the current time
 *
 *     Invoked by: Scheduler
 *
 *  @param[in]  RgSchCellCb *cell
PUBLIC Bool rgSCHCmnIsDlCsgPrio
PUBLIC Bool rgSCHCmnIsDlCsgPrio(cell)
   RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);

   TRC2(rgSCHCmnIsDlCsgPrio)
   /* Calculating the percentage of resources allocated */
   if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
   if(((cmnDlCell->ncsgPrbCnt * 100) / cmnDlCell->totPrbCnt) < cell->minDlResNonCsg)
 * @brief Checks whether CSG UEs have priority in the UL at the current time
 *
 *     Function: rgSCHCmnIsUlCsgPrio (cell)
 *     Purpose:  This function returns whether CSG UEs
 *               have priority at the current time
 *
 *     Invoked by: Scheduler
 *
 *  @param[in]  RgSchCellCb *cell
PUBLIC Bool rgSCHCmnIsUlCsgPrio
PUBLIC Bool rgSCHCmnIsUlCsgPrio(cell)
   RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);

   TRC2(rgSCHCmnIsUlCsgPrio)
   /* Calculating the percentage of resources allocated */
   if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
   if (((cmnUlCell->ncsgPrbCnt * 100) / cmnUlCell->totPrbCnt) < cell->minUlResNonCsg)
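   /* Worked example (illustrative only): with totPrbCnt = 200 PRBs observed
    * in the measurement window and ncsgPrbCnt = 30 of them given to non-CSG
    * UEs, the non-CSG share is (30 * 100) / 200 = 15%. If minUlResNonCsg (or
    * minDlResNonCsg for the DL check above) is configured as 20, the
    * condition 15 < 20 holds, i.e. the non-CSG UEs have not yet received
    * their guaranteed minimum share of the resources. */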
/**********************************************************************

**********************************************************************/