1 /*******************************************************************************
2 ################################################################################
3 # Copyright (c) [2017-2019] [Radisys] #
5 # Licensed under the Apache License, Version 2.0 (the "License"); #
6 # you may not use this file except in compliance with the License. #
7 # You may obtain a copy of the License at #
9 # http://www.apache.org/licenses/LICENSE-2.0 #
11 # Unless required by applicable law or agreed to in writing, software #
12 # distributed under the License is distributed on an "AS IS" BASIS, #
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
14 # See the License for the specific language governing permissions and #
15 # limitations under the License. #
16 ################################################################################
17 *******************************************************************************/
19 /************************************************************************
Desc: C source code for Entry point functions
29 **********************************************************************/
31 /** @file rg_sch_cmn.c
32 @brief This file implements the schedulers main access to MAC layer code.
35 static const char* RLOG_MODULE_NAME="MAC";
36 static int RLOG_FILE_ID=187;
37 static int RLOG_MODULE_ID=4096;
39 /* header include files -- defines (.h) */
40 #include "envopt.h" /* environment options */
41 #include "envdep.h" /* environment dependent */
42 #include "envind.h" /* environment independent */
43 #include "gen.h" /* general layer */
44 #include "ssi.h" /* system service interface */
45 #include "cm_hash.h" /* common hash list */
46 #include "cm_llist.h" /* common linked list library */
47 #include "cm_err.h" /* common error */
48 #include "cm_lte.h" /* common LTE */
55 #include "rg_sch_err.h"
56 #include "rg_sch_inf.h"
58 #include "rg_sch_cmn.h"
59 #include "rl_interface.h"
60 #include "rl_common.h"
62 /* header/extern include files (.x) */
63 #include "gen.x" /* general layer typedefs */
64 #include "ssi.x" /* system services typedefs */
65 #include "cm5.x" /* common timers */
66 #include "cm_hash.x" /* common hash list */
67 #include "cm_lib.x" /* common library */
68 #include "cm_llist.x" /* common linked list */
69 #include "cm_mblk.x" /* memory management */
70 #include "cm_tkns.x" /* common tokens */
71 #include "cm_lte.x" /* common tokens */
72 #include "tfu.x" /* TFU types */
73 #include "lrg.x" /* layer management typedefs for MAC */
74 #include "rgr.x" /* layer management typedefs for MAC */
75 #include "rgm.x" /* layer management typedefs for MAC */
76 #include "rg_sch_inf.x" /* typedefs for Scheduler */
77 #include "rg_sch.x" /* typedefs for Scheduler */
78 #include "rg_sch_cmn.x" /* typedefs for Scheduler */
80 #include "lrg.x" /* Stats Structures */
81 #endif /* MAC_SCH_STATS */
84 #endif /* __cplusplus */
87 EXTERN U32 emtcStatsUlTomSrInd;
88 EXTERN U32 emtcStatsUlBsrTmrTxp;
/* Absolute difference between two ITBS values.
 * NOTE: both arguments are expanded twice, so callers must not pass
 * expressions with side effects. */
#define RG_ITBS_DIFF(_x, _y) (((_x) < (_y)) ? ((_y) - (_x)) : ((_x) - (_y)))
92 EXTERN Void rgSCHSc1UlInit ARGS((RgUlSchdApis *apis));
93 #ifdef RG_PHASE2_SCHED
94 EXTERN Void rgSCHRrUlInit ARGS((RgUlSchdApis *apis));
96 EXTERN Void rgSCHEmtcHqInfoFree ARGS((RgSchCellCb *cell, RgSchDlHqProcCb *hqP));
97 EXTERN Void rgSCHEmtcRrUlInit ARGS((RgUlSchdApis *apis));
98 EXTERN Void rgSCHEmtcCmnDlInit ARGS((Void));
99 EXTERN Void rgSCHEmtcCmnUlInit ARGS((Void));
100 EXTERN Void rgSCHEmtcCmnUeNbReset ARGS((RgSchUeCb *ueCb));
101 EXTERN RgSchCmnCqiToTbs *rgSchEmtcCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
103 EXTERN Void rgSCHMaxciUlInit ARGS((RgUlSchdApis *apis));
104 EXTERN Void rgSCHPfsUlInit ARGS((RgUlSchdApis *apis));
106 EXTERN Void rgSCHSc1DlInit ARGS((RgDlSchdApis *apis));
107 #ifdef RG_PHASE2_SCHED
108 EXTERN Void rgSCHRrDlInit ARGS((RgDlSchdApis *apis));
110 EXTERN Void rgSCHEmtcRrDlInit ARGS((RgDlEmtcSchdApis *apis));
112 EXTERN Void rgSCHMaxciDlInit ARGS((RgDlSchdApis *apis));
113 EXTERN Void rgSCHPfsDlInit ARGS((RgDlSchdApis *apis));
115 EXTERN Void rgSCHDlfsInit ARGS((RgDlfsSchdApis *apis));
119 EXTERN Void rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl ARGS((RgSchCellCb *cell));
120 EXTERN Void rgSCHCmnGetEmtcDciFrmtSizes ARGS((RgSchCellCb *cell));
121 EXTERN Void rgSCHEmtcRrUlProcRmvFrmRetx ARGS((RgSchCellCb *cell, RgSchUlHqProcCb *proc));
122 EXTERN S16 rgSCHCmnPrecompEmtcMsg3Vars
124 RgSchCmnUlCell *cellUl,
130 PUBLIC Void rgSCHEmtcCmnUeCcchSduDel
135 EXTERN Void rgSCHEmtcRmvFrmTaLst
137 RgSchCmnDlCell *cellDl,
140 EXTERN Void rgSCHEmtcInitTaLst
142 RgSchCmnDlCell *cellDl
144 EXTERN Void rgSCHEmtcAddToTaLst
146 RgSchCmnDlCell *cellDl,
153 PRIVATE Void rgSCHDlSiSched ARGS((RgSchCellCb *cell,
154 RgSchCmnDlRbAllocInfo *allocInfo,
155 RgInfSfAlloc *subfrmAlloc));
156 PRIVATE Void rgSCHChkNUpdSiCfg ARGS((RgSchCellCb *cell));
157 PRIVATE Void rgSCHSelectSi ARGS((RgSchCellCb *cell));
158 #endif /*RGR_SI_SCH*/
159 /* LTE_ADV_FLAG_REMOVED_START */
162 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
170 PRIVATE S16 rgSCHCmnBuildRntpInfo (
180 PUBLIC Void rgSCHCmnDlSpsSch
184 /* LTE_ADV_FLAG_REMOVED_END */
186 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc ARGS((
188 RgSchCmnDlRbAllocInfo *allocInfo
190 PRIVATE Void rgSCHBcchPcchDlRbAlloc ARGS((
192 RgSchCmnDlRbAllocInfo *allocInfo
194 PRIVATE Void rgSCHCmnDlBcchPcchAlloc ARGS((
198 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
201 TfuDlCqiPucch *pucchCqi,
202 RgrUeCqiRept *ueCqiRept,
204 Bool *is2ndCwCqiAvail
206 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
209 TfuDlCqiPusch *puschCqi,
210 RgrUeCqiRept *ueCqiRept,
212 Bool *is2ndCwCqiAvail
215 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
218 TfuDlCqiPucch *pucchCqi
220 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
223 TfuDlCqiPusch *puschCqi
226 /* ccpu00117452 - MOD - Changed macro name from
227 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
229 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept ARGS((
232 RgrUeCqiRept *ueCqiRept));
233 #endif /* End of RGR_CQI_REPT */
234 /* Fix: syed align multiple UEs to refresh at same time */
235 PRIVATE Void rgSCHCmnGetRefreshPer ARGS((
239 PRIVATE S16 rgSCHCmnApplyUeRefresh ARGS((
243 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa ARGS
248 PRIVATE Void rgSCHCheckAndSetTxScheme ARGS
256 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz ARGS
266 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw ARGS
283 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
287 RgSchDlRbAlloc *allocInfo,
291 PRIVATE Void rgSCHCmnInitRbAlloc ARGS
297 #endif /* __cplusplus */
301 PUBLIC RgSchdApis rgSchCmnApis;
302 PRIVATE RgUlSchdApis rgSchUlSchdTbl[RGSCH_NUM_SCHEDULERS];
303 PRIVATE RgDlSchdApis rgSchDlSchdTbl[RGSCH_NUM_SCHEDULERS];
305 PRIVATE RgUlSchdApis rgSchEmtcUlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
306 PRIVATE RgDlEmtcSchdApis rgSchEmtcDlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
308 #ifdef RG_PHASE2_SCHED
309 PRIVATE RgDlfsSchdApis rgSchDlfsSchdTbl[RGSCH_NUM_DLFS_SCHEDULERS];
311 PRIVATE RgUlSchdInits rgSchUlSchdInits = RGSCH_ULSCHED_INITS;
312 PRIVATE RgDlSchdInits rgSchDlSchdInits = RGSCH_DLSCHED_INITS;
314 PRIVATE RgEmtcUlSchdInits rgSchEmtcUlSchdInits = RGSCH_EMTC_ULSCHED_INITS;
315 PRIVATE RgEmtcDlSchdInits rgSchEmtcDlSchdInits = RGSCH_EMTC_DLSCHED_INITS;
317 #if (defined (RG_PHASE2_SCHED) && defined (TFU_UPGRADE))
318 PRIVATE RgDlfsSchdInits rgSchDlfsSchdInits = RGSCH_DLFSSCHED_INITS;
/* Per-transmission-mode DL RB allocation handler. 'bo' is the buffer
 * occupancy to serve; '*effBo' receives the amount actually allocated
 * (presumably the effective BO satisfied — confirm at call sites). */
typedef Void (*RgSchCmnDlAllocRbFunc) ARGS((RgSchCellCb *cell, RgSchDlSf *subFrm,
RgSchUeCb *ue, U32 bo, U32 *effBo, RgSchDlHqProcCb *proc,
RgSchCmnDlRbAllocInfo *cellWdAllocInfo));
/* Returns precoding information for a UE given the number of layers and
 * whether both codewords are enabled (likely the DCI precoding-info
 * field — confirm at usage). */
typedef U8 (*RgSchCmnDlGetPrecInfFunc) ARGS((RgSchCellCb *cell, RgSchUeCb *ue,
U8 numLyrs, Bool bothCwEnbld));
327 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1 ARGS((
329 RgSchDlRbAlloc *rbAllocInfo,
330 RgSchDlHqProcCb *hqP,
334 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A ARGS((
336 RgSchDlRbAlloc *rbAllocInfo,
337 RgSchDlHqProcCb *hqP,
341 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B ARGS((
343 RgSchDlRbAlloc *rbAllocInfo,
344 RgSchDlHqProcCb *hqP,
348 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2 ARGS((
350 RgSchDlRbAlloc *rbAllocInfo,
351 RgSchDlHqProcCb *hqP,
355 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A ARGS((
357 RgSchDlRbAlloc *rbAllocInfo,
358 RgSchDlHqProcCb *hqP,
363 PRIVATE Void rgSCHCmnDlAllocTxRbTM1 ARGS((
369 RgSchDlHqProcCb *proc,
370 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
372 PRIVATE Void rgSCHCmnDlAllocTxRbTM2 ARGS((
378 RgSchDlHqProcCb *proc,
379 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
381 PRIVATE Void rgSCHCmnDlAllocTxRbTM3 ARGS((
387 RgSchDlHqProcCb *proc,
388 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
390 PRIVATE Void rgSCHCmnDlAllocTxRbTM4 ARGS((
396 RgSchDlHqProcCb *proc,
397 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
400 PRIVATE Void rgSCHCmnDlAllocTxRbTM5 ARGS((
406 RgSchDlHqProcCb *proc,
407 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
410 PRIVATE Void rgSCHCmnDlAllocTxRbTM6 ARGS((
416 RgSchDlHqProcCb *proc,
417 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
419 PRIVATE Void rgSCHCmnDlAllocTxRbTM7 ARGS((
425 RgSchDlHqProcCb *proc,
426 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
428 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1 ARGS((
434 RgSchDlHqProcCb *proc,
435 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
437 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2 ARGS((
443 RgSchDlHqProcCb *proc,
444 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
446 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3 ARGS((
452 RgSchDlHqProcCb *proc,
453 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
455 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4 ARGS((
461 RgSchDlHqProcCb *proc,
462 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
465 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5 ARGS((
471 RgSchDlHqProcCb *proc,
472 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
475 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6 ARGS((
481 RgSchDlHqProcCb *proc,
482 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
484 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7 ARGS((
490 RgSchDlHqProcCb *proc,
491 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
495 PRIVATE U8 rgSchGetN1ResCount ARGS ((
499 PUBLIC Bool rgSchCmnChkDataOnlyOnPcell
505 PUBLIC U8 rgSCHCmnCalcPcqiBitSz
/* Functions specific to each transmission mode for DL Tx RB Allocation.
 * Indexed by (transmission mode - 1); TM5 has no handler (NULLP) in this
 * build variant. */
RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[7] = {rgSCHCmnDlAllocTxRbTM1,
rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7};
/* Functions specific to each transmission mode for DL Retx RB Allocation.
 * Indexed by (transmission mode - 1); TM5 has no handler (NULLP) in this
 * build variant. */
RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[7] = {rgSCHCmnDlAllocRetxRbTM1,
rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7};
/* Functions specific to each transmission mode for DL Tx RB Allocation.
 * 9-entry variant (compile-time guarded); indexed by (TM - 1).
 * TM5, TM8 and TM9 entries are NULLP (no handler). */
RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[9] = {rgSCHCmnDlAllocTxRbTM1,
rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7, NULLP, NULLP};
/* Functions specific to each transmission mode for DL Retx RB Allocation.
 * 9-entry variant (compile-time guarded); indexed by (TM - 1).
 * TM5, TM8 and TM9 entries are NULLP (no handler). */
RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[9] = {rgSCHCmnDlAllocRetxRbTM1,
rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7, NULLP, NULLP};
535 PRIVATE U8 rgSCHCmnDlTM3PrecInf2 ARGS((
541 PRIVATE U8 rgSCHCmnDlTM3PrecInf4 ARGS((
547 PRIVATE U8 rgSCHCmnDlTM4PrecInf2 ARGS((
553 PRIVATE U8 rgSCHCmnDlTM4PrecInf4 ARGS((
559 /* Functions specific to each transmission mode for DL RB Allocation*/
560 RgSchCmnDlGetPrecInfFunc getPrecInfoFunc[2][2] = {
561 {rgSCHCmnDlTM3PrecInf2, rgSCHCmnDlTM3PrecInf4},
562 {rgSCHCmnDlTM4PrecInf2, rgSCHCmnDlTM4PrecInf4}
565 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb ARGS((
569 RgSchDlHqTbCb *tbInfo,
574 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb ARGS((
578 RgSchDlHqProcCb *proc,
583 PRIVATE Void rgSCHCmnDlTM3TxTx ARGS((
589 RgSchDlHqProcCb *proc,
590 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
592 PRIVATE Void rgSCHCmnDlTM3TxRetx ARGS((
598 RgSchDlHqProcCb *proc,
599 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
601 PRIVATE Void rgSCHCmnDlTM3RetxRetx ARGS((
607 RgSchDlHqProcCb *proc,
608 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
611 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc ARGS((
617 /* LTE_ADV_FLAG_REMOVED_START */
619 PRIVATE Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc ARGS((
626 /* LTE_ADV_FLAG_REMOVED_END */
627 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx ARGS((
629 RgSchCmnDlRbAllocInfo *allocInfo,
631 RgSchDlHqProcCb *proc
633 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx ARGS((
635 RgSchCmnDlRbAllocInfo *allocInfo,
639 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst ARGS((
640 RgSchCmnDlRbAllocInfo *allocInfo,
642 RgSchDlHqProcCb *proc
644 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb ARGS((
648 RgSchDlHqTbCb *reTxTb,
653 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb ARGS((
657 RgSchDlHqProcCb *proc,
662 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb ARGS((
666 RgSchDlHqTbCb *tbInfo,
672 PRIVATE Void rgSCHCmnFillHqPTb ARGS((
674 RgSchDlRbAlloc *rbAllocInfo,
680 PRIVATE Void rgSCHCmnDlGetBestFitHole ARGS((
689 #ifdef RGSCH_SPS_UNUSED
690 PRIVATE U32 rgSCHCmnGetRaType1Mask ARGS((
696 PRIVATE U32 rgSCHCmnGetRaType0Mask ARGS((
700 PRIVATE U32 rgSCHCmnGetRaType2Mask ARGS((
706 PUBLIC Bool rgSCHCmnRetxAllocAvoid ARGS((
709 RgSchDlHqProcCb *proc
712 PUBLIC U16 rgSCHCmnGetSiSetId ARGS((
720 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi ARGS((
//TODO_SID: Currently table is only for 100 PRBs. Need to modify wrt VRBG table 8.1.5.2.1-1 V5G_213
/* 5GTF transport-block sizes indexed by MCS for a 100-PRB allocation
 * (units per the V5G_213 spec — presumably bits; confirm at usage). */
U32 rgSch5gtfTbSzTbl[MAX_5GTF_MCS] =
{1864, 5256, 8776, 13176, 17576, 21976, 26376, 31656, 35176, 39576, 43976, 47496, 52776, 59376, 66392};
/* 5GTF UL scheduler debug/statistics counters. Names indicate the event
 * counted at each increment site (increment sites not visible here —
 * confirm against the scheduling flow). Plain non-atomic globals. */
U32 gUl5gtfSrRecv = 0;
U32 gUl5gtfBsrRecv = 0;
U32 gUl5gtfUeSchPick = 0;
U32 gUl5gtfPdcchSchd = 0;
U32 gUl5gtfAllocAllocated = 0;
U32 gUl5gtfUeRbAllocDone = 0;
U32 gUl5gtfUeRmvFnlzZeroBo = 0;
U32 gUl5gtfUeFnlzReAdd = 0;
U32 gUl5gtfPdcchSend = 0;
U32 gUl5gtfRbAllocFail = 0;
U32 ul5gtfsidUlMarkUl = 0;
U32 ul5gtfsidDlSchdPass = 0;
U32 ul5gtfsidDlAlreadyMarkUl = 0;
U32 ul5gtfTotSchdCnt = 0;
/* CQI Offset Index to Beta CQI Offset value mapping,
 * stored as parts per 1000. Reserved entries are set to 0.
 * Refer 36.213 sec 8.6.3 Tbl 8.6.3-3 */
PUBLIC U32 rgSchCmnBetaCqiOffstTbl[16] = {0, 0, 1125,
1250, 1375, 1625, 1750, 2000, 2250, 2500, 2875,
3125, 3500, 4000, 5000, 6250};
/* HARQ-ACK delta-offset index to beta value mapping, stored as parts
 * per 1000 (same encoding as the CQI beta table); the last entry is
 * reserved (0). Refer 36.213 sec 8.6.3. */
PUBLIC U32 rgSchCmnBetaHqOffstTbl[16] = {2000, 2500, 3125,
4000, 5000, 6250, 8000,10000, 12625, 15875, 20000,
31000, 50000,80000,126000,0};
763 PUBLIC U32 rgSchCmnBetaRiOffstTbl[16] = {1250, 1625, 2000,
764 2500, 3125, 4000, 5000, 6250, 8000, 10000,12625,
/* 3-bit differential CQI field to signed offset: indices 4-7 map to
 * negative offsets (two's-complement-style encoding). */
PUBLIC S8 rgSchCmnDlCqiDiffOfst[8] = {0, 1, 2, 3, -4, -3, -2, -1};
/* Include CRS REs while calculating Efficiency */
/* Maps antenna-port count (valid indices 1, 2 and 4) to an internal
 * antenna index used for table lookups; other indices unused (0). */
CONSTANT PRIVATE U8 rgSchCmnAntIdx[5] = {0,0,1,0,2};
/* REs consumed by CRS, indexed by antenna-port count (valid at 1, 2, 4).
 * NOTE(review): the RE-accounting granularity (per RB / per slot) is not
 * visible here — confirm at the efficiency computation site. */
CONSTANT PRIVATE U8 rgSchCmnNumResForCrs[5] = {0,6,12,0,16};
/* Aperiodic CQI differential offsets; per the names, for UE-selected vs
 * eNB-configured reporting modes — confirm at usage sites. */
PUBLIC S8 rgSchCmnApUeSelDiffCqi[4] = {1, 2, 3, 4};
PUBLIC S8 rgSchCmnApEnbConfDiffCqi[4] = {0, 1, 2, -1};
781 typedef struct rgSchCmnDlUeDciFrmtOptns
783 TfuDciFormat spfcDciFrmt; /* TM(Transmission Mode) specific DCI format.
784 * Search space : UE Specific by C-RNTI only. */
785 U8 spfcDciRAType; /* Resource Alloctn(RA) type for spfcDciFrmt */
786 TfuDciFormat prfrdDciFrmt; /* Preferred DCI format among the available
787 * options for TD (Transmit Diversity) */
788 U8 prfrdDciRAType; /* Resource Alloctn(RA) type for prfrdDciFrmt */
789 }RgSchCmnDlUeDciFrmtOptns;
792 /* DCI Format options for each Transmission Mode */
793 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[7] = {
794 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
795 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
796 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
797 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
798 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
799 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
800 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
804 /* DCI Format options for each Transmission Mode */
805 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[9] = {
806 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
807 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
808 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
809 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
810 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
811 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
812 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
817 typedef struct rgSchCmnDlImcsTbl
819 U8 modOdr; /* Modulation Order */
821 }RgSchCmnDlImcsTbl[29];
823 CONSTANT struct rgSchCmnMult235Info
825 U8 match; /* Closest number satisfying 2^a.3^b.5^c, with a bias
826 * towards the smaller number */
827 U8 prvMatch; /* Closest number not greater than array index
828 * satisfying 2^a.3^b.5^c */
829 } rgSchCmnMult235Tbl[110+1] = {
831 {1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {6, 6}, {6, 6}, {8, 8},
832 {9, 9}, {10, 10}, {10, 10}, {12, 12}, {12, 12}, {15, 12}, {15, 15},
833 {16, 16}, {16, 16}, {18, 18}, {18, 18}, {20, 20}, {20, 20}, {20, 20},
834 {24, 20}, {24, 24}, {25, 25}, {25, 25}, {27, 27}, {27, 27}, {30, 27},
835 {30, 30}, {30, 30}, {32, 32}, {32, 32}, {32, 32}, {36, 32}, {36, 36},
836 {36, 36}, {36, 36}, {40, 36}, {40, 40}, {40, 40}, {40, 40}, {45, 40},
837 {45, 40}, {45, 45}, {45, 45}, {48, 45}, {48, 48}, {48, 48}, {50, 50},
838 {50, 50}, {50, 50}, {54, 50}, {54, 54}, {54, 54}, {54, 54}, {54, 54},
839 {60, 54}, {60, 54}, {60, 60}, {60, 60}, {60, 60}, {64, 60}, {64, 64},
840 {64, 64}, {64, 64}, {64, 64}, {64, 64}, {72, 64}, {72, 64}, {72, 64},
841 {72, 72}, {72, 72}, {75, 72}, {75, 75}, {75, 75}, {75, 75}, {80, 75},
842 {80, 75}, {80, 80}, {81, 81}, {81, 81}, {81, 81}, {81, 81}, {81, 81},
843 {90, 81}, {90, 81}, {90, 81}, {90, 81}, {90, 90}, {90, 90}, {90, 90},
844 {90, 90}, {96, 90}, {96, 90}, {96, 96}, {96, 96}, {96, 96}, {100, 96},
845 {100, 100}, {100, 100}, {100, 100}, {100, 100}, {100, 100}, {108, 100},
846 {108, 100}, {108, 100}, {108, 108}, {108, 108}, {108, 108}
/* Backoff Indicator (BI) index to backoff time mapping,
 * from 36.321 Table 7.2-1 */
CONSTANT PRIVATE S16 rgSchCmnBiTbl[RG_SCH_CMN_NUM_BI_VAL] = {
0, 10, 20, 30,40,60,80,120,160,240,320,480,960};
853 PUBLIC RgSchCmnUlCqiInfo rgSchCmnUlCqiTbl[RG_SCH_CMN_UL_NUM_CQI] = {
855 {RGSCH_CMN_QM_CQI_1,RGSCH_CMN_UL_EFF_CQI_1 },
856 {RGSCH_CMN_QM_CQI_2,RGSCH_CMN_UL_EFF_CQI_2 },
857 {RGSCH_CMN_QM_CQI_3,RGSCH_CMN_UL_EFF_CQI_3 },
858 {RGSCH_CMN_QM_CQI_4,RGSCH_CMN_UL_EFF_CQI_4 },
859 {RGSCH_CMN_QM_CQI_5,RGSCH_CMN_UL_EFF_CQI_5 },
860 {RGSCH_CMN_QM_CQI_6,RGSCH_CMN_UL_EFF_CQI_6 },
861 {RGSCH_CMN_QM_CQI_7,RGSCH_CMN_UL_EFF_CQI_7 },
862 {RGSCH_CMN_QM_CQI_8,RGSCH_CMN_UL_EFF_CQI_8 },
863 {RGSCH_CMN_QM_CQI_9,RGSCH_CMN_UL_EFF_CQI_9 },
864 {RGSCH_CMN_QM_CQI_10,RGSCH_CMN_UL_EFF_CQI_10 },
865 {RGSCH_CMN_QM_CQI_11,RGSCH_CMN_UL_EFF_CQI_11 },
866 {RGSCH_CMN_QM_CQI_12,RGSCH_CMN_UL_EFF_CQI_12 },
867 {RGSCH_CMN_QM_CQI_13,RGSCH_CMN_UL_EFF_CQI_13 },
868 {RGSCH_CMN_QM_CQI_14,RGSCH_CMN_UL_EFF_CQI_14 },
869 {RGSCH_CMN_QM_CQI_15,RGSCH_CMN_UL_EFF_CQI_15 },
873 /* This table maps a (delta_offset * 2 + 2) to a (beta * 8)
874 * where beta is 10^-(delta_offset/10) rounded off to nearest 1/8
876 PRIVATE U16 rgSchCmnUlBeta8Tbl[29] = {
877 6, RG_SCH_CMN_UL_INVALID_BETA8, 8, 9, 10, 11, 13, 14, 16, 18, 20, 23,
878 25, 28, 32, RG_SCH_CMN_UL_INVALID_BETA8, 40, RG_SCH_CMN_UL_INVALID_BETA8,
879 50, RG_SCH_CMN_UL_INVALID_BETA8, 64, RG_SCH_CMN_UL_INVALID_BETA8, 80,
880 RG_SCH_CMN_UL_INVALID_BETA8, 101, RG_SCH_CMN_UL_INVALID_BETA8, 127,
881 RG_SCH_CMN_UL_INVALID_BETA8, 160
/* QCI to SVC (service/logical-channel) priority mapping. Index is the QCI. */
PRIVATE U8 rgSchCmnDlQciPrio[RG_SCH_CMN_MAX_QCI] = RG_SCH_CMN_QCI_TO_PRIO;
/* The configuration is efficiency measured per 1024 REs. */
/* The first element stands for when CQI is not known */
/* This table is used to translate CQI to its corresponding */
/* allocation parameters. These are currently from 36.213 */
/* Just this table needs to be edited for modifying */
/* the resource allocation behaviour */
/* ADD CQI to MCS mapping correction
 * single dimensional array is replaced by 2 dimensions for different CFI*/
/* Single-layer PDSCH efficiency per CQI (index 0 = CQI unknown), one row
 * per CFI value; efficiency is measured per 1024 REs. */
PRIVATE U16 rgSchCmnCqiPdschEff[4][16] = {RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI1,
RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI2,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI3};
/* Two-layer (dual-codeword) PDSCH efficiency per CQI (index 0 = CQI
 * unknown), one row per CFI value; measured per 1024 REs. */
PRIVATE U16 rgSchCmn2LyrCqiPdschEff[4][16] = {RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI1,
RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI2, RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI3};
/* This configuration determines the translation of a UE's CQI to its */
/* PDCCH coding efficiency. This may be edited based on the installation */
/* DL HARQ redundancy-version sequence indexed by transmission attempt;
 * standard LTE RV cycling order 0,2,3,1. */
PRIVATE U8 rgSchCmnDlRvTbl[4] = {0, 2, 3, 1}; /* RVIdx sequence is corrected*/
907 /* Indexed by [DciFrmt].
908 * Considering the following definition in determining the dciFrmt index.
/* Per-DCI-format sizes, indexed by DCI format (see note above).
 * NOTE(review): zero-initialized here; presumably populated at runtime
 * (e.g. during cell configuration) — confirm where it is filled. */
PRIVATE U16 rgSchCmnDciFrmtSizes[10];
/* CQI to PDCCH coding-efficiency translation (values supplied by
 * RG_SCH_CMN_CQI_TO_PDCCH_EFF); may be tuned per installation. */
PRIVATE U16 rgSchCmnCqiPdcchEff[16] = RG_SCH_CMN_CQI_TO_PDCCH_EFF;
930 PUBLIC RgSchTddUlDlSubfrmTbl rgSchTddUlDlSubfrmTbl = {
931 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME},
932 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
933 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
934 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
935 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
936 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
937 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME}
942 PUBLIC U8 rgSchTddSpsDlMaxRetxTbl[RGSCH_MAX_TDD_UL_DL_CFG] = {
954 /* Special Subframes in OFDM symbols */
955 /* ccpu00134197-MOD-Correct the number of symbols */
956 PUBLIC RgSchTddSplSubfrmInfoTbl rgSchTddSplSubfrmInfoTbl = {
960 {11, 1, 1, 10, 1, 1},
968 /* PHICH 'm' value Table */
969 PUBLIC RgSchTddPhichMValTbl rgSchTddPhichMValTbl = {
970 {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
971 {0, 1, 0, 0, 1, 0, 1, 0, 0, 1},
972 {0, 0, 0, 1, 0, 0, 0, 0, 1, 0},
973 {1, 0, 0, 0, 0, 0, 0, 0, 1, 1},
974 {0, 0, 0, 0, 0, 0, 0, 0, 1, 1},
975 {0, 0, 0, 0, 0, 0, 0, 0, 1, 0},
976 {1, 1, 0, 0, 0, 1, 1, 0, 0, 1}
979 /* PHICH 'K' value Table */
980 PUBLIC RgSchTddKPhichTbl rgSchTddKPhichTbl = {
981 {0, 0, 4, 7, 6, 0, 0, 4, 7, 6},
982 {0, 0, 4, 6, 0, 0, 0, 4, 6, 0},
983 {0, 0, 6, 0, 0, 0, 0, 6, 0, 0},
984 {0, 0, 6, 6, 6, 0, 0, 0, 0, 0},
985 {0, 0, 6, 6, 0, 0, 0, 0, 0, 0},
986 {0, 0, 6, 0, 0, 0, 0, 0, 0, 0},
987 {0, 0, 4, 6, 6, 0, 0, 4, 7, 0}
990 /* Uplink association index 'K' value Table */
991 PUBLIC RgSchTddUlAscIdxKDashTbl rgSchTddUlAscIdxKDashTbl = {
992 {0, 0, 6, 4, 0, 0, 0, 6, 4, 0},
993 {0, 0, 4, 0, 0, 0, 0, 4, 0, 0},
994 {0, 0, 4, 4, 4, 0, 0, 0, 0, 0},
995 {0, 0, 4, 4, 0, 0, 0, 0, 0, 0},
996 {0, 0, 4, 0, 0, 0, 0, 0, 0, 0},
997 {0, 0, 7, 7, 5, 0, 0, 7, 7, 0}
1001 /* PUSCH 'K' value Table */
1002 PUBLIC RgSchTddPuschTxKTbl rgSchTddPuschTxKTbl = {
1003 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1004 {0, 6, 0, 0, 4, 0, 6, 0, 0, 4},
1005 {0, 0, 0, 4, 0, 0, 0, 0, 4, 0},
1006 {4, 0, 0, 0, 0, 0, 0, 0, 4, 4},
1007 {0, 0, 0, 0, 0, 0, 0, 0, 4, 4},
1008 {0, 0, 0, 0, 0, 0, 0, 0, 4, 0},
1009 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1012 /* PDSCH to PUCCH Table for DL Harq Feed back. Based on the
1013 Downlink association set index 'K' table */
1014 PUBLIC U8 rgSchTddPucchTxTbl[7][10] = {
1015 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1016 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1017 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1018 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1019 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1020 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1021 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1024 /* Table to fetch the next DL sf idx for applying the
1025 new CFI. The next Dl sf Idx at which the new CFI
1026 is applied is always the starting Sf of the next ACK/NACK
1029 Ex: In Cfg-2, sf4 and sf9 are the only subframes at which
1030 a new ACK/NACK bundle of DL subframes can start
1032 D S U D D D S U D D D S U D D D S U D D
1035 dlSf Array for Cfg-2:
1036 sfNum: 0 1 3 4 5 6 8 9 0 1 3 4 5 6 8 9
sfIdx: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
1039 If CFI changes at sf0, nearest DL SF bundle >= 4 TTI is sf4
1040 So at sf4 the new CFI can be applied. To arrive at sf4 from
1041 sf0, the sfIdx has to be increased by 3 */
1043 PUBLIC U8 rgSchTddPdcchSfIncTbl[7][10] = {
1044 /* A/N Bundl: 0,1,5,6*/ {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
1045 /* A/N Bundl: 0,4,5,9*/ {2, 2, 0, 0, 3, 2, 2, 0, 0, 3},
1046 /* A/N Bundl: 4,9*/ {3, 6, 0, 5, 4, 3, 6, 0, 5, 4},
1047 /* A/N Bundl: 1,7,9*/ {4, 3, 0, 0, 0, 4, 5, 4, 6, 5},
1048 /* A/N Bundl: 0,6*/ {4, 3, 0, 0, 6, 5, 4, 7, 6, 5},
1049 /* A/N Bundl: 9*/ {8, 7, 0, 6, 5, 4, 12, 11, 10, 9},
1050 /* A/N Bundl: 0,1,5,6,9*/ {2, 1, 0, 0, 0, 2, 2, 0, 0, 3}
1054 /* combine compilation fixes */
1056 /* subframe offset values to be used when twoIntervalsConfig is enabled in UL
1058 PUBLIC RgSchTddSfOffTbl rgSchTddSfOffTbl = {
1059 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1060 {0, 0, 1, -1, 0, 0, 0, 1, -1, 0},
1061 {0, 0, 5, 0, 0, 0, 0, -5, 0, 0},
1062 {0, 0, 1, 1, -2, 0, 0, 0, 0, 0},
1063 {0, 0, 1, -1, 0, 0, 0, 0, 0, 0},
1064 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1065 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
1069 /* Table to determine when uplink SPS configured grants should
 * explicitly be reserved in a subframe. When entries are same
1071 * as that of Msg3SubfrmTbl, indicates competition with msg3.
1072 * As of now, this is same as Msg3SubfrmTbl (leaving out uldlcfg 2),
1073 * except that all 255s are now zeros. */
1074 PUBLIC RgSchTddSpsUlRsrvTbl rgSchTddSpsUlRsrvTbl = {
1075 {0, 0, 0, 6, 8, 0, 0, 0, 6, 8},
1076 {0, 0, 6, 9, 0, 0, 0, 6, 9, 0},
1077 {0, 0, 10, 0, 0, 0, 0, 10, 0, 0},
1078 {0, 0, 0, 0, 8, 0, 7, 7, 14, 0},
1079 {0, 0, 0, 9, 0, 0, 7, 15, 0, 0},
1080 {0, 0, 10, 0, 0, 0, 16, 0, 0, 0},
1081 {0, 0, 0, 0, 8, 0, 0, 0, 9, 0}
1084 /* Inverse DL Assoc Set index Table */
1085 PUBLIC RgSchTddInvDlAscSetIdxTbl rgSchTddInvDlAscSetIdxTbl = {
1086 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1087 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1088 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1089 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1090 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1091 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1092 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1095 #endif /* (LTEMAC_SPS ) */
/* Number of Uplink subframes per radio frame, indexed by
 * TDD UL/DL configuration (0-6) */
PRIVATE U8 rgSchTddNumUlSf[] = {6, 4, 2, 3, 2, 1, 5};
/* Uplink HARQ processes Table, indexed by TDD UL/DL configuration
 * (36.213 Table 8-1). The previous comment said "Downlink" — the table
 * name and values are for UL. */
PUBLIC RgSchTddUlNumHarqProcTbl rgSchTddUlNumHarqProcTbl = { 7, 4, 2, 3, 2, 1, 6};
/* Downlink HARQ processes Table, indexed by TDD UL/DL configuration
 * (36.213 Table 7-1). The previous comment said "Uplink" — the table
 * name and values are for DL. */
PUBLIC RgSchTddDlNumHarqProcTbl rgSchTddDlNumHarqProcTbl = { 4, 7, 10, 9, 12, 15, 6};
1106 /* Downlink association index set 'K' value Table */
1107 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlAscSetIdxKTbl = {
1108 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1110 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1112 { {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}} },
1114 { {0, {0}}, {0, {0}}, {3, {7, 6, 11}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1116 { {0, {0}}, {0, {0}}, {4, {12, 8, 7, 11}}, {4, {6, 5, 4, 7}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1118 { {0, {0}}, {0, {0}}, {9, {13, 12, 9, 8, 7, 5, 4, 11, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1120 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
1123 /* ccpu132282-ADD-the table rgSchTddDlAscSetIdxKTbl is rearranged in
1124 * decreasing order of Km, this is used to calculate the NCE used for
1125 * calculating N1Pucch Resource for Harq*/
1126 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlHqPucchResCalTbl = {
1127 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1129 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1131 { {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}} },
1133 { {0, {0}}, {0, {0}}, {3, {11, 7, 6}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1135 { {0, {0}}, {0, {0}}, {4, {12, 11, 8, 7}}, {4, {7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1137 { {0, {0}}, {0, {0}}, {9, {13, 12, 11, 9, 8, 7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1139 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
/* Minimum number of Ack/Nack feedback information to be
 stored for each UL-DL configuration */
PUBLIC RgSchTddANFdbkMapTbl rgSchTddANFdbkMapTbl = {4, 4, 2, 3, 2, 1, 5};
1146 /* Uplink switch points and number of UL subframes Table */
1147 PUBLIC RgSchTddMaxUlSubfrmTbl rgSchTddMaxUlSubfrmTbl = {
1148 {2,3,3}, {2,2,2}, {2,1,1}, {1,3,0}, {1,2,0}, {1,1,0}, {2,3,2}
1151 /* Uplink switch points and number of DL subframes Table */
1152 PUBLIC RgSchTddMaxDlSubfrmTbl rgSchTddMaxDlSubfrmTbl = {
1153 {2,2,2}, {2,3,3}, {2,4,4}, {1,7,0}, {1,8,0}, {1,9,0}, {2,2,3}
1156 /* Number of UL subframes present before a particular subframe */
1157 PUBLIC RgSchTddNumUlSubfrmTbl rgSchTddNumUlSubfrmTbl = {
1158 {0, 0, 1, 2, 3, 3, 3, 4, 5, 6},
1159 {0, 0, 1, 2, 2, 2, 2, 3, 4, 4},
1160 {0, 0, 1, 1, 1, 1, 1, 2, 2, 2},
1161 {0, 0, 1, 2, 3, 3, 3, 3, 3, 3},
1162 {0, 0, 1, 2, 2, 2, 2, 2, 2, 2},
1163 {0, 0, 1, 1, 1, 1, 1, 1, 1, 1},
1164 {0, 0, 1, 2, 3, 3, 3, 4, 5, 5}
1167 /* Number of DL subframes present till a particular subframe */
1168 PUBLIC RgSchTddNumDlSubfrmTbl rgSchTddNumDlSubfrmTbl = {
1169 {1, 2, 2, 2, 2, 3, 4, 4, 4, 4},
1170 {1, 2, 2, 2, 3, 4, 5, 5, 5, 6},
1171 {1, 2, 2, 3, 4, 5, 6, 6, 7, 8},
1172 {1, 2, 2, 2, 2, 3, 4, 5, 6, 7},
1173 {1, 2, 2, 2, 3, 4, 5, 6, 7, 8},
1174 {1, 2, 2, 3, 4, 5, 6, 7, 8, 9},
1175 {1, 2, 2, 2, 2, 3, 4, 4, 4, 5}
1179 /* Nearest possible UL subframe Index from UL subframe
1180 * DL Index < UL Index */
1181 PUBLIC RgSchTddLowDlSubfrmIdxTbl rgSchTddLowDlSubfrmIdxTbl = {
1182 {0, 1, 1, 1, 1, 5, 6, 6, 6, 6},
1183 {0, 1, 1, 1, 4, 5, 6, 6, 6, 9},
1184 {0, 1, 1, 3, 4, 5, 6, 6, 8, 9},
1185 {0, 1, 1, 1, 1, 5, 6, 7, 8, 9},
1186 {0, 1, 1, 1, 4, 5, 6, 7, 8, 9},
1187 {0, 1, 1, 3, 4, 5, 6, 7, 8, 9},
1188 {0, 1, 1, 1, 1, 5, 6, 6, 6, 9}
/* Nearest DL subframe index above a given UL subframe index
 * (the "DL Index > UL Index" case). Rows are TDD UL-DL configurations
 * 0..6; columns are subframe indices 0..9. A value of 10 denotes the
 * lowest DL subframe index of the NEXT radio frame (SFN wrap-around). */
1194 PUBLIC RgSchTddHighDlSubfrmIdxTbl rgSchTddHighDlSubfrmIdxTbl = {
1195 {0, 1, 5, 5, 5, 5, 6, 10, 10, 10},
1196 {0, 1, 4, 4, 4, 5, 6, 9, 9, 9},
1197 {0, 1, 3, 3, 4, 5, 6, 8, 8, 9},
1198 {0, 1, 5, 5, 5, 5, 6, 7, 8, 9},
1199 {0, 1, 4, 4, 4, 5, 6, 7, 8, 9},
1200 {0, 1, 3, 3, 4, 5, 6, 7, 8, 9},
1201 {0, 1, 5, 5, 5, 5, 6, 9, 9, 9}
/* RACH Message3 subframe table. Rows are TDD UL-DL configurations 0..6;
 * columns are subframe indices 0..9.
 * NOTE(review): 255 presumably marks subframes where a Msg3 grant is not
 * applicable -- confirm against the consumers of this table. */
1205 PUBLIC RgSchTddMsg3SubfrmTbl rgSchTddMsg3SubfrmTbl = {
1206 {7, 6, 255, 255, 255, 7, 6, 255, 255, 255},
1207 {7, 6, 255, 255, 8, 7, 6, 255, 255, 8},
1208 {7, 6, 255, 9, 8, 7, 6, 255, 9, 8},
1209 {12, 11, 255, 255, 255, 7, 6, 6, 6, 13},
1210 {12, 11, 255, 255, 8, 7, 6, 6, 14, 13},
1211 {12, 11, 255, 9, 8, 7, 6, 15, 14, 13},
1212 {7, 6, 255, 255, 255, 7, 6, 255, 255, 8}
1215 /* ccpu00132341-DEL Removed rgSchTddRlsDlSubfrmTbl and used Kset table for
1216 * releasing DL HARQs */
/* DwPTS Scheduling Changes Start */
/* Number of Cell-specific Reference Signal (CRS) resource elements in the
 * DwPTS region, indexed [special-subframe cfg group][antenna-port column].
 * Columns presumably correspond to 1/2/4 antenna ports -- TODO confirm. */
1221 PRIVATE U8 rgSchCmnDwptsCrs[2][3] = {/* [Spl Sf cfg][Ant Port] */
1222 {4, 8, 16}, /* Spl Sf cfg 1,2,3,6,7,8 */
1223 {6, 12, 20}, /* Spl Sf cfg 4 */
/* Signed per-special-subframe-configuration ITBS adjustment (delta)
 * applied when scheduling in DwPTS; the nine initializer values are
 * supplied by the RG_SCH_DWPTS_ITBS_ADJ macro defined elsewhere. */
1226 PRIVATE S8 rgSchCmnSplSfDeltaItbs[9] = RG_SCH_DWPTS_ITBS_ADJ;
/* DwPTS Scheduling Changes End */
/* Buffer Status Report index (0..63) to buffer-size (bytes) lookup.
 * NOTE(review): values track 3GPP TS 36.321 Table 6.1.3.1-1; the spec's
 * final entry is ">150000" and is represented here as 220000 -- confirm
 * that consumers treat index 63 as an open-ended upper bucket. */
1231 PRIVATE U32 rgSchCmnBsrTbl[64] = {
1232 0, 10, 12, 14, 17, 19, 22, 26,
1233 31, 36, 42, 49, 57, 67, 78, 91,
1234 107, 125, 146, 171, 200, 234, 274, 321,
1235 376, 440, 515, 603, 706, 826, 967, 1132,
1236 1326, 1552, 1817, 2127, 2490, 2915, 3413, 3995,
1237 4677, 5476, 6411, 7505, 8787, 10287, 12043, 14099,
1238 16507, 19325, 22624, 26487, 31009, 36304, 42502, 49759,
1239 58255, 68201, 79846, 93479, 109439, 128125, 150000, 220000
/* Extended Buffer Status Report index (0..63) to buffer-size (bytes)
 * lookup for large buffers.
 * NOTE(review): values track 3GPP TS 36.321 Table 6.1.3.1-2; the spec's
 * final entry is ">3000000" and is represented here as 3100000 --
 * confirm index 63 is treated as an open-ended upper bucket. */
1242 PRIVATE U32 rgSchCmnExtBsrTbl[64] = {
1243 0, 10, 13, 16, 19, 23, 29, 35,
1244 43, 53, 65, 80, 98, 120, 147, 181,
1245 223, 274, 337, 414, 509, 625, 769, 945,
1246 1162, 1429, 1757, 2161, 2657, 3267, 4017, 4940,
1247 6074, 7469, 9185, 11294, 13888, 17077, 20999, 25822,
1248 31752, 39045, 48012, 59039, 72598, 89272, 109774, 134986,
1249 165989, 204111, 250990, 308634, 379519, 466683, 573866, 705666,
1250 867737, 1067031, 1312097, 1613447, 1984009, 2439678, 3000000, 3100000
/* Redundancy-version index (0..3) to UL I_MCS lookup for retransmissions.
 * 3GPP TS 36.213 Table 8.6.1-1 reserves I_MCS 29/30/31 to signal RV 1/2/3;
 * entry 32 at index 0 lies outside the 0..31 I_MCS range and presumably
 * acts as an "unused" sentinel (RV 0 carries an explicit MCS).
 * NOTE(review): the ordering {32, 30, 31, 29} is not the plain
 * RV 1/2/3 -> I_MCS 29/30/31 mapping -- verify indexing at call sites. */
1254 PRIVATE U8 rgSchCmnUlRvIdxToIMcsTbl[4] = {32, 30, 31, 29};
/* UL CQI to TBS-index lookup, indexed [cyclic-prefix type][UL CQI].
 * Definition only (static storage, zero-initialized); presumably populated
 * during scheduler initialization elsewhere in this file -- TODO confirm. */
1256 PUBLIC U8 rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_MAX_CP][RG_SCH_CMN_UL_NUM_CQI];
1258 PUBLIC RgSchTbSzTbl rgTbSzTbl = {
1260 {16, 32, 56, 88, 120, 152, 176, 208, 224, 256, 288, 328, 344, 376, 392, 424, 456, 488, 504, 536, 568, 600, 616, 648, 680, 712, 744, 776, 776, 808, 840, 872, 904, 936, 968, 1000, 1032, 1032, 1064, 1096, 1128, 1160, 1192, 1224, 1256, 1256, 1288, 1320, 1352, 1384, 1416, 1416, 1480, 1480, 1544, 1544, 1608, 1608, 1608, 1672, 1672, 1736, 1736, 1800, 1800, 1800, 1864, 1864, 1928, 1928, 1992, 1992, 2024, 2088, 2088, 2088, 2152, 2152, 2216, 2216, 2280, 2280, 2280, 2344, 2344, 2408, 2408, 2472, 2472, 2536, 2536, 2536, 2600, 2600, 2664, 2664, 2728, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 2984, 2984, 3112},
1261 {24, 56, 88, 144, 176, 208, 224, 256, 328, 344, 376, 424, 456, 488, 520, 568, 600, 632, 680, 712, 744, 776, 808, 872, 904, 936, 968, 1000, 1032, 1064, 1128, 1160, 1192, 1224, 1256, 1288, 1352, 1384, 1416, 1416, 1480, 1544, 1544, 1608, 1608, 1672, 1736, 1736, 1800, 1800, 1864, 1864, 1928, 1992, 1992, 2024, 2088, 2088, 2152, 2152, 2216, 2280, 2280, 2344, 2344, 2408, 2472, 2472, 2536, 2536, 2600, 2600, 2664, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008},
1262 {32, 72, 144, 176, 208, 256, 296, 328, 376, 424, 472, 520, 568, 616, 648, 696, 744, 776, 840, 872, 936, 968, 1000, 1064, 1096, 1160, 1192, 1256, 1288, 1320, 1384, 1416, 1480, 1544, 1544, 1608, 1672, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2024, 2088, 2088, 2152, 2216, 2216, 2280, 2344, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2664, 2728, 2792, 2856, 2856, 2856, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968},
1263 {40, 104, 176, 208, 256, 328, 392, 440, 504, 568, 616, 680, 744, 808, 872, 904, 968, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1864, 1928, 1992, 2024, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6200, 6456, 6456},
1264 {56, 120, 208, 256, 328, 408, 488, 552, 632, 696, 776, 840, 904, 1000, 1064, 1128, 1192, 1288, 1352, 1416, 1480, 1544, 1608, 1736, 1800, 1864, 1928, 1992, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2600, 2664, 2728, 2792, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992},
1265 {72, 144, 224, 328, 424, 504, 600, 680, 776, 872, 968, 1032, 1128, 1224, 1320, 1384, 1480, 1544, 1672, 1736, 1864, 1928, 2024, 2088, 2216, 2280, 2344, 2472, 2536, 2664, 2728, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4264, 4392, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9528},
1266 {328, 176, 256, 392, 504, 600, 712, 808, 936, 1032, 1128, 1224, 1352, 1480, 1544, 1672, 1736, 1864, 1992, 2088, 2216, 2280, 2408, 2472, 2600, 2728, 2792, 2984, 2984, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3880, 4008, 4136, 4136, 4264, 4392, 4584, 4584, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448},
1267 {104, 224, 328, 472, 584, 712, 840, 968, 1096, 1224, 1320, 1480, 1608, 1672, 1800, 1928, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536},
1268 {120, 256, 392, 536, 680, 808, 968, 1096, 1256, 1384, 1544, 1672, 1800, 1928, 2088, 2216, 2344, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15264},
1269 {136, 296, 456, 616, 776, 936, 1096, 1256, 1416, 1544, 1736, 1864, 2024, 2216, 2344, 2536, 2664, 2856, 2984, 3112, 3368, 3496, 3624, 3752, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 5160, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568},
1270 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080},
1271 {176, 376, 584, 776, 1000, 1192, 1384, 1608, 1800, 2024, 2216, 2408, 2600, 2792, 2984, 3240, 3496, 3624, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152},
1272 {208, 440, 680, 904, 1128, 1352, 1608, 1800, 2024, 2280, 2472, 2728, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 23688, 24496, 24496, 24496, 24496, 25456},
1273 {224, 488, 744, 1000, 1256, 1544, 1800, 2024, 2280, 2536, 2856, 3112, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 26416, 27376, 27376, 27376, 27376, 28336, 28336},
1274 {256, 552, 840, 1128, 1416, 1736, 1992, 2280, 2600, 2856, 3112, 3496, 3752, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 5992, 6200, 6456, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704},
1275 {280, 600, 904, 1224, 1544, 1800, 2152, 2472, 2728, 3112, 3368, 3624, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 6200, 6456, 6712, 6968, 7224, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008},
1276 {328, 632, 968, 1288, 1608, 1928, 2280, 2600, 2984, 3240, 3624, 3880, 4264, 4584, 4968, 5160, 5544, 5992, 6200, 6456, 6712, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 35160},
1277 {336, 696, 1064, 1416, 1800, 2152, 2536, 2856, 3240, 3624, 4008, 4392, 4776, 5160, 5352, 5736, 6200, 6456, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 39232},
1278 {376, 776, 1160, 1544, 1992, 2344, 2792, 3112, 3624, 4008, 4392, 4776, 5160, 5544, 5992, 6200, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14112, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816},
1279 {408, 840, 1288, 1736, 2152, 2600, 2984, 3496, 3880, 4264, 4776, 5160, 5544, 5992, 6456, 6968, 7224, 7736, 8248, 8504, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19080, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888},
1280 {440, 904, 1384, 1864, 2344, 2792, 3240, 3752, 4136, 4584, 5160, 5544, 5992, 6456, 6968, 7480, 7992, 8248, 8760, 9144, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 20616, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 48936, 51024, 51024, 51024},
1281 {488, 1000, 1480, 1992, 2472, 2984, 3496, 4008, 4584, 4968, 5544, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 9912, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056},
1282 {520, 1064, 1608, 2152, 2664, 3240, 3752, 4264, 4776, 5352, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 10296, 10680, 11448, 11832, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 19080, 19080, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256},
1283 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776},
1284 {584, 1192, 1800, 2408, 2984, 3624, 4264, 4968, 5544, 5992, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 22920, 23688, 24496, 25456, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592},
1285 {616, 1256, 1864, 2536, 3112, 3752, 4392, 5160, 5736, 6200, 6968, 7480, 8248, 8760, 9528, 10296, 10680, 11448, 12216, 12576, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592, 68808, 68808, 68808, 71112},
1286 {712, 1480, 2216, 2984, 3752, 4392, 5160, 5992, 6712, 7480, 8248, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 14688, 15264, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376}
1289 {32, 88, 152, 208, 256, 328, 376, 424, 488, 536, 600, 648, 712, 776, 808, 872, 936, 1000, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2088, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 5992, 6200},
1290 {56, 144, 208, 256, 344, 424, 488, 568, 632, 712, 776, 872, 936, 1000, 1064, 1160, 1224, 1288, 1384, 1416, 1544, 1608, 1672, 1736, 1800, 1864, 1992, 2024, 2088, 2152, 2280, 2344, 2408, 2472, 2536, 2600, 2728, 2792, 2856, 2856, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5352, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992},
1291 {72, 176, 256, 328, 424, 520, 616, 696, 776, 872, 968, 1064, 1160, 1256, 1320, 1416, 1544, 1608, 1672, 1800, 1864, 1992, 2088, 2152, 2216, 2344, 2408, 2536, 2600, 2664, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3368, 3496, 3624, 3624, 3752, 3880, 4008, 4008, 4136, 4264, 4264, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912},
1292 {104, 208, 328, 440, 568, 680, 808, 904, 1032, 1160, 1256, 1384, 1480, 1608, 1736, 1864, 1992, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2856, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4392, 4584, 4776, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12576, 12960, 12960},
1293 {120, 256, 408, 552, 696, 840, 1000, 1128, 1288, 1416, 1544, 1736, 1864, 1992, 2152, 2280, 2408, 2600, 2728, 2856, 2984, 3112, 3240, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840},
1294 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19080},
1295 {176, 392, 600, 808, 1032, 1224, 1480, 1672, 1864, 2088, 2280, 2472, 2728, 2984, 3112, 3368, 3496, 3752, 4008, 4136, 4392, 4584, 4776, 4968, 5160, 5352, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7992, 8248, 8248, 8504, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920},
1296 {224, 472, 712, 968, 1224, 1480, 1672, 1928, 2216, 2472, 2664, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376},
1297 {256, 536, 808, 1096, 1384, 1672, 1928, 2216, 2536, 2792, 3112, 3368, 3624, 3880, 4264, 4584, 4776, 4968, 5352, 5544, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576},
1298 {296, 616, 936, 1256, 1544, 1864, 2216, 2536, 2856, 3112, 3496, 3752, 4136, 4392, 4776, 5160, 5352, 5736, 5992, 6200, 6712, 6968, 7224, 7480, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160},
1299 {328, 680, 1032, 1384, 1736, 2088, 2472, 2792, 3112, 3496, 3880, 4264, 4584, 4968, 5352, 5736, 5992, 6200, 6712, 6968, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 21384, 21384, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888},
1300 {376, 776, 1192, 1608, 2024, 2408, 2792, 3240, 3624, 4008, 4392, 4776, 5352, 5736, 5992, 6456, 6968, 7224, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816},
1301 {440, 904, 1352, 1800, 2280, 2728, 3240, 3624, 4136, 4584, 4968, 5544, 5992, 6456, 6712, 7224, 7736, 8248, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 17568, 17568, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024},
1302 {488, 1000, 1544, 2024, 2536, 3112, 3624, 4136, 4584, 5160, 5736, 6200, 6712, 7224, 7736, 8248, 8760, 9144, 9912, 10296, 10680, 11448, 11832, 12216, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336},
1303 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776},
1304 {600, 1224, 1800, 2472, 3112, 3624, 4264, 4968, 5544, 6200, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 23688, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808},
1305 {632, 1288, 1928, 2600, 3240, 3880, 4584, 5160, 5992, 6456, 7224, 7736, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 68808, 71112, 71112, 71112, 71112},
1306 {696, 1416, 2152, 2856, 3624, 4392, 5160, 5736, 6456, 7224, 7992, 8760, 9528, 10296, 10680, 11448, 12216, 12960, 13536, 14688, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 71112, 73712, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 78704},
1307 {776, 1544, 2344, 3112, 4008, 4776, 5544, 6200, 7224, 7992, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 27376, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936},
1308 {840, 1736, 2600, 3496, 4264, 5160, 5992, 6968, 7736, 8504, 9528, 10296, 11064, 12216, 12960, 13536, 14688, 15264, 16416, 16992, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800},
1309 {904, 1864, 2792, 3752, 4584, 5544, 6456, 7480, 8248, 9144, 10296, 11064, 12216, 12960, 14112, 14688, 15840, 16992, 17568, 18336, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 42368, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 97896, 101840, 101840, 101840},
1310 {1000, 1992, 2984, 4008, 4968, 5992, 6968, 7992, 9144, 9912, 11064, 12216, 12960, 14112, 15264, 15840, 16992, 18336, 19080, 19848, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 31704, 32856, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136},
1311 {1064, 2152, 3240, 4264, 5352, 6456, 7480, 8504, 9528, 10680, 11832, 12960, 14112, 15264, 16416, 16992, 18336, 19080, 20616, 21384, 22920, 23688, 24496, 25456, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816},
1312 {1128, 2280, 3496, 4584, 5736, 6968, 7992, 9144, 10296, 11448, 12576, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 45352, 45352, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840,101840,101840,101840,105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496},
1313 {1192, 2408, 3624, 4968, 5992, 7224, 8504, 9912, 11064, 12216, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 42368, 43816, 45352, 46888, 46888, 48936, 51024, 51024, 52752, 52752, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208},
1314 {1256, 2536, 3752, 5160, 6200, 7480, 8760, 10296, 11448, 12576, 14112, 15264, 16416, 17568, 19080, 20616, 21384, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 43816, 43816, 45352, 46888, 48936, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040,115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208, 137792, 137792, 137792, 142248},
1315 {1480, 2984, 4392, 5992, 7480, 8760, 10296, 11832, 13536, 14688, 16416, 17568, 19080, 20616, 22152, 23688, 25456, 26416, 28336, 29296, 30576, 32856, 34008, 35160, 36696, 37888, 40576, 40576, 42368, 43816, 45352, 46888, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 63776, 63776, 66592, 68808, 68808, 71112, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 81176, 84760, 84760, 87936, 87936, 90816, 90816, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 128496, 128496, 128496, 133208, 133208, 133208, 137792, 137792, 137792, 142248, 142248, 142248, 146856, 146856,149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776}
/* UL I-MCS lookup table: one {modulation order Qm, I-TBS index} pair per
 * UL MCS index. Qm steps 2 -> 4 -> 6 (QPSK / 16QAM / 64QAM) with the I-TBS
 * repeated at each modulation boundary; presumably mirrors 3GPP TS 36.213
 * Table 8.6.1-1 — TODO confirm against the spec revision this code targets. */
1318 RgSchUlIMcsTbl rgUlIMcsTbl = {
1319 {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5},
1320 {2, 6}, {2, 7}, {2, 8}, {2, 9}, {2, 10},
1321 {4, 10}, {4, 11}, {4, 12}, {4, 13}, {4, 14},
1322 {4, 15}, {4, 16}, {4, 17}, {4, 18}, {4, 19},
1323 {6, 19}, {6, 20}, {6, 21}, {6, 22}, {6, 23},
1324 {6, 24}, {6, 25}, {6, 26}
/* UE category capability table: one row per UE category (1..8, in order).
 * Column meanings are spelled out in the original comments below. Note the
 * second field is itself a pair — per-codeword max DL-SCH TB bits; a zero
 * second entry means no second-codeword capability for that category. */
1326 RgSchUeCatTbl rgUeCatTbl = {
1327 /*Column1:Maximum number of bits of an UL-SCH
1328 transport block transmitted within a TTI
1330 Column2:Maximum number of bits of a DLSCH
1331 transport block received within a TTI
1333 Column3:Total number of soft channel bits
1335 Column4:Support for 64QAM in UL
1337 Column5:Maximum number of DL-SCH transport
1338 block bits received within a TTI
1340 Column6:Maximum number of supported layers for
1341 spatial multiplexing in DL
1343 {5160, {10296,0}, 250368, FALSE, 10296, 1},
1344 {25456, {51024,0}, 1237248, FALSE, 51024, 2},
1345 {51024, {75376,0}, 1237248, FALSE, 102048, 2},
1346 {51024, {75376,0}, 1827072, FALSE, 150752, 2},
1347 {75376, {149776,0}, 3667200, TRUE, 299552, 4},
1348 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1349 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1350 {149776,{299856,0}, 35982720,TRUE, 2998560, 8}
1353 /* [ccpu00138532]-ADD-The below table stores the min HARQ RTT time
1354 in Downlink for TDD and FDD. Indices 0 to 6 map to tdd UL DL config 0-6.
1355 Index 7 map to FDD */
1356 U8 rgSchCmnHarqRtt[8] = {4,7,10,9,12,15,6,8};
1357 /* Number of CFI Switchover Index is equals to 7 TDD Indexes + 1 FDD index */
1358 U8 rgSchCfiSwitchOvrWinLen[] = {7, 4, 2, 3, 2, 1, 6, 8};
/* Per-CFI efficiency tables for normal CP; index within each array is the
 * layer index (0 = single layer, 1 = two layers per codeword). These are
 * filled at init time (see rgSCHCmnDlInit) and referenced through the
 * rgSchCmnEffTbl pointer table below. */
1360 /* EffTbl is calculated for single layer and two layers.
1361 * CqiToTbs is calculated for single layer and two layers */
1362 RgSchCmnTbSzEff rgSchCmnNorCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1363 RgSchCmnTbSzEff rgSchCmnNorCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1364 /* New variable to store UL effiency values for normal and extended CP*/
1365 RgSchCmnTbSzEff rgSchCmnNorUlEff[1],rgSchCmnExtUlEff[1];
1366 RgSchCmnCqiToTbs rgSchCmnNorCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1367 RgSchCmnCqiToTbs rgSchCmnNorCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
/* Pointer table selecting a CQI-to-TBS map by [layerIdx][cpType][cfiIdx]. */
1368 RgSchCmnCqiToTbs *rgSchCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
1369 RgSchCmnTbSzEff rgSchCmnExtCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1370 RgSchCmnTbSzEff rgSchCmnExtCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1371 RgSchCmnCqiToTbs rgSchCmnExtCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1372 RgSchCmnCqiToTbs rgSchCmnExtCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1373 /* Include CRS REs while calculating Efficiency */
/* DL efficiency pointer table, indexed [layerIdx][cpType][antConf][cfiIdx]. */
1374 RgSchCmnTbSzEff *rgSchCmnEffTbl[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_ANT_CONF][RG_SCH_CMN_MAX_CFI];
1375 RgSchCmnTbSzEff *rgSchCmnUlEffTbl[RG_SCH_CMN_MAX_CP];
/* NOTE(review): rgRaPrmblToRaFrmTbl appears defined twice below with
 * different initializers (5 entries vs 4). Presumably these are the TDD and
 * FDD variants selected by conditional compilation not visible in this
 * excerpt — confirm the #ifdef guards in the full file. */
1377 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3, 1};
1379 /* Added matrix 'rgRaPrmblToRaFrmTbl'for computation of RA sub-frames from RA preamble */
1380 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3};
/* Scheduler entry-point jump tables defined in the specific UL/DL/DLFS
 * (and eMTC) scheduler modules. */
1383 EXTERN RgUlSchdInits rgSchUlSchdInits;
1384 EXTERN RgDlSchdInits rgSchDlSchdInits;
1385 EXTERN RgDlfsSchdInits rgSchDlfsSchdInits;
1387 EXTERN RgEmtcUlSchdInits rgSchEmtcUlSchdInits;
1388 EXTERN RgEmtcDlSchdInits rgSchEmtcDlSchdInits;
/* ---------------------------------------------------------------------------
 * Forward declarations of file-local (PRIVATE) and exported (PUBLIC) helper
 * functions used by the common scheduler: PDCCH-order / RACH handling, UL and
 * DL RB allocation, group power control, timer handling, HARQ/DAI updates,
 * TDD timing-table initialization and rank computation. Argument lists use
 * the ARGS(()) portability macro; several lists continue on following lines.
 * ------------------------------------------------------------------------- */
1392 PRIVATE S16 rgSCHCmnUeIdleExdThrsld ARGS((
1396 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe ARGS((
1400 PRIVATE Void rgSCHCmnDelDedPreamble ARGS((
1404 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe ARGS((
1407 CmLteTimingInfo timingInfo
1409 PRIVATE Void rgSCHCmnDelRachInfo ARGS((
1413 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe ARGS((
1419 PRIVATE Void rgSCHCmnHdlHoPo ARGS((
1421 CmLListCp *raRspLst,
1422 RgSchRaReqInfo *raReq
1424 PRIVATE Void rgSCHCmnAllocPoHoGrnt ARGS((
1426 CmLListCp *raRspLst,
1428 RgSchRaReqInfo *raReq
1430 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf ARGS((
1437 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ ARGS((
1441 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ ARGS((
1445 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx ARGS((
1448 PRIVATE Void rgSCHCmnUpdRachParam ARGS((
1451 PRIVATE S16 rgSCHCmnAllocPOParam ARGS((
1459 PRIVATE Void rgSCHCmnGenPdcchOrder ARGS((
1463 PRIVATE Void rgSCHCmnCfgRachDedPrm ARGS((
1468 PRIVATE Void rgSCHCmnHdlUlInactUes ARGS((
1471 PRIVATE Void rgSCHCmnHdlDlInactUes ARGS((
1474 PRIVATE Void rgSCHCmnUlInit ARGS((Void
1476 PRIVATE Void rgSCHCmnDlInit ARGS((Void
1478 PRIVATE Void rgSCHCmnInitDlRbAllocInfo ARGS((
1479 RgSchCmnDlRbAllocInfo *allocInfo
1481 PRIVATE Void rgSCHCmnUpdUlCompEffBsr ARGS((
1485 PRIVATE Void rgSCHCmnUlSetAllUnSched ARGS((
1486 RgSchCmnUlRbAllocInfo *allocInfo
1488 PRIVATE Void rgSCHCmnUlUpdSf ARGS((
1490 RgSchCmnUlRbAllocInfo *allocInfo,
1493 PRIVATE Void rgSCHCmnUlHndlAllocRetx ARGS((
1495 RgSchCmnUlRbAllocInfo *allocInfo,
1500 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch ARGS((
1504 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch ARGS((
1508 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ ARGS((
1512 PRIVATE S16 rgSCHCmnTmrExpiry ARGS((
1513 PTR cb, /* Pointer to timer control block */
1514 S16 tmrEvnt /* Timer Event */
1516 PRIVATE S16 rgSCHCmnTmrProc ARGS((
1519 PRIVATE Void rgSCHCmnAddUeToRefreshQ ARGS((
1524 PRIVATE Void rgSCHCmnDlCcchRetx ARGS((
1526 RgSchCmnDlRbAllocInfo *allocInfo
1528 PRIVATE Void rgSCHCmnUpdUeMimoInfo ARGS((
1532 RgSchCmnCell *cellSchd
1534 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo ARGS((
1538 RgSchCmnUe *ueSchCmn,
1539 RgSchCmnCell *cellSchd,
1543 PRIVATE Void rgSCHCmnDlCcchSduRetx ARGS((
1545 RgSchCmnDlRbAllocInfo *allocInfo
1547 PRIVATE Void rgSCHCmnDlCcchSduTx ARGS((
1549 RgSchCmnDlRbAllocInfo *allocInfo
1551 PRIVATE S16 rgSCHCmnCcchSduAlloc ARGS((
1554 RgSchCmnDlRbAllocInfo *allocInfo
1556 PRIVATE S16 rgSCHCmnCcchSduDedAlloc ARGS((
1560 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc ARGS((
1566 PRIVATE Void rgSCHCmnInitVars ARGS((
1570 /*ccpu00117180 - DEL - Moved rgSCHCmnUpdVars to .x as its access is now PUBLIC */
1571 PRIVATE Void rgSCHCmnUlRbAllocForLst ARGS((
1577 CmLListCp *nonSchdLst,
1580 PRIVATE S16 rgSCHCmnUlRbAllocForUe ARGS((
1587 PRIVATE Void rgSCHCmnMsg3GrntReq ARGS((
1591 RgSchUlHqProcCb *hqProc,
1592 RgSchUlAlloc **ulAllocRef,
1596 PRIVATE Void rgSCHCmnUlNonadapRetx ARGS((
1597 RgSchCmnUlCell *cellUl,
1598 RgSchUlAlloc *alloc,
1602 PRIVATE Void rgSCHCmnDlCcchRarAlloc ARGS((
1605 PRIVATE Void rgSCHCmnDlCcchTx ARGS((
1607 RgSchCmnDlRbAllocInfo *allocInfo
1609 PRIVATE Void rgSCHCmnDlBcchPcch ARGS((
1611 RgSchCmnDlRbAllocInfo *allocInfo,
1612 RgInfSfAlloc *subfrmAlloc
1614 PUBLIC Bool rgSCHCmnChkInWin ARGS((
1615 CmLteTimingInfo frm,
1616 CmLteTimingInfo start,
1619 PUBLIC Bool rgSCHCmnChkPastWin ARGS((
1620 CmLteTimingInfo frm,
1623 PRIVATE Void rgSCHCmnClcAlloc ARGS((
1626 RgSchClcDlLcCb *lch,
1628 RgSchCmnDlRbAllocInfo *allocInfo
1631 PRIVATE Void rgSCHCmnClcRbAlloc ARGS((
1642 PRIVATE S16 rgSCHCmnMsg4Alloc ARGS((
1645 RgSchCmnDlRbAllocInfo *allocInfo
1647 PRIVATE S16 rgSCHCmnMsg4DedAlloc ARGS((
1651 PRIVATE Void rgSCHCmnDlRaRsp ARGS((
1653 RgSchCmnDlRbAllocInfo *allocInfo
1655 PRIVATE S16 rgSCHCmnRaRspAlloc ARGS((
1661 RgSchCmnDlRbAllocInfo *allocInfo
1663 PRIVATE Void rgSCHCmnUlUeDelAllocs ARGS((
1667 PRIVATE Void rgSCHCmnDlSetUeAllocLmt ARGS((
1672 PRIVATE S16 rgSCHCmnDlRgrCellCfg ARGS((
1677 PRIVATE Void rgSCHCmnUlAdapRetx ARGS((
1678 RgSchUlAlloc *alloc,
1679 RgSchUlHqProcCb *proc
1681 PRIVATE Void rgSCHCmnUlUpdAllocRetx ARGS((
1685 PRIVATE Void rgSCHCmnUlSfReTxAllocs ARGS((
1689 /* Fix: syed Adaptive Msg3 Retx crash. */
1691 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs ARGS((
/* NOTE(review): rgSCHCmnDlHdlTxModeRecfg is declared twice below —
 * presumably TDD vs FDD prototypes under conditional compilation not
 * visible here; confirm in the full file. */
1697 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1701 RgrUeRecfg *ueRecfg,
1705 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1715 * DL RB allocation specific functions
1718 PRIVATE Void rgSCHCmnDlRbAlloc ARGS((
1720 RgSchCmnDlRbAllocInfo *allocInfo
1722 PRIVATE Void rgSCHCmnNonDlfsRbAlloc ARGS((
1724 RgSchCmnDlRbAllocInfo *allocInfo
1726 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc ARGS((
1728 RgSchDlRbAlloc *cmnAllocInfo));
1731 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj ARGS((
1733 RgSchDlRbAlloc *cmnAllocInfo,
1737 /* Added function to adjust TBSize*/
1738 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj ARGS((
1739 RgSchDlRbAlloc *allocInfo,
1740 U8 numOvrlapgPbchRb,
1746 /* Added function to find num of overlapping PBCH rb*/
1747 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs ARGS((
1750 RgSchDlRbAlloc *allocInfo,
1751 U8 *numOvrlapgPbchRb
1754 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl ARGS((
1757 RgSchDlRbAlloc *allocInfo
1761 PRIVATE Void rgSCHCmnFindCodeRate ARGS((
1764 RgSchDlRbAlloc *allocInfo,
1770 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc ARGS((
1772 RgSchCmnMsg4RbAlloc *msg4AllocInfo,
1775 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc ARGS((
1781 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc ARGS((
1788 PRIVATE U32 rgSCHCmnCalcRiv ARGS(( U8 bw,
1794 PRIVATE Void rgSCHCmnUpdHqAndDai ARGS((
1795 RgSchDlHqProcCb *hqP,
1797 RgSchDlHqTbCb *tbCb,
1800 PRIVATE S16 rgSCHCmnUlCalcAvailBw ARGS((
1802 RgrCellCfg *cellCfg,
1807 PRIVATE S16 rgSCHCmnDlKdashUlAscInit ARGS((
1810 PRIVATE S16 rgSCHCmnDlANFdbkInit ARGS((
1813 PRIVATE S16 rgSCHCmnDlNpValInit ARGS((
1816 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst ARGS((
1819 PRIVATE S16 rgSCHCmnDlCpyRachInfo ARGS((
1821 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
1824 PRIVATE S16 rgSCHCmnDlRachInfoInit ARGS((
1827 PRIVATE S16 rgSCHCmnDlPhichOffsetInit ARGS((
1832 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt ARGS
1838 PRIVATE RgSchCmnRank rgSCHCmnComputeRank ARGS
1845 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3 ARGS
1850 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3 ARGS
1855 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4 ARGS
1860 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4 ARGS
1865 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr ARGS
1872 /* comcodsepa : start */
1875 * @brief This function computes efficiency and stores in a table.
1879 * Function: rgSCHCmnCompEff
1880 * Purpose: this function computes the efficiency as number of
1881 * bytes per 1024 symbols. The CFI table is also filled
1882 * with the same information such that comparison is valid
1884 * Invoked by: Scheduler
1886 * @param[in] U8 noPdcchSym
1887 * @param[in] U8 cpType
1888 * @param[in] U8 txAntIdx
1889 * @param[in] RgSchCmnTbSzEff* effTbl
/* ANSI prototype followed by the traditional K&R-style definition, a
 * pattern used throughout this file for pre-ANSI compiler portability. */
1894 PRIVATE Void rgSCHCmnCompEff
1899 RgSchCmnTbSzEff *effTbl
1902 PRIVATE Void rgSCHCmnCompEff(noPdcchSym, cpType, txAntIdx, effTbl)
1906 RgSchCmnTbSzEff *effTbl;
1911 U8 resOfCrs; /* Effective REs occupied by CRS */
1914 TRC2(rgSCHCmnCompEff);
/* Select symbols-per-RB from the cyclic prefix type. */
1918 case RG_SCH_CMN_NOR_CP:
1921 case RG_SCH_CMN_EXT_CP:
1925 /* Generate a log error. This case should never be executed */
1929 /* Depending on the Tx Antenna Index, deduct the
1930 * Resource elements for the CRS */
1934 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
1937 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
1940 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
1943 /* Generate a log error. This case should never be executed */
/* Usable REs per RB after removing PDCCH symbols and CRS overhead. */
1946 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
/* For each single-layer TBS index, average the per-1024-RE coding
 * efficiency of rgTbSzTbl[0] over every possible RB allocation size. */
1947 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
1950 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
1952 /* This line computes the coding efficiency per 1024 REs */
1953 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
1955 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
1960 * @brief This function computes efficiency and stores in a table.
1964 * Function: rgSCHCmnCompUlEff
1965 * Purpose: this function computes the efficiency as number of
1966 * bytes per 1024 symbols. The CFI table is also filled
1967 * with the same information such that comparison is valid
1969 * Invoked by: Scheduler
1971 * @param[in] U8 noUlRsSym
1972 * @param[in] U8 cpType
1973 * @param[in] U8 txAntIdx
1974 * @param[in] RgSchCmnTbSzEff* effTbl
/* UL counterpart of rgSCHCmnCompEff: deducts UL reference-signal symbols
 * (noUlRsSym) instead of PDCCH symbols and has no CRS overhead term. */
1979 PRIVATE Void rgSCHCmnCompUlEff
1983 RgSchCmnTbSzEff *effTbl
1986 PRIVATE Void rgSCHCmnCompUlEff(noUlRsSym, cpType, effTbl)
1989 RgSchCmnTbSzEff *effTbl;
1996 TRC2(rgSCHCmnCompUlEff);
/* Select symbols-per-RB from the cyclic prefix type. */
2000 case RG_SCH_CMN_NOR_CP:
2003 case RG_SCH_CMN_EXT_CP:
2007 /* Generate a log error. This case should never be executed */
/* Usable REs per RB after removing the UL RS symbols. */
2011 noResPerRb = ((noSymPerRb - noUlRsSym) * RB_SCH_CMN_NUM_SCS_PER_RB);
/* Average the single-layer (rgTbSzTbl[0]) efficiency over all RB counts. */
2012 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
2015 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
2017 /* This line computes the coding efficiency per 1024 REs */
2018 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
2020 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
2026 * @brief This function computes efficiency for 2 layers and stores in a table.
2030 * Function: rgSCHCmn2LyrCompEff
2031 * Purpose: this function computes the efficiency as number of
2032 * bytes per 1024 symbols. The CFI table is also filled
2033 * with the same information such that comparison is valid
2035 * Invoked by: Scheduler
2037 * @param[in] U8 noPdcchSym
2038 * @param[in] U8 cpType
2039 * @param[in] U8 txAntIdx
2040 * @param[in] RgSchCmnTbSzEff* effTbl2Lyr
/* Two-layer variant of rgSCHCmnCompEff: identical RE accounting but reads
 * the two-layer TBS table rgTbSzTbl[1] and zero-initializes each entry
 * before accumulating. */
2045 PRIVATE Void rgSCHCmn2LyrCompEff
2050 RgSchCmnTbSzEff *effTbl2Lyr
2053 PRIVATE Void rgSCHCmn2LyrCompEff(noPdcchSym, cpType, txAntIdx, effTbl2Lyr)
2057 RgSchCmnTbSzEff *effTbl2Lyr;
2062 U8 resOfCrs; /* Effective REs occupied by CRS */
2065 TRC2(rgSCHCmn2LyrCompEff);
/* Select symbols-per-RB from the cyclic prefix type. */
2069 case RG_SCH_CMN_NOR_CP:
2072 case RG_SCH_CMN_EXT_CP:
2076 /* Generate a log error. This case should never be executed */
2080 /* Depending on the Tx Antenna Index, deduct the
2081 * Resource elements for the CRS */
2085 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
2088 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
2091 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
2094 /* Generate a log error. This case should never be executed */
/* Usable REs per RB after removing PDCCH symbols and CRS overhead. */
2098 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
/* Average the two-layer efficiency of rgTbSzTbl[1] over all RB counts. */
2099 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
2101 (*effTbl2Lyr)[i] = 0;
2102 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
2104 /* This line computes the coding efficiency per 1024 REs */
2105 (*effTbl2Lyr)[i] += (rgTbSzTbl[1][i][j] * 1024) / (noResPerRb * (j+1));
2107 (*effTbl2Lyr)[i] /= RG_SCH_CMN_NUM_RBS;
2114 * @brief This function initializes the rgSchCmnDciFrmtSizes table.
2118 * Function: rgSCHCmnGetDciFrmtSizes
2119 * Purpose: This function determines the sizes of all
2120 * the available DCI Formats. The order of
2121 * bits addition for each format is inaccordance
2123 * Invoked by: rgSCHCmnRgrCellCfg
/* Computes the bit-size of DCI formats 0, 1, 1A, 1B, 1C, 1D, 2, 2A, 3, 3A
 * (table indices 0..9) from the cell's UL/DL bandwidth, RBG size and number
 * of Tx antenna ports. Called once at cell configuration. */
2129 PRIVATE Void rgSCHCmnGetDciFrmtSizes
2134 PRIVATE Void rgSCHCmnGetDciFrmtSizes(cell)
2139 TRC2(rgSCHCmnGetDciFrmtSizes);
2141 /* DCI Format 0 size determination */
2142 rgSchCmnDciFrmtSizes[0] = 1 +
2144 rgSCHUtlLog32bitNbase2((cell->bwCfg.ulTotalBw * \
2145 (cell->bwCfg.ulTotalBw + 1))/2) +
2155 /* DCI Format 1 size determination */
2156 rgSchCmnDciFrmtSizes[1] = 1 +
2157 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2162 4 + 2 + /* HqProc Id and DAI */
2168 /* DCI Format 1A size determination */
2169 rgSchCmnDciFrmtSizes[2] = 1 + /* Flag for format0/format1a differentiation */
2170 1 + /* Localized/distributed VRB assignment flag */
/* NOTE(review): two HARQ-process-Id terms (3-bit and 4-bit) appear below;
 * presumably FDD vs TDD alternatives under conditional compilation not
 * visible in this excerpt — confirm in the full file. */
2173 3 + /* Harq process Id */
2175 4 + /* Harq process Id */
2176 2 + /* UL Index or DAI */
2178 1 + /* New Data Indicator */
2181 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2182 (cell->bwCfg.dlTotalBw + 1))/2);
2183 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
2184 Since VRB is local */
2186 /* DCI Format 1B size determination */
2187 rgSchCmnDciFrmtSizes[3] = 1 +
2188 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2189 (cell->bwCfg.dlTotalBw + 1))/2) +
2199 ((cell->numTxAntPorts == 4)? 4:2) +
2202 /* DCI Format 1C size determination */
2203 /* Approximation: NDLVrbGap1 ~= Nprb for DL */
/* NOTE(review): the leading "(bw < 50)? 0:1 + ..." below is not
 * parenthesized like the "((bw < 10)?0:1) + ..." used for Formats 2/2A;
 * with C precedence the "+ ..." terms bind to the ":" branch, so the whole
 * expression evaluates to 0 when dlTotalBw < 50. Confirm whether this is
 * intentional before changing. */
2204 rgSchCmnDciFrmtSizes[4] = (cell->bwCfg.dlTotalBw < 50)? 0:1 +
2205 (cell->bwCfg.dlTotalBw < 50)?
2206 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/2 * \
2207 (cell->bwCfg.dlTotalBw/2 + 1))/2)) :
2208 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/4 * \
2209 (cell->bwCfg.dlTotalBw/4 + 1))/2)) +
2212 /* DCI Format 1D size determination */
2213 rgSchCmnDciFrmtSizes[5] = 1 +
2214 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2215 (cell->bwCfg.dlTotalBw + 1))/2) +
2224 ((cell->numTxAntPorts == 4)? 4:2) +
2227 /* DCI Format 2 size determination */
2228 rgSchCmnDciFrmtSizes[6] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2229 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2237 ((cell->numTxAntPorts == 4)? 6:3);
2239 /* DCI Format 2A size determination */
2240 rgSchCmnDciFrmtSizes[7] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2241 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2249 ((cell->numTxAntPorts == 4)? 2:0);
2251 /* DCI Format 3 size determination */
/* Formats 3/3A carry group TPC commands and are sized equal to Format 0. */
2252 rgSchCmnDciFrmtSizes[8] = rgSchCmnDciFrmtSizes[0];
2254 /* DCI Format 3A size determination */
2255 rgSchCmnDciFrmtSizes[9] = rgSchCmnDciFrmtSizes[0];
2262 * @brief This function initializes the cmnCell->dciAggrLvl table.
2266 * Function: rgSCHCmnGetCqiDciFrmt2AggrLvl
2267 * Purpose: This function determines the Aggregation level
2268 * for each CQI level against each DCI format.
2269 * Invoked by: rgSCHCmnRgrCellCfg
/* For every (CQI, DCI-format) pair, scales the DCI payload size by the
 * per-CQI PDCCH efficiency to get the physical bits needed, then picks the
 * smallest aggregation level (2/4/8/16 CCEs) whose capacity covers it
 * using the 192/384/768-bit thresholds below. */
2275 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl
2280 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl(cell)
2284 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2288 TRC2(rgSCHCmnGetCqiDciFrmt2AggrLvl);
2290 for (i = 0; i < RG_SCH_CMN_MAX_CQI; i++)
/* j iterates over the 10 DCI formats sized in rgSCHCmnGetDciFrmtSizes. */
2292 for (j = 0; j < 10; j++)
2294 U32 pdcchBits; /* Actual number of phy bits needed for a given DCI Format
2295 * for a given CQI Level */
/* Efficiency is expressed per 1024 bits, hence the *1024 scaling. */
2296 pdcchBits = (rgSchCmnDciFrmtSizes[j] * 1024)/rgSchCmnCqiPdcchEff[i];
2298 if (pdcchBits < 192)
2300 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL2;
2303 if (pdcchBits < 384)
2305 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL4;
2308 if (pdcchBits < 768)
2310 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL8;
2313 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL16;
2320 * @brief This function initializes all the data for the scheduler.
2324 * Function: rgSCHCmnDlInit
2325 * Purpose: This function initializes the following information:
2326 * 1. Efficiency table
2327 * 2. CQI to table index - It is one row for upto 3 RBs
2328 * and another row for greater than 3 RBs
2329 * currently extended prefix is compiled out.
2330 * Invoked by: MAC intialization code..may be ActvInit
2336 PRIVATE Void rgSCHCmnDlInit
2340 PRIVATE Void rgSCHCmnDlInit()
2347 RgSchCmnTbSzEff *effTbl;
2348 RgSchCmnCqiToTbs *tbsTbl;
2350 TRC2(rgSCHCmnDlInit);
2352 /* 0 corresponds to Single layer case, 1 corresponds to 2 layers case*/
2353 /* Init Efficiency table for normal cyclic prefix */
2354 /*Initialize Efficiency table for Layer Index 0 */
2355 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2356 /*Initialize Efficiency table for each of the CFI indices. The
2357 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2358 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[0];
2359 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[0];
2360 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[0];
2361 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[0];
2362 /*Initialize Efficency table for Tx Antenna Port Index 1 */
2363 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[0];
2364 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[0];
2365 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[0];
2366 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[0];
2367 /*Initialize Efficency table for Tx Antenna Port Index 2 */
2368 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[0];
2369 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[0];
2370 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[0];
2371 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[0];
2373 /*Initialize CQI to TBS table for Layer Index 0 for Normal CP */
2374 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[0];
2375 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[0];
2376 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[0];
2377 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[0];
2379 /*Intialize Efficency table for Layer Index 1 */
2380 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2381 /*Initialize Efficiency table for each of the CFI indices. The
2382 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2383 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[1];
2384 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[1];
2385 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[1];
2386 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[1];
2387 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2388 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[1];
2389 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[1];
2390 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[1];
2391 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[1];
2392 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2393 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[1];
2394 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[1];
2395 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[1];
2396 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[1];
2398 /*Initialize CQI to TBS table for Layer Index 1 for Normal CP */
2399 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[1];
2400 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[1];
2401 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[1];
2402 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[1];
2404 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2406 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2408 /* EfficiencyTbl calculation incase of 2 layers for normal CP */
2409 rgSCHCmnCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx,\
2410 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i]);
2411 rgSCHCmn2LyrCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx, \
2412 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i]);
2416 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2418 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2420 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i];
2421 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][i];
2422 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2423 (j >= 0) && (k > 0); --j)
2425 /* ADD CQI to MCS mapping correction
2426 * single dimensional array is replaced by 2 dimensions for different CFI*/
2427 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2429 (*tbsTbl)[k--] = (U8)j;
2436 /* effTbl,tbsTbl calculation incase of 2 layers for normal CP */
2437 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i];
2438 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][i];
2439 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2440 (j >= 0) && (k > 0); --j)
2442 /* ADD CQI to MCS mapping correction
2443 * single dimensional array is replaced by 2 dimensions for different CFI*/
2444 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2446 (*tbsTbl)[k--] = (U8)j;
2456 /* Efficiency Table for Extended CP */
2457 /*Initialize Efficiency table for Layer Index 0 */
2458 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2459 /*Initialize Efficiency table for each of the CFI indices. The
2460 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2461 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[0];
2462 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[0];
2463 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[0];
2464 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[0];
2465 /*Initialize Efficency table for Tx Antenna Port Index 1 */
2466 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[0];
2467 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[0];
2468 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[0];
2469 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[0];
2470 /*Initialize Efficency table for Tx Antenna Port Index 2 */
2471 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[0];
2472 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[0];
2473 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[0];
2474 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[0];
2476 /*Initialize CQI to TBS table for Layer Index 0 for Extended CP */
2477 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[0];
2478 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[0];
2479 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[0];
2480 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[0];
2482 /*Initialize Efficiency table for Layer Index 1 */
2483 /*Initialize Efficiency table for each of the CFI indices. The
2484 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2485 /*Initialize Efficency table for Tx Antenna Port Index 0 */
2486 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[1];
2487 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[1];
2488 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[1];
2489 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[1];
2490 /*Initialize Efficency table for Tx Antenna Port Index 1 */
2491 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[1];
2492 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[1];
2493 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[1];
2494 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[1];
2495 /*Initialize Efficency table for Tx Antenna Port Index 2 */
2496 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[1];
2497 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[1];
2498 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[1];
2499 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[1];
2501 /*Initialize CQI to TBS table for Layer Index 1 for Extended CP */
2502 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[1];
2503 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[1];
2504 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[1];
2505 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[1];
2506 /* Activate this code when extended cp is supported */
2507 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2509 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2511 /* EfficiencyTbl calculation incase of 2 layers for extendedl CP */
2512 rgSCHCmnCompEff( (U8)(i + 1 ), (U8)RG_SCH_CMN_EXT_CP, idx,\
2513 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i]);
2514 rgSCHCmn2LyrCompEff((U8)(i + 1), (U8) RG_SCH_CMN_EXT_CP,idx, \
2515 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i]);
2519 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2521 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2523 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i];
2524 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][i];
2525 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2526 (j >= 0) && (k > 0); --j)
2528 /* ADD CQI to MCS mapping correction
2529 * single dimensional array is replaced by 2 dimensions for different CFI*/
2530 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2532 (*tbsTbl)[k--] = (U8)j;
2539 /* effTbl,tbsTbl calculation incase of 2 layers for extended CP */
2540 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i];
2541 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][i];
2542 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2543 (j >= 0) && (k > 0); --j)
2545 /* ADD CQI to MCS mapping correction
2546 * single dimensional array is replaced by 2 dimensions for different CFI*/
2547 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2549 (*tbsTbl)[k--] = (U8)j;
2562 * @brief This function initializes all the data for the scheduler.
2566 * Function: rgSCHCmnUlInit
2567 * Purpose: This function initializes the following information:
2568 * 1. Efficiency table
2569 * 2. CQI to table index - It is one row for upto 3 RBs
2570 * and another row for greater than 3 RBs
2571 * currently extended prefix is compiled out.
2572 * Invoked by: MAC initialization code... may be ActvInit
2578 PRIVATE Void rgSCHCmnUlInit
2582 PRIVATE Void rgSCHCmnUlInit()
/* Builds the UL efficiency tables and the UL CQI -> iTBS mapping
 * (rgSchCmnUlCqiToTbsTbl) for both normal and extended cyclic prefix.
 * Invoked once during scheduler initialization; no parameters, no return. */
2585 U8 *mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_NOR_CP][0];
2586 RgSchCmnTbSzEff *effTbl = &rgSchCmnNorUlEff[0];
2587 CONSTANT RgSchCmnUlCqiInfo *cqiTbl = &rgSchCmnUlCqiTbl[0];
2590 TRC2(rgSCHCmnUlInit);
2592 /* Initializing new variable added for UL eff */
2593 rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP] = &rgSchCmnNorUlEff[0];
2594 /* Reason behind using 3 as the number of symbols to rule out for
2595 * efficiency table computation would be that we are using 2 symbols for
2596 * DMRS(1 in each slot) and 1 symbol for SRS*/
2597 rgSCHCmnCompUlEff(RGSCH_UL_SYM_DMRS_SRS,RG_SCH_CMN_NOR_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP]);
/* Walk iTBS values from highest to lowest; for each CQI record the largest
 * iTBS whose spectral efficiency does not exceed that CQI's efficiency. */
2599 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2600 i >= 0 && j > 0; --i)
2602 if ((*effTbl)[i] <= cqiTbl[j].eff)
2604 mapTbl[j--] = (U8)i;
/* Repeat the same computation for extended cyclic prefix. */
2611 effTbl = &rgSchCmnExtUlEff[0];
2612 mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_EXT_CP][0];
2614 /* Initializing new variable added for UL eff */
2615 rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP] = &rgSchCmnExtUlEff[0];
2616 /* Reason behind using 3 as the number of symbols to rule out for
2617 * efficiency table computation would be that we are using 2 symbols for
2618 * DMRS(1 in each slot) and 1 symbol for SRS*/
/* NOTE(review): literal 3 here vs the named constant RGSCH_UL_SYM_DMRS_SRS
 * used for normal CP above — presumably the same value; prefer the named
 * constant for consistency. TODO confirm. */
2619 rgSCHCmnCompUlEff(3,RG_SCH_CMN_EXT_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP]);
2621 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2622 i >= 0 && j > 0; --i)
2624 if ((*effTbl)[i] <= cqiTbl[j].eff)
2626 mapTbl[j--] = (U8)i;
2638 * @brief This function initializes all the data for the scheduler.
2642 * Function: rgSCHCmnInit
2643 * Purpose: This function initializes the following information:
2644 * 1. Efficiency table
2645 * 2. CQI to table index - It is one row for upto 3 RBs
2646 * and another row for greater than 3 RBs
2647 * currently extended prefix is compiled out.
2648 * Invoked by: MAC initialization code... may be ActvInit
2654 PUBLIC Void rgSCHCmnInit
2658 PUBLIC Void rgSCHCmnInit()
/* Top-level scheduler initialization: initializes EMTC DL/UL sub-schedulers,
 * populates the rgSchCmnApis function-pointer table with the common-scheduler
 * implementations, and runs the per-scheduler init hooks. No return value. */
2667 rgSCHEmtcCmnDlInit();
2668 rgSCHEmtcCmnUlInit();
2674 /* Init the function pointers */
2675 rgSchCmnApis.rgSCHRgrUeCfg = rgSCHCmnRgrUeCfg;
2676 rgSchCmnApis.rgSCHRgrUeRecfg = rgSCHCmnRgrUeRecfg;
2677 rgSchCmnApis.rgSCHFreeUe = rgSCHCmnUeDel;
2678 rgSchCmnApis.rgSCHRgrCellCfg = rgSCHCmnRgrCellCfg;
2679 rgSchCmnApis.rgSCHRgrCellRecfg = rgSCHCmnRgrCellRecfg;
2680 rgSchCmnApis.rgSCHFreeCell = rgSCHCmnCellDel;
2681 rgSchCmnApis.rgSCHRgrLchCfg = rgSCHCmnRgrLchCfg;
2682 rgSchCmnApis.rgSCHRgrLcgCfg = rgSCHCmnRgrLcgCfg;
2683 rgSchCmnApis.rgSCHRgrLchRecfg = rgSCHCmnRgrLchRecfg;
2684 rgSchCmnApis.rgSCHRgrLcgRecfg = rgSCHCmnRgrLcgRecfg;
2685 rgSchCmnApis.rgSCHFreeDlLc = rgSCHCmnFreeDlLc;
2686 rgSchCmnApis.rgSCHFreeLcg = rgSCHCmnLcgDel;
2687 rgSchCmnApis.rgSCHRgrLchDel = rgSCHCmnRgrLchDel;
2688 rgSchCmnApis.rgSCHActvtUlUe = rgSCHCmnActvtUlUe;
2689 rgSchCmnApis.rgSCHActvtDlUe = rgSCHCmnActvtDlUe;
2690 rgSchCmnApis.rgSCHHdlUlTransInd = rgSCHCmnHdlUlTransInd;
2691 rgSchCmnApis.rgSCHDlDedBoUpd = rgSCHCmnDlDedBoUpd;
2692 rgSchCmnApis.rgSCHUlRecMsg3Alloc = rgSCHCmnUlRecMsg3Alloc;
2693 rgSchCmnApis.rgSCHUlCqiInd = rgSCHCmnUlCqiInd;
2694 rgSchCmnApis.rgSCHPucchDeltaPwrInd = rgSCHPwrPucchDeltaInd;
2695 rgSchCmnApis.rgSCHUlHqProcForUe = rgSCHCmnUlHqProcForUe;
2697 rgSchCmnApis.rgSCHUpdUlHqProc = rgSCHCmnUpdUlHqProc;
2699 rgSchCmnApis.rgSCHUpdBsrShort = rgSCHCmnUpdBsrShort;
2700 rgSchCmnApis.rgSCHUpdBsrTrunc = rgSCHCmnUpdBsrTrunc;
2701 rgSchCmnApis.rgSCHUpdBsrLong = rgSCHCmnUpdBsrLong;
2702 rgSchCmnApis.rgSCHUpdPhr = rgSCHCmnUpdPhr;
2703 rgSchCmnApis.rgSCHUpdExtPhr = rgSCHCmnUpdExtPhr;
2704 rgSchCmnApis.rgSCHContResUlGrant = rgSCHCmnContResUlGrant;
2705 rgSchCmnApis.rgSCHSrRcvd = rgSCHCmnSrRcvd;
2706 rgSchCmnApis.rgSCHFirstRcptnReq = rgSCHCmnFirstRcptnReq;
2707 rgSchCmnApis.rgSCHNextRcptnReq = rgSCHCmnNextRcptnReq;
2708 rgSchCmnApis.rgSCHFirstHqFdbkAlloc = rgSCHCmnFirstHqFdbkAlloc;
2709 rgSchCmnApis.rgSCHNextHqFdbkAlloc = rgSCHCmnNextHqFdbkAlloc;
2710 rgSchCmnApis.rgSCHDlProcAddToRetx = rgSCHCmnDlProcAddToRetx;
2711 rgSchCmnApis.rgSCHDlCqiInd = rgSCHCmnDlCqiInd;
/* NOTE(review): UL retx hook bound to the EMTC variant here — presumably
 * guarded by an EMTC compile flag in the elided lines; confirm. */
2713 rgSchCmnApis.rgSCHUlProcAddToRetx = rgSCHCmnEmtcUlProcAddToRetx;
2716 rgSchCmnApis.rgSCHSrsInd = rgSCHCmnSrsInd;
2718 rgSchCmnApis.rgSCHDlTARpt = rgSCHCmnDlTARpt;
2719 rgSchCmnApis.rgSCHDlRlsSubFrm = rgSCHCmnDlRlsSubFrm;
2720 rgSchCmnApis.rgSCHUeReset = rgSCHCmnUeReset;
2722 rgSchCmnApis.rgSCHHdlCrntiCE = rgSCHCmnHdlCrntiCE;
2723 rgSchCmnApis.rgSCHDlProcAck = rgSCHCmnDlProcAck;
2724 rgSchCmnApis.rgSCHDlRelPdcchFbk = rgSCHCmnDlRelPdcchFbk;
2725 rgSchCmnApis.rgSCHUlSpsRelInd = rgSCHCmnUlSpsRelInd;
2726 rgSchCmnApis.rgSCHUlSpsActInd = rgSCHCmnUlSpsActInd;
2727 rgSchCmnApis.rgSCHUlCrcFailInd = rgSCHCmnUlCrcFailInd;
2728 rgSchCmnApis.rgSCHUlCrcInd = rgSCHCmnUlCrcInd;
2730 rgSchCmnApis.rgSCHDrxStrtInActvTmrInUl = rgSCHCmnDrxStrtInActvTmrInUl;
2731 rgSchCmnApis.rgSCHUpdUeDataIndLcg = rgSCHCmnUpdUeDataIndLcg;
/* Run the registered init routine of every UL/DL specific scheduler
 * (e.g. round-robin, proportional-fair) to fill its dispatch table. */
2733 for (idx = 0; idx < RGSCH_NUM_SCHEDULERS; ++idx)
2735 rgSchUlSchdInits[idx](&rgSchUlSchdTbl[idx]);
2736 rgSchDlSchdInits[idx](&rgSchDlSchdTbl[idx]);
/* Same for the EMTC-specific scheduler implementations. */
2739 for (idx = 0; idx < RGSCH_NUM_EMTC_SCHEDULERS; ++idx)
2741 rgSchEmtcUlSchdInits[idx](&rgSchEmtcUlSchdTbl[idx]);
2742 rgSchEmtcDlSchdInits[idx](&rgSchEmtcDlSchdTbl[idx]);
/* DL frequency-selective schedulers: only when phase-2 scheduling and
 * TFU upgrade support are compiled in. */
2745 #if (defined (RG_PHASE2_SCHED) && defined(TFU_UPGRADE))
2746 for (idx = 0; idx < RGSCH_NUM_DLFS_SCHEDULERS; ++idx)
2748 rgSchDlfsSchdInits[idx](&rgSchDlfsSchdTbl[idx]);
/* Carrier-aggregation SCell configuration hooks. */
2752 rgSchCmnApis.rgSCHRgrSCellUeCfg = rgSCHCmnRgrSCellUeCfg;
2753 rgSchCmnApis.rgSCHRgrSCellUeDel = rgSCHCmnRgrSCellUeDel;
2760 * @brief This function is a wrapper to call scheduler specific API.
2764 * Function: rgSCHCmnDlRlsSubFrm
2765 * Purpose: Releases scheduler Information from DL SubFrm.
2769 * @param[in] RgSchCellCb *cell
2770 * @param[out] CmLteTimingInfo frm
2775 PUBLIC Void rgSCHCmnDlRlsSubFrm
2781 PUBLIC Void rgSCHCmnDlRlsSubFrm(cell, frm)
2783 CmLteTimingInfo frm;
/* Releases scheduler information held in the DL subframe identified by
 * 'frm' for the given cell, then lets the DLFS scheduler re-initialize
 * its per-subframe state.
 * @param[in] cell  cell control block
 * @param[in] frm   timing info identifying the subframe to release */
2786 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2789 TRC2(rgSCHCmnDlRlsSubFrm);
2791 /* Get the pointer to the subframe */
2792 sf = rgSCHUtlSubFrmGet(cell, frm);
2794 rgSCHUtlSubFrmPut(cell, sf);
2797 /* Re-initialize DLFS specific information for the sub-frame */
2798 cellSch->apisDlfs->rgSCHDlfsReinitSf(cell, sf);
2806 * @brief This function is the starting function for DL allocation.
2810 * Function: rgSCHCmnDlCmnChAlloc
2811 * Purpose: Scheduling for downlink. It performs allocation in the order
2812 * of priority with BCCH/PCH first, CCCH, Random Access and TA.
2814 * Invoked by: Scheduler
2816 * @param[in] RgSchCellCb* cell
2817 * @param[out] RgSchCmnDlRbAllocInfo* allocInfo
2822 PRIVATE Void rgSCHCmnDlCcchRarAlloc
2827 PRIVATE Void rgSCHCmnDlCcchRarAlloc(cell)
/* Drives the common-channel DL allocation sequence for one TTI:
 * Msg4 retx, Msg4 tx, CCCH-SDU retx, CCCH-SDU tx, then RAR (Msg3 response).
 * Each fresh-transmission step is skipped when the current subframe is an
 * ABS (almost-blank subframe) configured with RGR_ABS_MUTE.
 * @param[in] cell  cell control block */
2831 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2833 TRC2(rgSCHCmnDlCcchRarAlloc);
/* Msg4 retransmissions are scheduled first (highest priority). */
2835 rgSCHCmnDlCcchRetx(cell, &cellSch->allocInfo);
2836 /* LTE_ADV_FLAG_REMOVED_START */
2837 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2839 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2841 /*eNodeB need to blank the subframe */
2845 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
2850 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
2852 /* LTE_ADV_FLAG_REMOVED_END */
2856 /*Added these function calls for processing CCCH SDU arriving
2857 * after guard timer expiry. Functions differ from above two functions
2858 * in using ueCb instead of raCb.*/
2859 rgSCHCmnDlCcchSduRetx(cell, &cellSch->allocInfo);
2860 /* LTE_ADV_FLAG_REMOVED_START */
2861 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2863 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2865 /*eNodeB need to blank the subframe */
2869 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2874 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2876 /* LTE_ADV_FLAG_REMOVED_END */
/* RAR scheduling: only when there is a valid Msg3 scheduling index. */
2880 if(cellSch->ul.msg3SchdIdx != RGSCH_INVALID_INFO)
2882 /* Do not schedule msg3 if there is a CFI change ongoing */
2883 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2885 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2889 /* LTE_ADV_FLAG_REMOVED_START */
2890 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2892 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2894 /*eNodeB need to blank the subframe */
2898 /* Do not schedule msg3 if there is a CFI change ongoing */
2899 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2901 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2907 /* Do not schedule msg3 if there is a CFI change ongoing */
2908 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2910 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2913 /* LTE_ADV_FLAG_REMOVED_END */
2921 * @brief Scheduling for CCCH SDU.
2925 * Function: rgSCHCmnCcchSduAlloc
2926 * Purpose: Scheduling for CCCH SDU
2928 * Invoked by: Scheduler
2930 * @param[in] RgSchCellCb* cell
2931 * @param[in] RgSchUeCb* ueCb
2932 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
2937 PRIVATE S16 rgSCHCmnCcchSduAlloc
2941 RgSchCmnDlRbAllocInfo *allocInfo
2944 PRIVATE S16 rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)
2947 RgSchCmnDlRbAllocInfo *allocInfo;
/* Attempts a fresh CCCH-SDU allocation for one UE: verifies subframe BW is
 * left, obtains a HARQ process, performs the dedicated allocation, and on
 * success queues the HARQ proc on the CCCH-SDU TX list.
 * @param[in]  cell       cell control block
 * @param[in]  ueCb       UE control block owning the CCCH SDU
 * @param[out] allocInfo  per-TTI RB allocation info (TX list updated)
 * @return ROK on success, RFAILED otherwise (return statements are in
 *         lines elided from this view) */
2950 RgSchDlRbAlloc *rbAllocInfo;
2951 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2952 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
2954 TRC2(rgSCHCmnCcchSduAlloc);
2956 /* Return if subframe BW exhausted */
2957 if (allocInfo->ccchSduAlloc.ccchSduDlSf->bw <=
2958 allocInfo->ccchSduAlloc.ccchSduDlSf->bwAssigned)
2960 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2961 "bw<=bwAssigned for UEID:%d",ueCb->ueId);
/* Acquire a DL HARQ process dedicated to the CCCH SDU transmission. */
2965 if (rgSCHDhmGetCcchSduHqProc(ueCb, cellSch->dl.time, &(ueDl->proc)) != ROK)
2967 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2968 "rgSCHDhmGetCcchSduHqProc failed UEID:%d",ueCb->ueId);
2972 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
2973 rbAllocInfo->dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
2975 if (rgSCHCmnCcchSduDedAlloc(cell, ueCb) != ROK)
2977 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
2978 rgSCHDhmRlsHqpTb(ueDl->proc, 0, FALSE);
2979 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2980 "rgSCHCmnCcchSduDedAlloc failed UEID:%d",ueCb->ueId);
/* Success: enqueue the HARQ proc for transmission and count the UE
 * against the per-subframe CCCH limit. */
2983 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduTxLst, &ueDl->proc->reqLnk);
2984 ueDl->proc->reqLnk.node = (PTR)ueDl->proc;
2985 allocInfo->ccchSduAlloc.ccchSduDlSf->schdCcchUe++;
2990 * @brief This function scheduler for downlink CCCH messages.
2994 * Function: rgSCHCmnDlCcchSduTx
2995 * Purpose: Scheduling for downlink CCCH
2997 * Invoked by: Scheduler
2999 * @param[in] RgSchCellCb *cell
3000 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3005 PRIVATE Void rgSCHCmnDlCcchSduTx
3008 RgSchCmnDlRbAllocInfo *allocInfo
3011 PRIVATE Void rgSCHCmnDlCcchSduTx(cell, allocInfo)
3013 RgSchCmnDlRbAllocInfo *allocInfo;
/* Schedules fresh CCCH-SDU transmissions: walks the cell's CCCH-SDU UE
 * list and tries rgSCHCmnCcchSduAlloc() for each eligible UE, stopping
 * when the per-subframe CCCH UE cap is reached.
 * @param[in]  cell       cell control block
 * @param[out] allocInfo  per-TTI RB allocation info */
3018 RgSchCmnDlUe *ueCmnDl;
3019 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3021 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
3023 TRC2(rgSCHCmnDlCcchSduTx);
3025 node = cell->ccchSduUeLst.first;
/* Stop once this subframe already carries the configured maximum number
 * of CCCH UEs (0 means no limit). */
3028 if(cellSch->dl.maxCcchPerDlSf &&
3029 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3035 ueCb = (RgSchUeCb *)(node->node);
3036 ueCmnDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3038 /* Fix : syed postpone scheduling for this
3039 * until msg4 is done */
3040 /* Fix : syed RLC can erroneously send CCCH SDU BO
3041 * twice. Hence an extra guard to avoid if already
3042 * scheduled for RETX */
3043 if ((!(ueCb->dl.dlInactvMask & RG_HQENT_INACTIVE)) &&
3046 if ((rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)) != ROK)
3053 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"ERROR!! THIS SHOULD "
3054 "NEVER HAPPEN for UEID:%d", ueCb->ueId);
3064 * @brief This function scheduler for downlink CCCH messages.
3068 * Function: rgSCHCmnDlCcchTx
3069 * Purpose: Scheduling for downlink CCCH
3071 * Invoked by: Scheduler
3073 * @param[in] RgSchCellCb *cell
3074 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3079 PRIVATE Void rgSCHCmnDlCcchTx
3082 RgSchCmnDlRbAllocInfo *allocInfo
3085 PRIVATE Void rgSCHCmnDlCcchTx(cell, allocInfo)
3087 RgSchCmnDlRbAllocInfo *allocInfo;
/* Schedules fresh Msg4 transmissions: walks the RA "to be scheduled"
 * list and attempts rgSCHCmnMsg4Alloc() for each RA control block,
 * honoring the per-subframe CCCH UE cap.
 * @param[in]  cell       cell control block
 * @param[out] allocInfo  per-TTI RB allocation info */
3092 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3093 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3095 TRC2(rgSCHCmnDlCcchTx);
3097 node = cell->raInfo.toBeSchdLst.first;
/* Respect the configured maximum CCCH UEs per DL subframe (0 = no limit). */
3100 if(cellSch->dl.maxCcchPerDlSf &&
3101 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3108 raCb = (RgSchRaCb *)(node->node);
3110 /* Address allocation for this UE for MSG 4 */
3111 /* Allocation for Msg4 */
3112 if ((rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)) != ROK)
3123 * @brief This function scheduler for downlink CCCH messages.
3127 * Function: rgSCHCmnDlCcchSduRetx
3128 * Purpose: Scheduling for downlink CCCH
3130 * Invoked by: Scheduler
3132 * @param[in] RgSchCellCb *cell
3133 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3138 PRIVATE Void rgSCHCmnDlCcchSduRetx
3141 RgSchCmnDlRbAllocInfo *allocInfo
3144 PRIVATE Void rgSCHCmnDlCcchSduRetx(cell, allocInfo)
3146 RgSchCmnDlRbAllocInfo *allocInfo;
/* Schedules CCCH-SDU retransmissions: walks the cell's CCCH-SDU retx
 * HARQ list, fills each UE's RB allocation control block from the stored
 * TB info, and queues the HARQ proc on the CCCH-SDU retx list. Total RBs
 * consumed are accumulated in retxBw and committed to dlSf->bwAssigned
 * at the end.
 * @param[in]  cell       cell control block
 * @param[out] allocInfo  per-TTI RB allocation info */
3149 RgSchDlRbAlloc *rbAllocInfo;
3151 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3153 RgSchDlHqProcCb *hqP;
3156 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
3158 TRC2(rgSCHCmnDlCcchSduRetx);
3160 node = cellSch->dl.ccchSduRetxLst.first;
/* Per-subframe CCCH UE cap (0 means unlimited). */
3163 if(cellSch->dl.maxCcchPerDlSf &&
3164 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3171 hqP = (RgSchDlHqProcCb *)(node->node);
3174 /* DwPts Scheduling Changes Start */
/* Skip retx in TDD subframes where it must be avoided (e.g. DwPTS). */
3176 if (rgSCHCmnRetxAvoidTdd(allocInfo->ccchSduAlloc.ccchSduDlSf,
3182 /* DwPts Scheduling Changes End */
/* A retransmission must reuse the original grant size; skip if the
 * remaining subframe bandwidth cannot hold it. */
3184 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3188 ueCb = (RgSchUeCb*)(hqP->hqE->ue);
3189 ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3191 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
3192 /* Fill RB Alloc Info */
3193 rbAllocInfo->dlSf = dlSf;
3194 rbAllocInfo->tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3195 rbAllocInfo->rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3196 /* Fix : syed iMcs setting did not correspond to RETX */
3197 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3198 rbAllocInfo->tbInfo[0].imcs);
3199 rbAllocInfo->rnti = ueCb->ueId;
3200 rbAllocInfo->tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3201 /* Fix : syed Copying info in entirety without depending on stale TX information */
3202 rbAllocInfo->tbInfo[0].tbCb = &hqP->tbInfo[0];
3203 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
3204 /* Fix : syed Assigning proc to scratchpad */
3207 retxBw += rbAllocInfo->rbsReq;
3209 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduRetxLst, \
3211 hqP->reqLnk.node = (PTR)hqP;
/* Commit the accumulated retransmission bandwidth to the subframe. */
3215 dlSf->bwAssigned += retxBw;
3221 * @brief This function scheduler for downlink CCCH messages.
3225 * Function: rgSCHCmnDlCcchRetx
3226 * Purpose: Scheduling for downlink CCCH
3228 * Invoked by: Scheduler
3230 * @param[in] RgSchCellCb *cell
3231 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3236 PRIVATE Void rgSCHCmnDlCcchRetx
3239 RgSchCmnDlRbAllocInfo *allocInfo
3242 PRIVATE Void rgSCHCmnDlCcchRetx(cell, allocInfo)
3244 RgSchCmnDlRbAllocInfo *allocInfo;
/* Schedules Msg4 retransmissions: walks the cell's Msg4 retx HARQ list,
 * fills the RA control block's RB allocation info from the stored TB
 * data, and queues the HARQ proc on the Msg4 retx list. Accumulated RBs
 * are committed to dlSf->bwAssigned at the end. Mirrors
 * rgSCHCmnDlCcchSduRetx() but operates on raCb instead of ueCb.
 * @param[in]  cell       cell control block
 * @param[out] allocInfo  per-TTI RB allocation info */
3248 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3250 RgSchDlHqProcCb *hqP;
3252 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3254 TRC2(rgSCHCmnDlCcchRetx);
3256 node = cellSch->dl.msg4RetxLst.first;
/* Per-subframe CCCH UE cap (0 means unlimited). */
3259 if(cellSch->dl.maxCcchPerDlSf &&
3260 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3266 hqP = (RgSchDlHqProcCb *)(node->node);
3270 /* DwPts Scheduling Changes Start */
3272 if (rgSCHCmnRetxAvoidTdd(allocInfo->msg4Alloc.msg4DlSf,
3278 /* DwPts Scheduling Changes End */
/* Retransmission reuses the original grant size; skip if it no longer
 * fits in the remaining subframe bandwidth. */
3280 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3284 raCb = (RgSchRaCb*)(hqP->hqE->raCb);
3285 /* Fill RB Alloc Info */
3286 raCb->rbAllocInfo.dlSf = dlSf;
3287 raCb->rbAllocInfo.tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3288 raCb->rbAllocInfo.rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3289 /* Fix : syed iMcs setting did not correspond to RETX */
3290 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3291 raCb->rbAllocInfo.tbInfo[0].imcs);
3292 raCb->rbAllocInfo.rnti = raCb->tmpCrnti;
3293 raCb->rbAllocInfo.tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3294 /* Fix; syed Copying info in entirety without depending on stale TX information */
3295 raCb->rbAllocInfo.tbInfo[0].tbCb = &hqP->tbInfo[0];
3296 raCb->rbAllocInfo.tbInfo[0].schdlngForTb = TRUE;
3298 retxBw += raCb->rbAllocInfo.rbsReq;
3300 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4RetxLst, \
3302 hqP->reqLnk.node = (PTR)hqP;
/* Commit the accumulated retransmission bandwidth to the subframe. */
3306 dlSf->bwAssigned += retxBw;
3312 * @brief This function implements scheduler DL allocation for
3313 * for broadcast (on PDSCH) and paging.
3317 * Function: rgSCHCmnDlBcchPcch
3318 * Purpose: This function implements scheduler for DL allocation
3319 * for broadcast (on PDSCH) and paging.
3321 * Invoked by: Scheduler
3323 * @param[in] RgSchCellCb* cell
3329 PRIVATE Void rgSCHCmnDlBcchPcch
3332 RgSchCmnDlRbAllocInfo *allocInfo,
3333 RgInfSfAlloc *subfrmAlloc
3336 PRIVATE Void rgSCHCmnDlBcchPcch(cell, allocInfo, subfrmAlloc)
3338 RgSchCmnDlRbAllocInfo *allocInfo;
3339 RgInfSfAlloc *subfrmAlloc;
/* Allocates DL resources for broadcast and paging in a future subframe:
 * BCCH-on-BCH (MIB), BCCH-on-DLSCH (SIB1 / later SIs — under the non-RGR_SI_SCH
 * path visible here; otherwise via rgSCHDlSiSched), and PCCH.
 * @param[in]  cell        cell control block
 * @param[out] allocInfo   per-TTI RB allocation info
 * @param[out] subfrmAlloc subframe allocation info passed toward MAC */
3342 CmLteTimingInfo frm;
3344 RgSchClcDlLcCb *pcch;
3348 RgSchClcDlLcCb *bcch, *bch;
3349 #endif/*RGR_SI_SCH*/
3352 TRC2(rgSCHCmnDlBcchPcch);
3354 frm = cell->crntTime;
3356 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
3357 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
3358 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
3360 // RGSCH_SUBFRAME_INDEX(frm);
3361 //RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
3364 /* Compute the subframe for which allocation is being made */
3365 /* essentially, we need pointer to the dl frame for this subframe */
3366 sf = rgSCHUtlSubFrmGet(cell, frm);
/* ---- BCCH mapped on BCH (MIB) ---- */
3370 bch = rgSCHDbmGetBcchOnBch(cell);
3371 #if (ERRCLASS & ERRCLS_DEBUG)
3374 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on BCH is not configured");
/* A pending BO report whose transmission time matches 'frm' triggers
 * the BCH transport block for this subframe. */
3378 if (bch->boLst.first != NULLP)
3380 bo = (RgSchClcBoRpt *)(bch->boLst.first->node);
3381 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3383 sf->bch.tbSize = bo->bo;
3384 cmLListDelFrm(&bch->boLst, bch->boLst.first);
3385 /* ccpu00117052 - MOD - Passing double pointer
3386 for proper NULLP assignment*/
3387 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(*bo));
3388 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, bch->lcId,TRUE);
/* MIB occupies subframe 0 of every 4th frame (sfn % 4 == 0). */
3393 if ((frm.sfn % 4 == 0) && (frm.subframe == 0))
/* ---- BCCH mapped on DLSCH: first entity (SIB1-like periodicity) ---- */
3398 allocInfo->bcchAlloc.schdFirst = FALSE;
3399 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
3400 #if (ERRCLASS & ERRCLS_DEBUG)
3403 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3407 if (bcch->boLst.first != NULLP)
3409 bo = (RgSchClcBoRpt *)(bcch->boLst.first->node);
3411 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3413 allocInfo->bcchAlloc.schdFirst = TRUE;
3414 /* Time to perform allocation for this BCCH transmission */
3415 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
/* ---- Second BCCH-on-DLSCH entity: scheduled within its SI window ---- */
3419 if(!allocInfo->bcchAlloc.schdFirst)
3422 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
3423 #if (ERRCLASS & ERRCLS_DEBUG)
3426 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3430 lnk = bcch->boLst.first;
3431 while (lnk != NULLP)
3433 bo = (RgSchClcBoRpt *)(lnk->node);
/* Allocate if 'frm' falls inside [timeToTx, maxTimeToTx]. */
3435 valid = rgSCHCmnChkInWin(frm, bo->timeToTx, bo->maxTimeToTx);
3439 bo->i = RGSCH_CALC_SF_DIFF(frm, bo->timeToTx);
3440 /* Time to perform allocation for this BCCH transmission */
3441 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
/* Drop BO reports whose window has fully passed. */
3446 valid = rgSCHCmnChkPastWin(frm, bo->maxTimeToTx);
3449 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
3450 /* ccpu00117052 - MOD - Passing double pointer
3451 for proper NULLP assignment*/
3452 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo,
3453 sizeof(RgSchClcBoRpt));
/* RGR_SI_SCH path: dedicated SI scheduler handles BCCH-on-DLSCH. */
3459 rgSCHDlSiSched(cell, allocInfo, subfrmAlloc);
3460 #endif/*RGR_SI_SCH*/
/* ---- PCCH (paging) ---- */
3462 pcch = rgSCHDbmGetPcch(cell);
3466 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"PCCH on DLSCH is not configured");
3470 if (pcch->boLst.first != NULLP)
3472 bo = (RgSchClcBoRpt *)(pcch->boLst.first->node);
3474 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3476 /* Time to perform allocation for this PCCH transmission */
3477 rgSCHCmnClcAlloc(cell, sf, pcch, RGSCH_P_RNTI, allocInfo);
3485 * Fun: rgSCHCmnChkInWin
3487 * Desc: This function checks if frm occurs in window
3489 * Ret: TRUE - if in window
3494 * File: rg_sch_cmn.c
3498 PUBLIC Bool rgSCHCmnChkInWin
3500 CmLteTimingInfo frm,
3501 CmLteTimingInfo start,
3505 PUBLIC Bool rgSCHCmnChkInWin(frm, start, end)
3506 CmLteTimingInfo frm;
3507 CmLteTimingInfo start;
3508 CmLteTimingInfo end;
/* Returns TRUE when 'frm' lies within the inclusive window [start, end],
 * handling SFN wrap-around (end.sfn < start.sfn) as a window that spans
 * the SFN rollover.
 * @param[in] frm    timing to test
 * @param[in] start  window start (inclusive)
 * @param[in] end    window end (inclusive) */
3513 TRC2(rgSCHCmnChkInWin);
3515 if (end.sfn > start.sfn)
3517 if (frm.sfn > start.sfn
3518 || (frm.sfn == start.sfn && frm.slot >= start.slot))
3520 if (frm.sfn < end.sfn
3522 || (frm.sfn == end.sfn && frm.slot <= end.slot))
/* NOTE(review): the next line duplicates the previous condition but
 * compares against start.slot instead of end.slot — presumably an
 * alternative under a compile-time flag elided from this view; as
 * written consecutively it cannot both be live code. Confirm against
 * the full file. */
3524 || (frm.sfn == end.sfn && frm.slot <= start.slot))
3531 /* Testing for wrap around, sfn wraparound check should be enough */
3532 else if (end.sfn < start.sfn)
3534 if (frm.sfn > start.sfn
3535 || (frm.sfn == start.sfn && frm.slot >= start.slot))
3541 if (frm.sfn < end.sfn
3542 || (frm.sfn == end.sfn && frm.slot <= end.slot))
3548 else /* start.sfn == end.sfn */
3550 if (frm.sfn == start.sfn
3551 && (frm.slot >= start.slot
3552 && frm.slot <= end.slot))
3559 } /* end of rgSCHCmnChkInWin*/
3563 * Fun: rgSCHCmnChkPastWin
3565 * Desc: This function checks if frm has gone past window edge
3567 * Ret: TRUE - if past window edge
3572 * File: rg_sch_cmn.c
3576 PUBLIC Bool rgSCHCmnChkPastWin
3578 CmLteTimingInfo frm,
3582 PUBLIC Bool rgSCHCmnChkPastWin(frm, end)
3583 CmLteTimingInfo frm;
3584 CmLteTimingInfo end;
/* Returns TRUE when 'frm' has gone past the window edge 'end'.
 * Implemented by testing membership of 'frm' in the window
 * (end + 1 subframe, end + 1 frame] via rgSCHCmnChkInWin().
 * @param[in] frm  timing to test
 * @param[in] end  window edge (inclusive last valid timing) */
3587 CmLteTimingInfo refFrm = end;
3590 TRC2(rgSCHCmnChkPastWin);
/* refFrm = end advanced by one full frame; end advanced by one subframe:
 * together they bound the "just past the edge" region. */
3592 RGSCH_INCR_FRAME(refFrm.sfn);
3593 RGSCH_INCR_SUB_FRAME(end, 1);
3594 pastWin = rgSCHCmnChkInWin(frm, end, refFrm);
3597 } /* end of rgSCHCmnChkPastWin*/
3600 * @brief This function implements allocation of the resources for common
3601 * channels BCCH, PCCH.
3605 * Function: rgSCHCmnClcAlloc
3606 * Purpose: This function implements selection of number of RBs based
3607 * the allowed grant for the service. It is also responsible
3608 * for selection of MCS for the transmission.
3610 * Invoked by: Scheduler
3612 * @param[in] RgSchCellCb *cell,
3613 * @param[in] RgSchDlSf *sf,
3614 * @param[in] RgSchClcDlLcCb *lch,
3615 * @param[in] U16 rnti,
3616 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3621 PRIVATE Void rgSCHCmnClcAlloc
3625 RgSchClcDlLcCb *lch,
3627 RgSchCmnDlRbAllocInfo *allocInfo
3630 PRIVATE Void rgSCHCmnClcAlloc(cell, sf, lch, rnti, allocInfo)
3633 RgSchClcDlLcCb *lch;
3635 RgSchCmnDlRbAllocInfo *allocInfo;
/* Allocates RBs (and MCS) for a common logical channel (BCCH or PCCH)
 * transmission in subframe 'sf', then records the grant in either
 * allocInfo->bcchAlloc (SI-RNTI) or allocInfo->pcchAlloc (P-RNTI).
 * @param[in]  cell       cell control block
 * @param[in]  sf         target DL subframe
 * @param[in]  lch        common logical channel with a pending BO report
 * @param[in]  rnti       RGSCH_SI_RNTI or RGSCH_P_RNTI
 * @param[out] allocInfo  per-TTI RB allocation info */
3638 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
3645 U8 cfi = cellDl->currCfi;
3648 TRC2(rgSCHCmnClcAlloc);
3650 bo = (RgSchClcBoRpt *)(lch->boLst.first->node);
/* Determine number of RBs: either scan the TB-size table (when the
 * cached bits-per-RB figure is not yet computed) or divide directly. */
3654 /* rgSCHCmnClcRbAllocForFxdTb(cell, bo->bo, cellDl->ccchCqi, &rb);*/
3655 if(cellDl->bitsPerRb==0)
3657 while ((rgTbSzTbl[0][0][rb]) < (tbs*8))
3665 rb = RGSCH_CEIL((tbs*8), cellDl->bitsPerRb);
3667 /* DwPTS Scheduling Changes Start */
3669 if(sf->sfType == RG_SCH_SPL_SF_DATA)
3671 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
3673 /* Calculate the less RE's because of DwPTS */
3674 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
3676 /* Increase number of RBs in Spl SF to compensate for lost REs */
3677 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
3680 /* DwPTS Scheduling Changes End */
3681 /*ccpu00115595- end*/
3682 /* additional check to see if required RBs
3683 * exceeds the available */
3684 if (rb > sf->bw - sf->bwAssigned)
3686 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"BW allocation "
3687 "failed for CRNTI:%d",rnti);
3691 /* Update the subframe Allocated BW field */
3692 sf->bwAssigned = sf->bwAssigned + rb;
3693 /* Fill in the BCCH/PCCH transmission info to the RBAllocInfo struct */
3694 if (rnti == RGSCH_SI_RNTI)
3696 allocInfo->bcchAlloc.rnti = rnti;
3697 allocInfo->bcchAlloc.dlSf = sf;
3698 allocInfo->bcchAlloc.tbInfo[0].bytesReq = tbs;
3699 allocInfo->bcchAlloc.rbsReq = rb;
3700 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
3701 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
3702 /* Nprb indication at PHY for common Ch */
3703 allocInfo->bcchAlloc.nPrb = bo->nPrb;
/* else: paging (P-RNTI) grant. */
3707 allocInfo->pcchAlloc.rnti = rnti;
3708 allocInfo->pcchAlloc.dlSf = sf;
3709 allocInfo->pcchAlloc.tbInfo[0].bytesReq = tbs;
3710 allocInfo->pcchAlloc.rbsReq = rb;
3711 allocInfo->pcchAlloc.tbInfo[0].imcs = mcs;
3712 allocInfo->pcchAlloc.tbInfo[0].noLyr = 1;
3713 allocInfo->pcchAlloc.nPrb = bo->nPrb;
3720 * @brief This function implements PDCCH allocation for common channels.
3724 * Function: rgSCHCmnCmnPdcchAlloc
3725 * Purpose: This function implements allocation of PDCCH for a UE.
3726 * 1. This uses index 0 of PDCCH table for efficiency.
3727 * 2. Uses the candidate PDCCH count for the aggr level.
3728 * 3. Look for availability for each candidate and choose
3729 * the first one available.
3731 * Invoked by: Scheduler
3733 * @param[in] RgSchCellCb *cell
3734 * @param[in] RgSchDlSf *sf
3735 * @return RgSchPdcch *
3736 * -# NULLP when unsuccessful
3740 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc
3746 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc(cell, subFrm)
3751 CmLteAggrLvl aggrLvl;
3752 RgSchPdcchInfo *pdcchInfo;
3754 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3755 U8 numCce; /*store num CCEs based on
3756 aggregation level */
3757 TRC2(rgSCHCmnCmnPdcchAlloc);
3759 aggrLvl = cellSch->dl.cmnChAggrLvl;
3761 pdcchInfo = &(subFrm->pdcchInfo);
3763 /* Updating the no. of nCce in pdcchInfo, in case if CFI
3766 if(subFrm->nCce != pdcchInfo->nCce)
3768 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
3771 if(cell->nCce != pdcchInfo->nCce)
3773 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
3779 case CM_LTE_AGGR_LVL4:
3782 case CM_LTE_AGGR_LVL8:
3785 case CM_LTE_AGGR_LVL16:
3792 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
3795 pdcch->isSpsRnti = FALSE;
3797 /* Increment the CCE used counter in the current subframe */
3798 subFrm->cceCnt += numCce;
3799 pdcch->pdcchSearchSpace = RG_SCH_CMN_SEARCH_SPACE;
3804 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
3805 subFrm->isCceFailure = TRUE;
3807 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
3808 "PDCCH ERR: NO PDDCH AVAIL IN COMMON SEARCH SPACE aggr:%u",
3815 * @brief This function implements bandwidth allocation for common channels.
3819 * Function: rgSCHCmnClcRbAlloc
3820 * Purpose: This function implements bandwidth allocation logic
3821 * for common control channels.
3823 * Invoked by: Scheduler
3825 * @param[in] RgSchCellCb* cell
3829 * @param[in] U32 *tbs
3830 * @param[in] U8 *mcs
3831 * @param[in] RgSchDlSf *sf
3837 PUBLIC Void rgSCHCmnClcRbAlloc
3850 PUBLIC Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, iTbs, isSpsBo)
3863 PRIVATE Void rgSCHCmnClcRbAlloc
3874 PRIVATE Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, sf)
3883 #endif /* LTEMAC_SPS */
3886 RgSchCmnTbSzEff *effTbl;
3889 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3890 U8 cfi = cellSch->dl.currCfi;
3892 TRC2(rgSCHCmnClcRbAlloc);
3894 /* first get the CQI to MCS table and determine the number of RBs */
/* iTbs is looked up from the single-layer CQI-to-TBS table for the
 * current CFI, and MCS is derived from that iTbs. */
3895 effTbl = (RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]);
3896 iTbsVal = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[cqi];
3897 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
3899 /* Efficiency is number of bits per 1024 REs */
3900 eff = (*effTbl)[iTbsVal];
3902 /* Get the number of REs needed for this bo */
/* bo is in bytes (hence * 8); the * 1024 matches the efficiency's
 * bits-per-1024-REs units. */
3903 noRes = ((bo * 8 * 1024) / eff );
3905 /* Get the number of RBs needed for this transmission */
3906 /* Number of RBs = No of REs / No of REs per RB */
3907 tmpRb = RGSCH_CEIL(noRes, cellSch->dl.noResPerRb[cfi]);
3908 /* KWORK_FIX: added check to see if rb has crossed maxRb*/
3909 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3910 if (tmpRb > cellSch->dl.maxDlBwPerUe)
3912 tmpRb = cellSch->dl.maxDlBwPerUe;
/* Grow the RB count until the table TB size (bytes) covers the BO,
 * but never beyond the per-UE DL bandwidth cap. */
3914 while ((rgTbSzTbl[0][iTbsVal][tmpRb-1]/8) < bo &&
3915 (tmpRb < cellSch->dl.maxDlBwPerUe))
3918 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3920 *tbs = rgTbSzTbl[0][iTbsVal][tmpRb-1]/8;
3922 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
3930 * @brief Scheduling for MSG4.
3934 * Function: rgSCHCmnMsg4Alloc
3935 * Purpose: Scheduling for MSG4
3937 * Invoked by: Scheduler
3939 * @param[in] RgSchCellCb* cell
3940 * @param[in] RgSchRaCb* raCb
3941 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3946 PRIVATE S16 rgSCHCmnMsg4Alloc
3950 RgSchCmnDlRbAllocInfo *allocInfo
3953 PRIVATE S16 rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)
3956 RgSchCmnDlRbAllocInfo *allocInfo;
3959 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3961 TRC2(rgSCHCmnMsg4Alloc);
3963 /* SR_RACH_STATS : MSG4 TO BE TXED */
3965 /* Return if subframe BW exhausted */
3966 if (allocInfo->msg4Alloc.msg4DlSf->bw <=
3967 allocInfo->msg4Alloc.msg4DlSf->bwAssigned)
3969 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId ,
/* Reserve a DL HARQ process for Msg4 before committing any bandwidth. */
3974 if (rgSCHDhmGetMsg4HqProc(raCb, cellSch->dl.time) != ROK)
3976 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3977 "rgSCHDhmGetMsg4HqProc failed");
3981 raCb->rbAllocInfo.dlSf = allocInfo->msg4Alloc.msg4DlSf;
3983 if (rgSCHCmnMsg4DedAlloc(cell, raCb) != ROK)
3985 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
3986 rgSCHDhmRlsHqpTb(raCb->dlHqE->msg4Proc, 0, FALSE);
3987 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3988 "rgSCHCmnMsg4DedAlloc failed.");
/* Success: queue the Msg4 HARQ process on the tx list and count the
 * scheduled CCCH UE for this subframe. */
3991 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4TxLst, &raCb->dlHqE->msg4Proc->reqLnk);
3992 raCb->dlHqE->msg4Proc->reqLnk.node = (PTR)raCb->dlHqE->msg4Proc;
3993 allocInfo->msg4Alloc.msg4DlSf->schdCcchUe++;
4000 * @brief This function implements PDCCH allocation for an UE.
4004 * Function: PdcchAlloc
4005 * Purpose: This function implements allocation of PDCCH for an UE.
4006 * 1. Get the aggregation level for the CQI of the UE.
4007 * 2. Get the candidate PDCCH count for the aggr level.
4008 * 3. Look for availability for each candidate and choose
4009 * the first one available.
4011 * Invoked by: Scheduler
4016 * @param[in] dciFrmt
4017 * @return RgSchPdcch *
4018 * -# NULLP when unsuccessful
4022 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc
4028 TfuDciFormat dciFrmt,
4032 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc(cell, subFrm, cqi, dciFrmt, isDtx)
4037 TfuDciFormat dciFrmt;
4041 CmLteAggrLvl aggrLvl;
4042 RgSchPdcchInfo *pdcchInfo;
4045 TRC2(rgSCHCmnPdcchAlloc);
4047 /* 3.1 consider the selected DCI format size in determining the
4048 * aggregation level */
4049 //TODO_SID Need to update. Currently using 4 aggregation level
/* NOTE(review): aggrLvl is hard-coded; the CQI/format-based lookup is
 * commented out — confirm whether this is still intended. */
4050 aggrLvl = CM_LTE_AGGR_LVL2;//cellSch->dciAggrLvl[cqi][dciFrmt];
/* DCI format 1A for a UE marked for common-space UL PDCCH is diverted
 * to the common search space allocator. */
4053 if((dciFrmt == TFU_DCI_FORMAT_1A) &&
4054 ((ue) && (ue->allocCmnUlPdcch)) )
4056 pdcch = rgSCHCmnCmnPdcchAlloc(cell, subFrm);
4057 /* Since CRNTI Scrambled */
4060 pdcch->dciNumOfBits = ue->dciSize.cmnSize[dciFrmt];
4061 // prc_trace_format_string(PRC_TRACE_GROUP_PS, PRC_TRACE_INFO_LOW,"Forcing alloc in CMN search spc size %d fmt %d \n",
4062 // pdcch->dciNumOfBits, dciFrmt);
4068 /* Incrementing aggrLvl by one step if it is not AGGR_LVL16 (MAX SIZE)
4069 * in order to increase the redundancy bits for better decoding at the UE */
4072 if (aggrLvl != CM_LTE_AGGR_LVL16)
4076 case CM_LTE_AGGR_LVL2:
4077 aggrLvl = CM_LTE_AGGR_LVL4;
4079 case CM_LTE_AGGR_LVL4:
4080 aggrLvl = CM_LTE_AGGR_LVL8;
4082 case CM_LTE_AGGR_LVL8:
4083 aggrLvl = CM_LTE_AGGR_LVL16;
4092 pdcchInfo = &subFrm->pdcchInfo;
4094 /* Updating the no. of nCce in pdcchInfo, in case if CFI
4097 if(subFrm->nCce != pdcchInfo->nCce)
4099 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
4102 if(cell->nCce != pdcchInfo->nCce)
4104 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
/* Early bail-out when the subframe does not even hold enough CCEs for
 * one candidate at this aggregation level. */
4108 if (pdcchInfo->nCce < (1 << (aggrLvl - 1)))
4110 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4111 subFrm->isCceFailure = TRUE;
4112 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4113 "PDCCH ERR: NO PDDCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4119 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
4121 /* SR_RACH_STATS : Reset isTBMsg4 */
4122 pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4= FALSE;
4123 pdcch->dci.u.format0Info.isSrGrant = FALSE;
4125 pdcch->isSpsRnti = FALSE;
4127 /* Increment the CCE used counter in the current subframe */
4128 subFrm->cceCnt += aggrLvl;
4129 pdcch->pdcchSearchSpace = RG_SCH_UE_SPECIFIC_SEARCH_SPACE;
4133 if (ue->cell != cell)
4135 /* Secondary Cell */
4136 //pdcch->dciNumOfBits = ue->dciSize.noUlCcSize[dciFrmt];
4137 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4142 //pdcch->dciNumOfBits = ue->dciSize.dedSize[dciFrmt];
4143 //TODO_SID Need to update dci size.
4144 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4150 pdcch->dciNumOfBits = cell->dciSize.size[dciFrmt];
4155 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4156 subFrm->isCceFailure = TRUE;
4158 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4159 "PDCCH ERR: NO PDDCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4166 * @brief This function implements BW allocation for CCCH SDU
4170 * Function: rgSCHCmnCcchSduDedAlloc
4171 * Purpose: Downlink bandwidth Allocation for CCCH SDU.
4173 * Invoked by: Scheduler
4175 * @param[in] RgSchCellCb* cell
4176 * @param[out] RgSchUeCb *ueCb
4181 PRIVATE S16 rgSCHCmnCcchSduDedAlloc
4187 PRIVATE S16 rgSCHCmnCcchSduDedAlloc(cell, ueCb)
4192 RgSchDlHqEnt *hqE = NULLP;
4194 RgSchDlRbAlloc *rbAllocinfo = NULLP;
4195 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4199 U8 cfi = cellDl->currCfi;
4202 TRC2(rgSCHCmnCcchSduDedAlloc);
4204 rbAllocinfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
/* Effective BO = pending CCCH BO plus the fixed CCCH SDU header size. */
4206 effBo = ueCb->dlCcchInfo.bo + RGSCH_CCCH_SDU_HDRSIZE;
4209 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4210 &rbAllocinfo->tbInfo[0].bytesReq,
4211 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4212 #else /* LTEMAC_SPS */
4213 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4214 &rbAllocinfo->tbInfo[0].bytesReq,\
4215 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4217 #endif /* LTEMAC_SPS */
4220 /* Cannot exceed the total number of RBs in the cell */
4221 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4222 rbAllocinfo->dlSf->bwAssigned)))
4224 /* Check if atleast one allocation was possible.
4225 This may be the case where the Bw is very less and
4226 with the configured CCCH CQI, CCCH SDU exceeds the min Bw */
4227 if (rbAllocinfo->dlSf->bwAssigned == 0)
/* Fall back: take the whole subframe BW and raise iTbs until the TB
 * size covers the effective BO. */
4229 numRb = rbAllocinfo->dlSf->bw;
4230 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4231 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4235 rbAllocinfo->rbsReq = numRb;
4236 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4237 /* DwPTS Scheduling Changes Start */
4239 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4241 rbAllocinfo->tbInfo[0].bytesReq =
4242 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1,cfi);
4245 /* DwPTS Scheduling Changes End */
4246 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4254 /* Update the subframe Allocated BW field */
4255 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4256 rbAllocinfo->rbsReq;
4257 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
4258 rbAllocinfo->tbInfo[0].tbCb = &hqE->ccchSduProc->tbInfo[0];
4259 rbAllocinfo->rnti = ueCb->ueId;
4260 rbAllocinfo->tbInfo[0].noLyr = 1;
4267 * @brief This function implements BW allocation for MSG4
4271 * Function: rgSCHCmnMsg4DedAlloc
4272 * Purpose: Downlink bandwidth Allocation for MSG4.
4274 * Invoked by: Scheduler
4276 * @param[in] RgSchCellCb* cell
4277 * @param[out] RgSchRaCb *raCb
4282 PRIVATE S16 rgSCHCmnMsg4DedAlloc
4288 PRIVATE S16 rgSCHCmnMsg4DedAlloc(cell, raCb)
4294 RgSchDlRbAlloc *rbAllocinfo = &raCb->rbAllocInfo;
4298 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4299 U8 cfi = cellDl->currCfi;
4302 TRC2(rgSCHCmnMsg4DedAlloc);
/* Effective BO includes the Msg4 header and the contention-resolution
 * MAC control element. */
4304 effBo = raCb->dlCcchInfo.bo + RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE;
4307 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4308 &rbAllocinfo->tbInfo[0].bytesReq,\
4309 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4310 #else /* LTEMAC_SPS */
4311 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4312 &rbAllocinfo->tbInfo[0].bytesReq,\
4313 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4315 #endif /* LTEMAC_SPS */
4318 /* Cannot exceed the total number of RBs in the cell */
4319 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4320 rbAllocinfo->dlSf->bwAssigned)))
4322 /* Check if atleast one allocation was possible.
4323 This may be the case where the Bw is very less and
4324 with the configured CCCH CQI, CCCH SDU exceeds the min Bw */
4325 if (rbAllocinfo->dlSf->bwAssigned == 0)
/* Fall back: use the entire subframe BW and raise iTbs until the TB
 * size covers the effective BO (mirrors rgSCHCmnCcchSduDedAlloc). */
4327 numRb = rbAllocinfo->dlSf->bw;
4328 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4329 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4333 rbAllocinfo->rbsReq = numRb;
4334 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4335 /* DwPTS Scheduling Changes Start */
4337 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4339 rbAllocinfo->tbInfo[0].bytesReq =
4340 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1, cfi);
4343 /* DwPTS Scheduling Changes End */
4344 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4352 /* Update the subframe Allocated BW field */
4353 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4354 rbAllocinfo->rbsReq;
/* Msg4 goes to the temporary C-RNTI assigned during random access. */
4355 rbAllocinfo->rnti = raCb->tmpCrnti;
4356 rbAllocinfo->tbInfo[0].tbCb = &raCb->dlHqE->msg4Proc->tbInfo[0];
4357 rbAllocinfo->tbInfo[0].schdlngForTb = TRUE;
4358 rbAllocinfo->tbInfo[0].noLyr = 1;
4365 * @brief This function implements scheduling for RA Response.
4369 * Function: rgSCHCmnDlRaRsp
4370 * Purpose: Downlink scheduling for RA responses.
4372 * Invoked by: Scheduler
4374 * @param[in] RgSchCellCb* cell
4379 PRIVATE Void rgSCHCmnDlRaRsp
4382 RgSchCmnDlRbAllocInfo *allocInfo
4385 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4387 RgSchCmnDlRbAllocInfo *allocInfo;
4390 CmLteTimingInfo frm;
4391 CmLteTimingInfo schFrm;
4397 RgSchTddRachRspLst *rachRsp;
4398 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
4402 TRC2(rgSCHCmnDlRaRsp);
/* TDD variant: schedule RAR for the subframe DL-delta ahead of now. */
4404 frm = cell->crntTime;
4405 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4407 /* Compute the subframe for which allocation is being made */
4408 /* essentially, we need pointer to the dl frame for this subframe */
4409 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4411 /* Get the RACH Response scheduling related information
4412 * for the subframe with RA index */
4413 raIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][frm.subframe]-1;
4415 rachRsp = &cell->rachRspLst[raIdx];
4417 for(sfnIdx = 0; sfnIdx < rachRsp->numRadiofrms; sfnIdx++)
4419 /* For all scheduled RACH Responses in SFNs */
4421 RG_SCH_CMN_DECR_FRAME(schFrm.sfn, rachRsp->rachRsp[sfnIdx].sfnOffset);
4422 /* For all scheduled RACH Responses in subframes */
4424 subfrmIdx < rachRsp->rachRsp[sfnIdx].numSubfrms; subfrmIdx++)
4426 schFrm.subframe = rachRsp->rachRsp[sfnIdx].subframe[subfrmIdx];
4427 /* compute the last RA RNTI used in the previous subframe */
4428 raIdx = (((schFrm.sfn % cell->raInfo.maxRaSize) * \
4429 RGSCH_NUM_SUB_FRAMES * RGSCH_MAX_RA_RNTI_PER_SUBFRM) \
4432 /* For all RA RNTIs within a subframe */
/* Cap per-TTI RAR allocations at RGSCH_MAX_TDD_RA_RSP_ALLOC. */
4434 for(i=0; (i < RGSCH_MAX_RA_RNTI_PER_SUBFRM) && \
4435 (noRaRnti < RGSCH_MAX_TDD_RA_RSP_ALLOC); i++)
4437 rarnti = (schFrm.subframe + RGSCH_NUM_SUB_FRAMES*i + 1);
4438 rntiIdx = (raIdx + RGSCH_NUM_SUB_FRAMES*i);
/* Allocate only for RA-RNTIs that actually have pending RA requests. */
4440 if (cell->raInfo.raReqLst[rntiIdx].first != NULLP)
4442 /* compute the next RA RNTI */
4443 if (rgSCHCmnRaRspAlloc(cell, subFrm, rntiIdx,
4444 rarnti, noRaRnti, allocInfo) != ROK)
4446 /* The resources are exhausted */
4460 * @brief This function implements scheduling for RA Response.
4464 * Function: rgSCHCmnDlRaRsp
4465 * Purpose: Downlink scheduling for RA responses.
4467 * Invoked by: Scheduler
4469 * @param[in] RgSchCellCb* cell
4470 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4475 PRIVATE Void rgSCHCmnDlRaRsp //FDD
4478 RgSchCmnDlRbAllocInfo *allocInfo
4481 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4483 RgSchCmnDlRbAllocInfo *allocInfo;
4486 CmLteTimingInfo frm;
4487 CmLteTimingInfo winStartFrm;
4493 RgSchCmnCell *sched;
4495 TRC2(rgSCHCmnDlRaRsp);
/* FDD variant: schedule RAR for the subframe DL-delta ahead of now. */
4497 frm = cell->crntTime;
4498 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4500 /* Compute the subframe for which allocation is being made */
4501 /* essentially, we need pointer to the dl frame for this subframe */
4502 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4503 sched = RG_SCH_CMN_GET_CELL(cell);
4505 /* ccpu00132523 - Window Start calculated by considering RAR window size,
4506 * RAR Wait period, Subframes occupied for respective preamble format*/
4507 winGap = (sched->dl.numRaSubFrms-1) + (cell->rachCfg.raWinSize-1)
4508 +RGSCH_RARSP_WAIT_PERIOD;
4510 /* Window starting occasion is retrieved using the gap and tried to
4511 * fit to the size of raReqLst array*/
4512 RGSCHDECRFRMCRNTTIME(frm, winStartFrm, winGap);
4514 //5G_TODO TIMING update. Need to check
4515 winStartIdx = (winStartFrm.sfn & 1) * RGSCH_MAX_RA_RNTI+ winStartFrm.slot;
/* Walk the RAR window; stop once the common-PDCCH budget for this TTI
 * (RG_SCH_CMN_MAX_CMN_PDCCH) is used up. */
4517 for(i = 0; ((i < cell->rachCfg.raWinSize) && (noRaRnti < RG_SCH_CMN_MAX_CMN_PDCCH)); i++)
4519 raIdx = (winStartIdx + i) % RGSCH_RAREQ_ARRAY_SIZE;
4521 if (cell->raInfo.raReqLst[raIdx].first != NULLP)
/* A Backoff Indicator subheader is budgeted only for the first
 * occasion in the window (i == 0). */
4523 allocInfo->raRspAlloc[noRaRnti].biEstmt = \
4524 (!i * RGSCH_ONE_BIHDR_SIZE);
4525 rarnti = raIdx % RGSCH_MAX_RA_RNTI+ 1;
4526 if (rgSCHCmnRaRspAlloc(cell, subFrm, raIdx,
4527 rarnti, noRaRnti, allocInfo) != ROK)
4529 /* The resources are exhausted */
4532 /* ccpu00132523- If all the RAP ids are not scheduled then need not
4533 * proceed for next RA RNTIs*/
4534 if(allocInfo->raRspAlloc[noRaRnti].numRapids < cell->raInfo.raReqLst[raIdx].count)
4538 noRaRnti++; /* Max of RG_SCH_CMN_MAX_CMN_PDCCH RARNTIs
4539 for response allocation */
4548 * @brief This function allocates the resources for an RARNTI.
4552 * Function: rgSCHCmnRaRspAlloc
4553 * Purpose: Allocate resources to a RARNTI.
4554 * 0. Allocate PDCCH for sending the response.
4555 * 1. Locate the number of RA requests pending for the RARNTI.
4556 * 2. Compute the size of data to be built.
4557 * 3. Using common channel CQI, compute the number of RBs.
4559 * Invoked by: Scheduler
4561 * @param[in] RgSchCellCb *cell,
4562 * @param[in] RgSchDlSf *subFrm,
4563 * @param[in] U16 rarnti,
4564 * @param[in] U8 noRaRnti
4565 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4570 PRIVATE S16 rgSCHCmnRaRspAlloc
4577 RgSchCmnDlRbAllocInfo *allocInfo
4580 PRIVATE S16 rgSCHCmnRaRspAlloc(cell,subFrm,raIndex,rarnti,noRaRnti,allocInfo)
4586 RgSchCmnDlRbAllocInfo *allocInfo;
4589 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4590 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4594 /*ccpu00116700,ccpu00116708- Corrected the wrong type for mcs*/
4597 /* RACH handling related changes */
4598 Bool isAlloc = FALSE;
/* NOTE(review): function-local static — carries the RAPID count across
 * calls within one scheduling subframe; not reentrant/thread-safe. */
4599 static U8 schdNumRapid = 0;
4605 U8 cfi = cellDl->currCfi;
4608 TRC2(rgSCHCmnRaRspAlloc);
4613 /* ccpu00132523: Resetting the schdRap Id count in every scheduling subframe*/
4620 if (subFrm->bw == subFrm->bwAssigned)
4622 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4623 "bw == bwAssigned RARNTI:%d",rarnti);
4627 reqLst = &cell->raInfo.raReqLst[raIndex];
4628 if (reqLst->count == 0)
4630 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4631 "reqLst Count=0 RARNTI:%d",rarnti);
4634 remNumRapid = reqLst->count;
4637 /* Limit number of rach rsps to maxMsg3PerUlsf */
4638 if ( schdNumRapid+remNumRapid > cellUl->maxMsg3PerUlSf )
4640 remNumRapid = cellUl->maxMsg3PerUlSf-schdNumRapid;
4646 /* Try allocating for as many RAPIDs as possible */
4647 /* BI sub-header size to the tbSize requirement */
4648 noBytes = RGSCH_GET_RAR_BYTES(remNumRapid) +\
4649 allocInfo->raRspAlloc[noRaRnti].biEstmt;
4650 if ((allwdTbSz = rgSCHUtlGetAllwdCchTbSz(noBytes*8, &nPrb, &mcs)) == -1)
4656 /* rgSCHCmnClcRbAllocForFxdTb(cell, allwdTbSz/8, cellDl->ccchCqi, &rb);*/
/* When bitsPerRb is unset, derive RB count by scanning the TB-size
 * table; otherwise compute it directly by ceiling division. */
4657 if(cellDl->bitsPerRb==0)
4659 while ((rgTbSzTbl[0][0][rb]) <(U32) allwdTbSz)
4667 rb = RGSCH_CEIL(allwdTbSz, cellDl->bitsPerRb);
4669 /* DwPTS Scheduling Changes Start */
4671 if (subFrm->sfType == RG_SCH_SPL_SF_DATA)
4673 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
4675 /* Calculate the less RE's because of DwPTS */
4676 lostRe = rb * (cellDl->noResPerRb[cfi] -
4677 cellDl->numReDwPts[cfi]);
4679 /* Increase number of RBs in Spl SF to compensate for lost REs */
4680 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
4683 /* DwPTS Scheduling Changes End */
4685 /*ccpu00115595- end*/
4686 if (rb > subFrm->bw - subFrm->bwAssigned)
4691 /* Allocation succeeded for 'remNumRapid' */
/* NOTE(review): debug printf left in the scheduling path — consider
 * routing through RLOG instead. */
4694 printf("\n!!!RAR alloc noBytes:%u,allwdTbSz:%u,tbs:%u,rb:%u\n",
4695 noBytes,allwdTbSz,tbs,rb);
4700 RLOG_ARG0(L_INFO,DBG_CELLID,cell->cellId,"BW alloc Failed");
4704 subFrm->bwAssigned = subFrm->bwAssigned + rb;
4706 /* Fill AllocInfo structure */
4707 allocInfo->raRspAlloc[noRaRnti].rnti = rarnti;
4708 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].bytesReq = tbs;
4709 allocInfo->raRspAlloc[noRaRnti].rbsReq = rb;
4710 allocInfo->raRspAlloc[noRaRnti].dlSf = subFrm;
4711 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].imcs = mcs;
4712 allocInfo->raRspAlloc[noRaRnti].raIndex = raIndex;
4713 /* RACH changes for multiple RAPID handling */
4714 allocInfo->raRspAlloc[noRaRnti].numRapids = remNumRapid;
4715 allocInfo->raRspAlloc[noRaRnti].nPrb = nPrb;
4716 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].noLyr = 1;
4717 allocInfo->raRspAlloc[noRaRnti].vrbgReq = RGSCH_CEIL(nPrb,MAX_5GTF_VRBG_SIZE);
4718 schdNumRapid += remNumRapid;
4722 /***********************************************************
4724 * Func : rgSCHCmnUlAllocFillRbInfo
4726 * Desc : Fills the start RB and the number of RBs for
4727 * uplink allocation.
4735 **********************************************************/
4737 PUBLIC Void rgSCHCmnUlAllocFillRbInfo
4744 PUBLIC Void rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc)
4747 RgSchUlAlloc *alloc;
4750 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4751 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4752 U8 cfi = cellDl->currCfi;
4755 TRC2(rgSCHCmnUlAllocFillRbInfo);
/* Start RB = subband start scaled by subband size, offset by the first
 * usable UL RB for the current CFI. */
4756 alloc->grnt.rbStart = (alloc->sbStart * cellUl->sbSize) +
4757 cell->dynCfiCb.bwInfo[cfi].startRb;
4759 /* Num RBs = numSbAllocated * sbSize - less RBs in the last SB */
4760 alloc->grnt.numRb = (alloc->numSb * cellUl->sbSize);
4766 * @brief Grant request for Msg3.
4770 * Function : rgSCHCmnMsg3GrntReq
4772 * This is invoked by downlink scheduler to request allocation
4775 * - Attempt to allocate msg3 in the current msg3 subframe
4776 * Allocation attempt based on whether preamble is from group A
4777 * and the value of MESSAGE_SIZE_GROUP_A
4778 * - Link allocation with passed RNTI and msg3 HARQ process
4779 * - Set the HARQ process ID (*hqProcIdRef)
4781 * @param[in] RgSchCellCb *cell
4782 * @param[in] CmLteRnti rnti
4783 * @param[in] Bool preamGrpA
4784 * @param[in] RgSchUlHqProcCb *hqProc
4785 * @param[out] RgSchUlAlloc **ulAllocRef
4786 * @param[out] U8 *hqProcIdRef
4790 PRIVATE Void rgSCHCmnMsg3GrntReq
4795 RgSchUlHqProcCb *hqProc,
4796 RgSchUlAlloc **ulAllocRef,
4800 PRIVATE Void rgSCHCmnMsg3GrntReq(cell, rnti, preamGrpA, hqProc,
4801 ulAllocRef, hqProcIdRef)
4805 RgSchUlHqProcCb *hqProc;
4806 RgSchUlAlloc **ulAllocRef;
4810 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4811 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
4813 RgSchUlAlloc *alloc;
4817 TRC2(rgSCHCmnMsg3GrntReq);
/* Output defaults to "no allocation"; set only on success. */
4819 *ulAllocRef = NULLP;
4821 /* Fix: ccpu00120610 Use remAllocs from subframe during msg3 allocation */
4822 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
/* Subband count and MCS for Msg3 depend on the preamble group the UE
 * used (group A vs. group B RACH configuration). */
4826 if (preamGrpA == FALSE)
4828 numSb = cellUl->ra.prmblBNumSb;
4829 iMcs = cellUl->ra.prmblBIMcs;
4833 numSb = cellUl->ra.prmblANumSb;
4834 iMcs = cellUl->ra.prmblAIMcs;
4837 if ((hole = rgSCHUtlUlHoleFirst(sf)) != NULLP)
4839 if(*sf->allocCountRef == 0)
4841 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4842 /* Reinitialize the hole */
4843 if (sf->holeDb->count == 1 && (hole->start == 0)) /* Sanity check of holeDb */
4845 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4846 /* Re-Initialize available subbands because of CFI change*/
4847 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4851 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4852 "Error! holeDb sanity check failed RNTI:%d",rnti);
4855 if (numSb <= hole->num)
4858 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
4859 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
4860 alloc->grnt.iMcs = iMcs;
4861 alloc->grnt.iMcsCrnt = iMcs;
4862 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
4863 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
4864 /* To include the length and ModOrder in DataRecp Req.*/
4865 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
4866 RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
4867 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
4868 alloc->grnt.nDmrs = 0;
4869 alloc->grnt.hop = 0;
4870 alloc->grnt.delayBit = 0;
4871 alloc->grnt.isRtx = FALSE;
/* Publish outputs: the allocation and the Msg3 HARQ process id. */
4872 *ulAllocRef = alloc;
4873 *hqProcIdRef = (cellUl->msg3SchdHqProcIdx);
4874 hqProc->procId = *hqProcIdRef;
4875 hqProc->ulSfIdx = (cellUl->msg3SchdIdx);
4878 alloc->pdcch = FALSE;
4879 alloc->forMsg3 = TRUE;
4880 alloc->hqProc = hqProc;
/* Start a new HARQ transmission with (maxMsg3Tx - 1) retx remaining. */
4881 rgSCHUhmNewTx(hqProc, (U8)(cell->rachCfg.maxMsg3Tx - 1), alloc);
4882 //RLOG_ARG4(L_DEBUG,DBG_CELLID,cell->cellId,
4884 "\nRNTI:%d MSG3 ALLOC proc(%p)procId(%d)schdIdx(%d)\n",
4886 ((PTR)alloc->hqProc),
4887 alloc->hqProc->procId,
4888 alloc->hqProc->ulSfIdx);
4889 RLOG_ARG2(L_DEBUG,DBG_CELLID,cell->cellId,
4890 "alloc(%p)maxMsg3Tx(%d)",
4892 cell->rachCfg.maxMsg3Tx);
4901 * @brief This function determines the allocation limits and
4902 * parameters that aid in DL scheduling.
4906 * Function: rgSCHCmnDlSetUeAllocLmt
4907 * Purpose: This function determines the Maximum RBs
4908 * a UE is eligible to get based on softbuffer
4909 * limitation and cell->>>maxDlBwPerUe. The Codeword
4910 * specific parameters like iTbs, eff and noLyrs
4911 * are also set in this function. This function
4912 * is called while UE configuration and UeDlCqiInd.
4914 * Invoked by: Scheduler
4916 * @param[in] RgSchCellCb *cellCb
4917 * @param[in] RgSchCmnDlUe *ueDl
4922 PRIVATE Void rgSCHCmnDlSetUeAllocLmt
4929 PRIVATE Void rgSCHCmnDlSetUeAllocLmt(cell, ueDl, isEmtcUe)
4937 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
4938 U8 cfi = cellSch->dl.currCfi;
4940 TRC2(rgSCHCmnDlSetUeAllocLmt);
/* eMTC and legacy UEs use different CQI-to-TBS tables; the per-codeword
 * iTbs/eff fill-in below is otherwise identical for both branches. */
4943 if(TRUE == isEmtcUe)
4945 /* ITbs for CW0 for 1 Layer Tx */
4946 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4947 [ueDl->mimoInfo.cwInfo[0].cqi];
4948 /* ITbs for CW0 for 2 Layer Tx */
4949 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4950 [ueDl->mimoInfo.cwInfo[0].cqi];
4951 /* Eff for CW0 for 1 Layer Tx */
4952 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4953 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4954 /* Eff for CW0 for 2 Layer Tx */
4955 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4956 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4958 /* ITbs for CW1 for 1 Layer Tx */
4959 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4960 [ueDl->mimoInfo.cwInfo[1].cqi];
4961 /* ITbs for CW1 for 2 Layer Tx */
4962 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4963 [ueDl->mimoInfo.cwInfo[1].cqi];
4964 /* Eff for CW1 for 1 Layer Tx */
4965 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4966 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4967 /* Eff for CW1 for 2 Layer Tx */
4968 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4969 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
4974 /* ITbs for CW0 for 1 Layer Tx */
4975 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4976 [ueDl->mimoInfo.cwInfo[0].cqi];
4977 /* ITbs for CW0 for 2 Layer Tx */
4978 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4979 [ueDl->mimoInfo.cwInfo[0].cqi];
4980 /* Eff for CW0 for 1 Layer Tx */
4981 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4982 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4983 /* Eff for CW0 for 2 Layer Tx */
4984 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4985 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4987 /* ITbs for CW1 for 1 Layer Tx */
4988 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4989 [ueDl->mimoInfo.cwInfo[1].cqi];
4990 /* ITbs for CW1 for 2 Layer Tx */
4991 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4992 [ueDl->mimoInfo.cwInfo[1].cqi];
4993 /* Eff for CW1 for 1 Layer Tx */
4994 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4995 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4996 /* Eff for CW1 for 2 Layer Tx */
4997 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4998 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
5002 // ueDl->laCb.cqiBasediTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0] * 100;
5004 /* Assigning noLyrs to each CW assuming optimal Spatial multiplexing
/* CW0 takes floor(ri/2) layers (minimum 1); CW1 takes the remainder. */
5006 (ueDl->mimoInfo.ri/2 == 0)? (ueDl->mimoInfo.cwInfo[0].noLyr = 1) : \
5007 (ueDl->mimoInfo.cwInfo[0].noLyr = ueDl->mimoInfo.ri/2);
5008 ueDl->mimoInfo.cwInfo[1].noLyr = ueDl->mimoInfo.ri - ueDl->mimoInfo.cwInfo[0].noLyr;
5009 /* rg002.101:ccpu00102106: correcting DL harq softbuffer limitation logic.
5010 * The maxTbSz is the maximum number of PHY bits a harq process can
5011 * hold. Hence we limit our allocation per harq process based on this.
5012 * Earlier implementation we misinterpreted the maxTbSz to be per UE
5013 * per TTI, but in fact it is per Harq per TTI. */
5014 /* rg002.101:ccpu00102106: cannot exceed the harq Tb Size
5015 * and harq Soft Bits limit.*/
5017 /* Considering iTbs corresponding to 2 layer transmission for
5018 * codeword0(approximation) and the maxLayers supported by
5019 * this UE at this point of time. */
5020 RG_SCH_CMN_TBS_TO_MODODR(ueDl->mimoInfo.cwInfo[0].iTbs[1], modOrder);
5022 /* Bits/modOrder gives #REs, #REs/noResPerRb gives #RBs */
5023 /* rg001.301 -MOD- [ccpu00119213] : avoiding wraparound */
5024 maxRb = ((ueDl->maxSbSz)/(cellSch->dl.noResPerRb[cfi] * modOrder *\
5025 ueDl->mimoInfo.ri));
5026 if (cellSch->dl.isDlFreqSel)
5028 /* Rounding off to left nearest multiple of RBG size */
5029 maxRb -= maxRb % cell->rbgSize;
5031 ueDl->maxRb = RGSCH_MIN(maxRb, cellSch->dl.maxDlBwPerUe);
5032 if (cellSch->dl.isDlFreqSel)
5034 /* Rounding off to right nearest multiple of RBG size */
5035 if (ueDl->maxRb % cell->rbgSize)
5037 ueDl->maxRb += (cell->rbgSize -
5038 (ueDl->maxRb % cell->rbgSize));
5042 /* Set the index of the cwInfo, which is better in terms of
5043 * efficiency. If RI<2, only 1 CW, hence btrCwIdx shall be 0 */
5044 if (ueDl->mimoInfo.ri < 2)
5046 ueDl->mimoInfo.btrCwIdx = 0;
5050 if (ueDl->mimoInfo.cwInfo[0].eff[ueDl->mimoInfo.cwInfo[0].noLyr-1] <\
5051 ueDl->mimoInfo.cwInfo[1].eff[ueDl->mimoInfo.cwInfo[1].noLyr-1])
5053 ueDl->mimoInfo.btrCwIdx = 1;
5057 ueDl->mimoInfo.btrCwIdx = 0;
5067 * @brief This function updates TX Scheme.
5071 * Function: rgSCHCheckAndSetTxScheme
5072 * Purpose: Compares the CQI-derived iTbs with the iTbs currently
5073 * in use for codeword 0. If the iTbs in use lags the
5074 * CQI-derived value by more than the configured threshold,
5075 * transmit diversity is forced for the UE; the force is
5076 * removed once the iTbs in use reaches the maximum iTbs
5077 * derivable from the highest CQI.
5079 * Invoked by: Scheduler
5081 * @param[in] RgSchCellCb *cell
5082 * @param[in] RgSchUeCb *ue
5087 PRIVATE Void rgSCHCheckAndSetTxScheme
5093 PRIVATE Void rgSCHCheckAndSetTxScheme(cell, ue)
5098 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5099 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue ,cell);
5100 U8 cfi = cellSch->dl.currCfi;
5105 TRC2(rgSCHCheckAndSetTxScheme);
/* maxiTbs: the iTbs corresponding to the highest reportable CQI for
 * single-layer transmission at the current CFI. */
5107 maxiTbs = (*(RgSchCmnCqiToTbs*)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
5108 [RG_SCH_CMN_MAX_CQI - 1];
/* cqiBasediTbs is stored scaled by 100 in the LA control block. */
5109 cqiBasediTbs = (ueDl->laCb[0].cqiBasediTbs)/100;
5110 actualiTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0];
/* Force transmit diversity when the iTbs in use lags the CQI-derived
 * iTbs by more than the configured threshold (and is still low). */
5112 if((actualiTbs < RG_SCH_TXSCHEME_CHNG_ITBS_FACTOR) && (cqiBasediTbs >
5113 actualiTbs) && ((cqiBasediTbs - actualiTbs) > RG_SCH_TXSCHEME_CHNG_THRSHD))
5115 RG_SCH_CMN_SET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
/* Remove the forced TD once the iTbs in use has caught up to the max. */
5118 if(actualiTbs >= maxiTbs)
5120 RG_SCH_CMN_UNSET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
5127 * @brief This function determines the allocation limits and
5128 * parameters that aid in DL scheduling.
5132 * Function: rgSCHCmnDlSetUeAllocLmtLa
5133 * Purpose: This function determines the Maximum RBs
5134 * a UE is eligible to get based on softbuffer
5135 * limitation and cell->>>maxDlBwPerUe. The Codeword
5136 * specific parameters like iTbs, eff and noLyrs
5137 * are also set in this function. This function
5138 * is called while UE configuration and UeDlCqiInd.
5140 * Invoked by: Scheduler
5142 * @param[in] RgSchCellCb *cell
5143 * @param[in] RgSchUeCb *ue
5148 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa
5154 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa(cell, ue)
5162 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5163 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
5164 U8 cfi = cellSch->dl.currCfi;
5168 TRC2(rgSCHCmnDlSetUeAllocLmtLa);
5170 maxiTbs = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[RG_SCH_CMN_MAX_CQI - 1];
/* Run link adaptation only when a fresh CQI report is pending. */
5171 if(ueDl->cqiFlag == TRUE)
5173 for(cwIdx=0; cwIdx < RG_SCH_CMN_MAX_CW_PER_UE; cwIdx++)
5177 /* Calcluating the reported iTbs for code word 0 */
5178 reportediTbs = ue->ue5gtfCb.mcs;
5180 iTbsNew = (S32) reportediTbs;
5182 if(!ueDl->laCb[cwIdx].notFirstCqi)
5184 /* This is the first CQI report from UE */
5185 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5186 ueDl->laCb[cwIdx].notFirstCqi = TRUE;
/* Reports that jump more than 5 iTbs steps from the iTbs in use are
 * treated as outliers and ignored; ~10 consecutive outliers reset LA. */
5188 else if ((RG_ITBS_DIFF(reportediTbs, ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0])) > 5)
5190 /* Ignore this iTBS report and mark that last iTBS report was */
5191 /* ignored so that subsequently we reset the LA algorithm */
5192 ueDl->laCb[cwIdx].lastiTbsIgnored = TRUE;
5193 ueDl->laCb[cwIdx].numLastiTbsIgnored++;
5194 if( ueDl->laCb[cwIdx].numLastiTbsIgnored > 10)
5196 /* CQI reported by UE is not catching up. Reset the LA algorithm */
5197 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5198 ueDl->laCb[cwIdx].deltaiTbs = 0;
5199 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
5200 ueDl->laCb[cwIdx].numLastiTbsIgnored = 0;
5205 if (ueDl->laCb[cwIdx].lastiTbsIgnored != TRUE)
/* Exponentially-weighted moving average: 20% new report, 80% history
 * (values kept scaled by 100 for integer arithmetic). */
5207 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5208 (80 * ueDl->laCb[cwIdx].cqiBasediTbs))/100;
5212 /* Reset the LA as iTbs in use caught up with the value */
5213 /* reported by UE. */
5214 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5215 (80 * ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] * 100))/100;
5216 ueDl->laCb[cwIdx].deltaiTbs = 0;
5217 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
5221 iTbsNew = (ueDl->laCb[cwIdx].cqiBasediTbs + ueDl->laCb[cwIdx].deltaiTbs)/100;
5223 RG_SCH_CHK_ITBS_RANGE(iTbsNew, maxiTbs);
/* Clamp the resulting iTbs to the cell-configured DL maximum and feed
 * it back as the UE's MCS. */
5225 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] = RGSCH_MIN(iTbsNew, cell->thresholds.maxDlItbs);
5226 //ueDl->mimoInfo.cwInfo[cwIdx].iTbs[1] = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
5228 ue->ue5gtfCb.mcs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
/* NOTE(review): debug printf left in the LA path — consider RLOG. */
5230 printf("reportediTbs[%d] cqiBasediTbs[%d] deltaiTbs[%d] iTbsNew[%d] mcs[%d] cwIdx[%d]\n",
5231 reportediTbs, ueDl->laCb[cwIdx].cqiBasediTbs, ueDl->laCb[cwIdx].deltaiTbs,
5232 iTbsNew, ue->ue5gtfCb.mcs, cwIdx);
5236 if((ue->mimoInfo.txMode != RGR_UE_TM_3) && (ue->mimoInfo.txMode != RGR_UE_TM_4))
5241 ueDl->cqiFlag = FALSE;
5248 /***********************************************************
5250 * Func : rgSCHCmnDlHqPResetTemp
5252 * Desc : Reset whatever variables were temporarily used
5253 * during UE scheduling.
5261 **********************************************************/
5263 PUBLIC Void rgSCHCmnDlHqPResetTemp
5265 RgSchDlHqProcCb *hqP
5268 PUBLIC Void rgSCHCmnDlHqPResetTemp(hqP)
5269 RgSchDlHqProcCb *hqP;
/* Detach this HARQ process from the per-TTI scheduling lists by
 * clearing its list-node back-pointers; called once per scheduled
 * (and, via rgSCHCmnDlUeResetTemp, non-scheduled) hqP per TTI. */
5273 TRC2(rgSCHCmnDlHqPResetTemp);
5275 /* Fix: syed having a hqP added to Lists for RB assignment rather than
5276 * a UE, as adding UE was limiting handling some scenarios */
5277 hqP->reqLnk.node = (PTR)NULLP;
5278 hqP->schdLstLnk.node = (PTR)NULLP;
5281 } /* rgSCHCmnDlHqPResetTemp */
5283 /***********************************************************
5285 * Func : rgSCHCmnDlUeResetTemp
5287 * Desc : Reset whatever variables were temporarily used
5288 * during UE scheduling.
5296 **********************************************************/
5298 PUBLIC Void rgSCHCmnDlUeResetTemp
5301 RgSchDlHqProcCb *hqP
5304 PUBLIC Void rgSCHCmnDlUeResetTemp(ue, hqP)
5306 RgSchDlHqProcCb *hqP;
/* Per-TTI cleanup of the UE's temporary DL scheduling state. Zeroes the
 * RB-allocation control block (preserving rnti and laaCb across the
 * memset) and then resets the hqP's list linkage. */
5309 RgSchDlRbAlloc *allocInfo;
5310 RgSchCmnDlUe *cmnUe = RG_SCH_CMN_GET_DL_UE(ue,hqP->hqE->cell);
5315 TRC2(rgSCHCmnDlUeResetTemp);
5317 /* Fix : syed check for UE's existence was useless.
5318 * Instead we need to check that reset is done only for the
5319 * information of a scheduled harq proc, which is cmnUe->proc.
5320 * Reset should not be done for non-scheduled hqP */
5321 if((cmnUe->proc == hqP) || (cmnUe->proc == NULLP))
5323 cmnUe->proc = NULLP;
5324 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, hqP->hqE->cell);
/* laaCb must survive the memset below, so stash and restore it */
5326 tmpCb = allocInfo->laaCb;
5328 cmMemset((U8 *)allocInfo, (U8)0, sizeof(RgSchDlRbAlloc));
5329 allocInfo->rnti = ue->ueId;
5331 allocInfo->laaCb = tmpCb;
5333 /* Fix: syed moving this to a common function for both scheduled
5334 * and non-scheduled UEs */
5335 cmnUe->outStndAlloc = 0;
5337 rgSCHCmnDlHqPResetTemp(hqP);
5340 } /* rgSCHCmnDlUeResetTemp */
5342 /***********************************************************
5344 * Func : rgSCHCmnUlUeResetTemp
5346 * Desc : Reset whatever variables were temporarily used
5347 * during UE scheduling.
5355 **********************************************************/
5357 PUBLIC Void rgSCHCmnUlUeResetTemp
5363 PUBLIC Void rgSCHCmnUlUeResetTemp(cell, ue)
/* Per-TTI cleanup of the UE's temporary UL scheduling state:
 * simply zeroes the common UL allocation structure. */
5368 RgSchCmnUlUe *cmnUlUe = RG_SCH_CMN_GET_UL_UE(ue,cell);
5370 TRC2(rgSCHCmnUlUeResetTemp);
5372 cmMemset((U8 *)&cmnUlUe->alloc, (U8)0, sizeof(cmnUlUe->alloc));
5375 } /* rgSCHCmnUlUeResetTemp */
5380 * @brief This function fills the PDCCH information from dlProc.
5384 * Function: rgSCHCmnFillPdcch
5385 * Purpose: This function fills in the PDCCH information
5386 * obtained from the RgSchDlRbAlloc
5387 * during common channel scheduling(P, SI, RA - RNTI's).
5389 * Invoked by: Downlink Scheduler
5391 * @param[out] RgSchPdcch* pdcch
5392 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5397 PUBLIC Void rgSCHCmnFillPdcch
5401 RgSchDlRbAlloc *rbAllocInfo
5404 PUBLIC Void rgSCHCmnFillPdcch(cell, pdcch, rbAllocInfo)
5407 RgSchDlRbAlloc *rbAllocInfo;
/* Fills a PDCCH for COMMON-channel allocations (P/SI/RA-RNTI) from the
 * RB allocation result. Supported DCI formats here: B1/B2 (5GTF), 1A
 * (localized type-2 RA) and 1 (type-0 RA); anything else is logged. */
5411 TRC2(rgSCHCmnFillPdcch);
5413 /* common channel pdcch filling,
5414 * only 1A and Local is supported */
5415 pdcch->rnti = rbAllocInfo->rnti;
5416 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
5417 switch(rbAllocInfo->dciFormat)
5419 #ifdef RG_5GTF /* ANOOP: ToDo: DCI format B1/B2 filling */
5420 case TFU_DCI_FORMAT_B1:
/* Common-channel grants always use hqProcId 0, ndi 0 and fixed TPC */
5423 pdcch->dci.u.formatB1Info.formatType = 0;
5424 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].cmnGrnt.xPDSCHRange;
5425 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].cmnGrnt.rbAssign;
5426 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = 0;
5427 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5428 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = 0;
5429 //pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].ndi;
5430 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].cmnGrnt.rv;
5431 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5432 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5433 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5434 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5435 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5436 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5437 //TODO_SID: Need to update
5438 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5439 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5440 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5441 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5442 //TODO_SID: Need to check.Currently setting 0(1 layer, ports(8) w/o OCC).
5443 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
5444 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].cmnGrnt.SCID;
5445 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
5446 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
5447 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
5449 break; /* case TFU_DCI_FORMAT_B1: */
5452 case TFU_DCI_FORMAT_B2:
5454 //printf(" RG_5GTF:: Pdcch filling with DCI format B2\n");
5456 break; /* case TFU_DCI_FORMAT_B2: */
5459 case TFU_DCI_FORMAT_1A:
5460 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
5462 /*Nprb indication at PHY for common Ch
5463 *setting least significant bit of tpc field to 1 if
5464 nPrb=3 and 0 otherwise. */
5465 if (rbAllocInfo->nPrb == 3)
5467 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 1;
5471 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 0;
/* Localized VRB, resource block assignment encoded as RIV (36.213) */
5473 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
5474 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
5475 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
5476 rbAllocInfo->tbInfo[0].imcs;
5477 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = 0;
5478 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = 0;
5480 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
5482 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
5483 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
5484 rbAllocInfo->allocInfo.raType2.rbStart,
5485 rbAllocInfo->allocInfo.raType2.numRb);
5488 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = \
5491 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
5492 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
5495 break; /* case TFU_DCI_FORMAT_1A: */
5496 case TFU_DCI_FORMAT_1:
5497 pdcch->dci.u.format1Info.tpcCmd = 0;
5498 /* Avoiding this check,as we dont support Type1 RA */
5500 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
/* Unpack the 32-bit type-0 allocation bitmask into the 4-byte map */
5503 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
5504 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
5505 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
5507 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
5508 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
5510 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
5511 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
5513 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
5514 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
5518 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
5519 pdcch->dci.u.format1Info.allocInfo.ndi = 0;
5520 pdcch->dci.u.format1Info.allocInfo.mcs = rbAllocInfo->tbInfo[0].imcs;
5521 pdcch->dci.u.format1Info.allocInfo.rv = 0;
5523 pdcch->dci.u.format1Info.dai = 1;
/* NOTE(review): log text below has typos ("icorrect", "dciForamt") —
 * left untouched here since it is a runtime string, not a comment */
5527 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Allocator's icorrect "
5528 "dciForamt Fill RNTI:%d",rbAllocInfo->rnti);
5536 * @brief This function finds whether the subframe is special subframe or not.
5540 * Function: rgSCHCmnIsSplSubfrm
5541 * Purpose: This function finds the subframe index of the special subframe
5542 * and finds whether the current DL index matches it or not.
5544 * Invoked by: Scheduler
5546 * @param[in] U8 splfrmCnt
5547 * @param[in] U8 curSubfrmIdx
5548 * @param[in] U8 periodicity
5549 * @param[in] RgSchTddSubfrmInfo *subfrmInfo
5554 PRIVATE Bool rgSCHCmnIsSplSubfrm
5559 RgSchTddSubfrmInfo *subfrmInfo
5562 PRIVATE Bool rgSCHCmnIsSplSubfrm(splfrmCnt, curSubfrmIdx, periodicity, subfrmInfo)
5566 RgSchTddSubfrmInfo *subfrmInfo;
/* TDD helper: derives the subframe index of the next special subframe
 * from the special-subframe count and switch-point periodicity, then
 * reports whether it matches the current DL subframe index. */
5572 TRC2(rgSCHCmnIsSplSubfrm);
5576 if(periodicity == RG_SCH_CMN_5_MS_PRD)
/* 5 ms switch periodicity: two special subframes per radio frame,
 * so DL subframe count alternates between the two half-frames */
5580 dlSfCnt = ((splfrmCnt-1)/2) *\
5581 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
5582 dlSfCnt = dlSfCnt + subfrmInfo->numFrmHf1;
5586 dlSfCnt = (splfrmCnt/2) * \
5587 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
/* 10 ms periodicity: one special subframe per radio frame */
5592 dlSfCnt = splfrmCnt * subfrmInfo->numFrmHf1;
5594 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1 +\
5595 (periodicity*splfrmCnt - dlSfCnt);
5599 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1;
5602 if(splfrmIdx == curSubfrmIdx)
5611 * @brief This function updates DAI or UL index.
5615 * Function: rgSCHCmnUpdHqAndDai
5616 * Purpose: Updates the DAI based on UL-DL Configuration
5617 * index and UE. It also updates the HARQ feedback
5618 * time and 'm' index.
5622 * @param[in] RgDlHqProcCb *hqP
5623 * @param[in] RgSchDlSf *subFrm
5624 * @param[in] RgSchDlHqTbCb *tbCb
5625 * @param[in] U8 tbAllocIdx
5630 PRIVATE Void rgSCHCmnUpdHqAndDai
5632 RgSchDlHqProcCb *hqP,
5634 RgSchDlHqTbCb *tbCb,
5638 PRIVATE Void rgSCHCmnUpdHqAndDai(hqP, subFrm, tbCb,tbAllocIdx)
5639 RgSchDlHqProcCb *hqP;
5641 RgSchDlHqTbCb *tbCb;
/* TDD: records the HARQ feedback occasion (sfn/subframe/m) for this TB
 * and, for TB0 of a UE allocation, updates the DL/UL DAI counters. */
5645 RgSchUeCb *ue = hqP->hqE->ue;
5647 TRC2(rgSCHCmnUpdHqAndDai);
5651 /* set the time at which UE shall send the feedback
5652 * for this process */
5653 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5654 subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5655 tbCb->fdbkTime.subframe = subFrm->dlFdbkInfo.subframe;
5656 tbCb->m = subFrm->dlFdbkInfo.m;
5660 /* set the time at which UE shall send the feedback
5661 * for this process */
5662 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5663 hqP->subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5664 tbCb->fdbkTime.subframe = hqP->subFrm->dlFdbkInfo.subframe;
5665 tbCb->m = hqP->subFrm->dlFdbkInfo.m;
5668 /* ccpu00132340-MOD- DAI need to be updated for first TB only*/
5669 if(ue && !tbAllocIdx)
5671 Bool havePdcch = (tbCb->hqP->pdcch ? TRUE : FALSE);
5674 dlDai = rgSCHCmnUpdDai(ue, &tbCb->fdbkTime, tbCb->m, havePdcch,tbCb->hqP,
5677 {/* Non SPS occasions */
5678 tbCb->hqP->pdcch->dlDai = dlDai;
5679 /* hqP->ulDai is used for N1 resource filling
5680 * when SPS occaions present in a bundle */
5681 tbCb->hqP->ulDai = tbCb->dai;
5682 tbCb->hqP->dlDai = dlDai;
5686 /* Updating pucchFdbkIdx for both PUCCH or PUSCH
5688 tbCb->pucchFdbkIdx = tbCb->hqP->ulDai;
5695 * @brief This function updates DAI or UL index.
5699 * Function: rgSCHCmnUpdDai
5700 * Purpose: Updates the DAI in the ack-nack info, a valid
5701 * ue should be passed
5705 * @param[in] RgDlHqProcCb *hqP
5706 * @param[in] RgSchDlSf *subFrm
5707 * @param[in] RgSchDlHqTbCb *tbCb
5712 PUBLIC U8 rgSCHCmnUpdDai
5715 CmLteTimingInfo *fdbkTime,
5718 RgSchDlHqProcCb *hqP,
5722 PUBLIC U8 rgSCHCmnUpdDai(ue, fdbkTime, m, havePdcch,tbCb,servCellId,hqP,ulDai)
5724 CmLteTimingInfo *fdbkTime;
5727 RgSchDlHqProcCb *hqP;
/* TDD DAI bookkeeping: finds (or allocates) the UE's ACK/NACK feedback
 * record for the given feedback occasion on the serving cell, bumps the
 * UL/DL DAI counters, and returns the updated DL DAI.
 * A valid ue must be passed; ulDai is written only when requested. */
5731 RgSchTddANInfo *anInfo;
5733 U8 ackNackFdbkArrSize;
5736 TRC2(rgSCHCmnUpdDai);
5741 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
5742 hqP->hqE->cell->cellId,
5745 servCellIdx = RGSCH_PCELL_INDEX;
5747 ackNackFdbkArrSize = hqP->hqE->cell->ackNackFdbkArrSize;
5749 {/* SPS on primary cell */
5750 servCellIdx = RGSCH_PCELL_INDEX;
5751 ackNackFdbkArrSize = ue->cell->ackNackFdbkArrSize;
5755 anInfo = rgSCHUtlGetUeANFdbkInfo(ue, fdbkTime,servCellIdx);
5757 /* If no ACK/NACK feedback already present, create a new one */
5760 anInfo = &ue->cellInfo[servCellIdx]->anInfo[ue->cellInfo[servCellIdx]->nextFreeANIdx];
5761 anInfo->sfn = fdbkTime->sfn;
5762 anInfo->subframe = fdbkTime->subframe;
5763 anInfo->latestMIdx = m;
5764 /* Fixing DAI value - ccpu00109162 */
5765 /* Handle TDD case as in MIMO definition of the function */
5771 anInfo->isSpsOccasion = FALSE;
5772 /* set the free Index to store Ack/Nack Information*/
5773 ue->cellInfo[servCellIdx]->nextFreeANIdx = (ue->cellInfo[servCellIdx]->nextFreeANIdx + 1) %
/* Existing record for this occasion: just bump the counters */
5779 anInfo->latestMIdx = m;
5780 /* Fixing DAI value - ccpu00109162 */
5781 /* Handle TDD case as in MIMO definition of the function */
5782 anInfo->ulDai = anInfo->ulDai + 1;
5785 anInfo->dlDai = anInfo->dlDai + 1;
5789 /* ignoring the Scell check,
5790 * for primary cell this field is unused*/
5793 anInfo->n1ResTpcIdx = hqP->tpc;
5797 {/* As this not required for release pdcch */
5798 *ulDai = anInfo->ulDai;
5801 RETVALUE(anInfo->dlDai);
5804 #endif /* ifdef LTE_TDD */
/* Debug counter of DL HARQ retransmissions, indexed [rv][tbIdx]
 * (incremented in rgSCHCmnFillHqPTb on DTX retransmissions) */
5806 PUBLIC U32 rgHqRvRetxCnt[4][2];
/* UL grant rate accumulator; not referenced in this chunk —
 * presumably updated by the UL scheduler elsewhere (verify) */
5807 PUBLIC U32 rgUlrate_grant;
5810 * @brief This function fills the HqP TB with rbAllocInfo.
5814 * Function: rgSCHCmnFillHqPTb
5815 * Purpose: This function fills in the HqP TB with rbAllocInfo.
5817 * Invoked by: the DCI-format fill routines (rgSCHCmnFillHqPPdcchDciFrmt*)
5819 * @param[in] RgSchCellCb* cell
5820 * @param[in] RgSchDlRbAlloc *rbAllocInfo,
5821 * @param[in] U8 tbAllocIdx
5822 * @param[in] RgSchPdcch *pdcch
5828 PUBLIC Void rgSCHCmnFillHqPTb
5831 RgSchDlRbAlloc *rbAllocInfo,
5836 PUBLIC Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5838 RgSchDlRbAlloc *rbAllocInfo;
/* PRIVATE variant of the same prototype when LTEMAC_SPS is disabled
 * (the #ifdef arms are elided in this view) */
5844 PRIVATE Void rgSCHCmnFillHqPTb
5847 RgSchDlRbAlloc *rbAllocInfo,
5852 PRIVATE Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5854 RgSchDlRbAlloc *rbAllocInfo;
5858 #endif /* LTEMAC_SPS */
/* Fills the HARQ TB control block from the RB allocation result,
 * distinguishing disabled TB / retransmission / new transmission,
 * then adds the TB to the subframe's Tx list unless disabled. */
5860 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
5861 RgSchDlTbAllocInfo *tbAllocInfo = &rbAllocInfo->tbInfo[tbAllocIdx];
5862 RgSchDlHqTbCb *tbInfo = tbAllocInfo->tbCb;
5863 RgSchDlHqProcCb *hqP = tbInfo->hqP;
5865 TRC2(rgSCHCmnFillHqPTb);
5867 /*ccpu00120365-ADD-if tb is disabled, set mcs=0,rv=1.
5868 * Relevant for DCI format 2 & 2A as per 36.213-7.1.7.2
5870 if ( tbAllocInfo->isDisabled)
5873 tbInfo->dlGrnt.iMcs = 0;
5874 tbInfo->dlGrnt.rv = 1;
5876 /* Fill for TB retransmission */
5877 else if (tbInfo->txCntr > 0)
5880 tbInfo->timingInfo = cmnCellDl->time;
/* On DTX, keep the same MCS and count the retransmission per RV */
5882 if ((tbInfo->isAckNackDtx == TFU_HQFDB_DTX))
5884 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5885 rgHqRvRetxCnt[tbInfo->dlGrnt.rv][tbInfo->tbIdx]++;
/* NACK path: advance the redundancy version cyclically */
5889 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[++(tbInfo->ccchSchdInfo.rvIdx) & 0x03];
5892 /* fill the scheduler information of hqProc */
5893 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5894 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx,hqP->tbInfo,tbInfo->tbIdx );
5895 rgSCHDhmHqTbRetx(hqP->hqE, tbInfo->timingInfo, hqP, tbInfo->tbIdx);
5897 /* Fill for TB transmission */
5900 /* Fill the HqProc */
5901 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5902 tbInfo->tbSz = tbAllocInfo->bytesAlloc;
5903 tbInfo->timingInfo = cmnCellDl->time;
/* First transmission always starts at RV index 0 */
5905 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[0];
5906 /* fill the scheduler information of hqProc */
5907 tbInfo->ccchSchdInfo.rvIdx = 0;
5908 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5909 /* DwPts Scheduling Changes Start */
5910 /* DwPts Scheduling Changes End */
5911 cell->measurements.dlBytesCnt += tbAllocInfo->bytesAlloc;
5914 /*ccpu00120365:-ADD-only add to subFrm list if tb is not disabled */
5915 if ( tbAllocInfo->isDisabled == FALSE )
5917 /* Set the number of transmitting SM layers for this TB */
5918 tbInfo->numLyrs = tbAllocInfo->noLyr;
5919 /* Set the TB state as WAITING to indicate TB has been
5920 * considered for transmission */
5921 tbInfo->state = HQ_TB_WAITING;
5922 hqP->subFrm = rbAllocInfo->dlSf;
5923 tbInfo->hqP->pdcch = pdcch;
5924 //tbInfo->dlGrnt.numRb = rbAllocInfo->rbsAlloc;
5925 rgSCHUtlDlHqPTbAddToTx(hqP->subFrm, hqP, tbInfo->tbIdx);
5931 * @brief This function fills the PDCCH DCI format 2 information from dlProc.
5935 * Function: rgSCHCmnFillHqPPdcchDciFrmt2
5936 * Purpose: This function fills in the PDCCH information
5937 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
5938 * for dedicated service scheduling. It also
5939 * obtains TPC to be filled in from the power module.
5940 * Assign the PDCCH to HQProc.
5942 * Invoked by: Downlink Scheduler
5944 * @param[in] RgSchCellCb* cell
5945 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5946 * @param[in] RgDlHqProc* hqP
5947 * @param[out] RgSchPdcch *pdcch
5953 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2
5956 RgSchDlRbAlloc *rbAllocInfo,
5957 RgSchDlHqProcCb *hqP,
5962 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, pdcch, tpc)
5964 RgSchDlRbAlloc *rbAllocInfo;
5965 RgSchDlHqProcCb *hqP;
/* 5GTF dedicated scheduling: fills the HARQ TB from the allocation and
 * then populates the DCI format B1 (single-layer) or B2 (two-layer)
 * payload. Many fields are hardcoded placeholders (see TODO_SID). */
5971 TRC2(rgSCHCmnFillHqPPdcchDciFrmtB1B2)
5973 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
5974 //Currently hardcoding values here.
5975 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
5976 switch(rbAllocInfo->dciFormat)
5978 case TFU_DCI_FORMAT_B1:
5980 pdcch->dci.u.formatB1Info.formatType = 0;
5981 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
5982 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
5983 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
5984 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5985 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
5986 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
5987 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5988 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5989 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5990 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5991 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5992 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5993 //TODO_SID: Need to update
5994 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5995 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5996 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5997 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5998 //TODO_SID: Need to check.Currently setting 0(1 layer, ports(8) w/o OCC).
5999 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
6000 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
6001 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
6002 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
6003 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
6006 case TFU_DCI_FORMAT_B2:
/* B2 mirrors B1 except formatType=1 and AntPorts_numLayers=4 */
6008 pdcch->dci.u.formatB2Info.formatType = 1;
6009 pdcch->dci.u.formatB2Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
6010 pdcch->dci.u.formatB2Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
6011 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
6012 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
6013 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6014 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6015 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
6016 pdcch->dci.u.formatB2Info.CSI_BSI_BRI_Req = 0;
6017 pdcch->dci.u.formatB2Info.CSIRS_BRRS_TxTiming = 0;
6018 pdcch->dci.u.formatB2Info.CSIRS_BRRS_SymbIdx = 0;
6019 pdcch->dci.u.formatB2Info.CSIRS_BRRS_ProcInd = 0;
6020 pdcch->dci.u.formatB2Info.xPUCCH_TxTiming = 0;
6021 //TODO_SID: Need to update
6022 pdcch->dci.u.formatB2Info.freqResIdx_xPUCCH = 0;
6023 pdcch->dci.u.formatB2Info.beamSwitch = 0;
6024 pdcch->dci.u.formatB2Info.SRS_Config = 0;
6025 pdcch->dci.u.formatB2Info.SRS_Symbol = 0;
6026 //TODO_SID: Need to check.Currently setting 4(2 layer, ports(8,9) w/o OCC).
6027 pdcch->dci.u.formatB2Info.AntPorts_numLayers = 4;
6028 pdcch->dci.u.formatB2Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
6029 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
6030 pdcch->dci.u.formatB2Info.tpcCmd = 1; //tpc;
6031 pdcch->dci.u.formatB2Info.DL_PCRS = 0;
6035 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId," 5GTF_ERROR Allocator's icorrect "
6036 "dciForamt Fill RNTI:%d",rbAllocInfo->rnti);
6043 extern U32 totPcellSCell;
6044 extern U32 addedForScell;
6045 extern U32 addedForScell1;
6046 extern U32 addedForScell2;
6048 * @brief This function fills the PDCCH information from dlProc.
6052 * Function: rgSCHCmnFillHqPPdcch
6053 * Purpose: This function fills in the PDCCH information
6054 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6055 * for dedicated service scheduling. It also
6056 * obtains TPC to be filled in from the power module.
6057 * Assign the PDCCH to HQProc.
6059 * Invoked by: Downlink Scheduler
6061 * @param[in] RgSchCellCb* cell
6062 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6063 * @param[in] RgDlHqProc* hqP
6068 PUBLIC Void rgSCHCmnFillHqPPdcch
6071 RgSchDlRbAlloc *rbAllocInfo,
6072 RgSchDlHqProcCb *hqP
6075 PUBLIC Void rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP)
6077 RgSchDlRbAlloc *rbAllocInfo;
6078 RgSchDlHqProcCb *hqP;
/* Dedicated-scheduling PDCCH fill entry point: derives TPC, accounts
 * PRB/iTbs/throughput statistics per UE and per cell (CA-aware), then
 * dispatches to the per-DCI-format fill routine. */
6081 RgSchCmnDlCell *cmnCell = RG_SCH_CMN_GET_DL_CELL(cell);
6082 RgSchPdcch *pdcch = rbAllocInfo->pdcch;
6085 TRC2(rgSCHCmnFillHqPPdcch);
6090 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6097 tpc = rgSCHPwrPucchTpcForUe(cell, hqP->hqE->ue);
6099 /* Fix: syed moving this to a common function for both scheduled
6100 * and non-scheduled UEs */
6102 pdcch->ue = hqP->hqE->ue;
/* PRB accounting: non-CSG members tracked separately from the total */
6103 if (hqP->hqE->ue->csgMmbrSta == FALSE)
6105 cmnCell->ncsgPrbCnt += rbAllocInfo->rbsAlloc;
6107 cmnCell->totPrbCnt += rbAllocInfo->rbsAlloc;
/* Per-UE scheduler statistics for CW0 (tenbStats) */
6110 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlPrbUsg +=
6111 rbAllocInfo->rbsAlloc;
6112 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw0iTbs +=
6113 rbAllocInfo->tbInfo[0].iTbs;
6114 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw0iTbs ++;
6115 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6116 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
/* Carrier-aggregation debug counters (bytes -> bits via << 3) */
6119 totPcellSCell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6120 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6122 addedForScell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6123 addedForScell1 += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6125 printf (" Hqp %d cell %d addedForScell %lu addedForScell1 %lu sfn:sf %d:%d \n",
6127 hqP->hqE->cell->cellId,
6131 cell->crntTime.slot);
/* Per-cell scheduler statistics for CW0 */
6135 hqP->hqE->cell->tenbStats->sch.dlPrbUsage[0] +=
6136 rbAllocInfo->rbsAlloc;
6137 hqP->hqE->cell->tenbStats->sch.dlSumCw0iTbs +=
6138 rbAllocInfo->tbInfo[0].iTbs;
6139 hqP->hqE->cell->tenbStats->sch.dlNumCw0iTbs ++;
6140 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6141 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
/* Second codeword scheduled: mirror the CW0 accounting for CW1 */
6142 if (rbAllocInfo->tbInfo[1].schdlngForTb)
6144 hqP->hqE->cell->tenbStats->sch.dlSumCw1iTbs +=
6145 rbAllocInfo->tbInfo[1].iTbs;
6146 hqP->hqE->cell->tenbStats->sch.dlNumCw1iTbs ++;
6147 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw1iTbs +=
6148 rbAllocInfo->tbInfo[1].iTbs;
6149 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw1iTbs ++;
6150 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6151 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6155 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6157 addedForScell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6158 addedForScell2 += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6160 printf (" Hqp %d cell %d addedForScell %lu addedForScell2 %lu \n",
6162 hqP->hqE->cell->cellId,
6167 totPcellSCell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6171 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6172 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6175 printf ("add DL TPT is %lu sfn:sf %d:%d \n", hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt ,
6177 cell->crntTime.slot);
6183 pdcch->rnti = rbAllocInfo->rnti;
6184 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
6185 /* Update subframe and pdcch info in HqTb control block */
6186 switch(rbAllocInfo->dciFormat)
6189 case TFU_DCI_FORMAT_B1:
6190 case TFU_DCI_FORMAT_B2:
6192 // printf(" RG_5GTF:: Pdcch filling with DCI format B1/B2\n");
6193 rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, \
6199 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6200 "Allocator's incorrect dciForamt Fill for RNTI:%d",rbAllocInfo->rnti);
6207 * @brief This function fills the PDCCH DCI format 1 information from dlProc.
6211 * Function: rgSCHCmnFillHqPPdcchDciFrmt1
6212 * Purpose: This function fills in the PDCCH information
6213 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6214 * for dedicated service scheduling. It also
6215 * obtains TPC to be filled in from the power module.
6216 * Assign the PDCCH to HQProc.
6218 * Invoked by: Downlink Scheduler
6220 * @param[in] RgSchCellCb* cell
6221 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6222 * @param[in] RgDlHqProc* hqP
6223 * @param[out] RgSchPdcch *pdcch
6230 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1
6233 RgSchDlRbAlloc *rbAllocInfo,
6234 RgSchDlHqProcCb *hqP,
6239 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1(cell, rbAllocInfo, hqP, pdcch, tpc)
6241 RgSchDlRbAlloc *rbAllocInfo;
6242 RgSchDlHqProcCb *hqP;
/* Fills DCI format 1 for a dedicated allocation: HARQ TB fields,
 * type-0 resource bitmap, HARQ proc id (0 for SPS activation /
 * reactivation), ndi/mcs/rv, and the TDD DAI. */
6249 RgSchTddANInfo *anInfo;
6253 /* For activation or reactivation,
6254 * Harq ProcId should be 0 */
6255 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6258 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1)
6260 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6261 pdcch->dci.u.format1Info.tpcCmd = tpc;
6262 /* Avoiding this check,as we dont support Type1 RA */
6264 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
/* Unpack the 32-bit type-0 allocation bitmask into the 4-byte map */
6267 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
6268 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
6269 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6271 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
6272 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6274 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
6275 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6277 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
6278 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
/* First Tx of an SPS activation/reactivation forces hqProcId = 0 */
6283 if ((!(hqP->tbInfo[0].txCntr)) &&
6284 (cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6285 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6286 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV)))
6289 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6293 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6296 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6299 pdcch->dci.u.format1Info.allocInfo.ndi =
6300 rbAllocInfo->tbInfo[0].tbCb->ndi;
6301 pdcch->dci.u.format1Info.allocInfo.mcs =
6302 rbAllocInfo->tbInfo[0].imcs;
6303 pdcch->dci.u.format1Info.allocInfo.rv =
6304 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
/* TDD DAI: look up the ACK/NACK record for this TB's feedback time */
6306 if(hqP->hqE->ue != NULLP)
6309 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6310 hqP->hqE->cell->cellId,
6313 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6314 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6316 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6317 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6322 pdcch->dci.u.format1Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6326 /* Fixing DAI value - ccpu00109162 */
6327 pdcch->dci.u.format1Info.dai = RG_SCH_MAX_DAI_IDX;
6333 /* always 0 for RACH */
6334 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6336 /* Fixing DAI value - ccpu00109162 */
6337 pdcch->dci.u.format1Info.dai = 1;
6346 * @brief This function fills the PDCCH DCI format 1A information from dlProc.
6350 * Function: rgSCHCmnFillHqPPdcchDciFrmt1A
6351 * Purpose: This function fills in the PDCCH information
6352 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6353 * for dedicated service scheduling. It also
6354 * obtains TPC to be filled in from the power module.
6355 * Assign the PDCCH to HQProc.
6357 * Invoked by: Downlink Scheduler
6359 * @param[in] RgSchCellCb* cell
6360 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6361 * @param[in] RgDlHqProc* hqP
6362 * @param[out] RgSchPdcch *pdcch
6368 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A
6371 RgSchDlRbAlloc *rbAllocInfo,
6372 RgSchDlHqProcCb *hqP,
6377 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A(cell, rbAllocInfo, hqP, pdcch, tpc)
6379 RgSchDlRbAlloc *rbAllocInfo;
6380 RgSchDlHqProcCb *hqP;
/* Fills DCI format 1A for a dedicated allocation: localized type-2
 * RA encoded as a RIV, HARQ proc id (0 for SPS activation /
 * reactivation), ndi/mcs/rv, and the TDD DAI. */
6387 RgSchTddANInfo *anInfo;
6391 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6394 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1A)
6396 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6397 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
6398 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = tpc;
6399 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
6400 rbAllocInfo->tbInfo[0].imcs;
6401 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = TRUE;
/* First Tx of an SPS activation/reactivation forces hqProcId = 0 */
6403 if ((!(hqP->tbInfo[0].txCntr)) &&
6404 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6405 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6406 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6409 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val = 0;
6413 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val
6417 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val =
6420 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = \
6421 rbAllocInfo->tbInfo[0].tbCb->ndi;
6422 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = \
6423 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6424 /* As of now, we do not support Distributed allocations */
6425 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
6426 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
6427 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
6429 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
6430 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6431 rbAllocInfo->allocInfo.raType2.rbStart,
6432 rbAllocInfo->allocInfo.raType2.numRb);
/* TDD DAI: look up the ACK/NACK record for this TB's feedback time */
6434 if(hqP->hqE->ue != NULLP)
6437 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6438 hqP->hqE->cell->cellId,
6440 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6441 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6443 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6444 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6447 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6450 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val =
6451 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6455 /* Fixing DAI value - ccpu00109162 */
6456 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = RG_SCH_MAX_DAI_IDX;
6457 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6458 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6465 /* always 0 for RACH */
6466 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres
6469 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6470 /* Fixing DAI value - ccpu00109162 */
6471 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
6479 * @brief This function fills the PDCCH DCI format 1B information from dlProc.
6483 * Function: rgSCHCmnFillHqPPdcchDciFrmt1B
6484 * Purpose: This function fills in the PDCCH information
6485 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6486 * for dedicated service scheduling. It also
6487 * obtains TPC to be filled in from the power module.
6488 * Assign the PDCCH to HQProc.
6490 * Invoked by: Downlink Scheduler
6492 * @param[in] RgSchCellCb* cell
6493 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6494 * @param[in] RgDlHqProc* hqP
6495 * @param[out] RgSchPdcch *pdcch
/*
 * rgSCHCmnFillHqPPdcchDciFrmt1B
 * Fills the DCI format 1B fields of 'pdcch' from the DL HARQ process (hqP)
 * and the RB allocation decision (rbAllocInfo) for dedicated scheduling;
 * 'tpc' is supplied by the caller (power module).
 * NOTE(review): this extract is lossy — ANSI/K&R prototype #ifdefs, braces
 * and some statements are elided; comments below hedge accordingly.
 */
6501 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B
6504 RgSchDlRbAlloc *rbAllocInfo,
6505 RgSchDlHqProcCb *hqP,
/* K&R-style definition (presumably under a non-ANSI #ifdef branch — confirm) */
6510 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B(cell, rbAllocInfo, hqP, pdcch, tpc)
6512 RgSchDlRbAlloc *rbAllocInfo;
6513 RgSchDlHqProcCb *hqP;
6520 RgSchTddANInfo *anInfo;
6524 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6527 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1B)
6529 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6530 pdcch->dci.u.format1bInfo.tpcCmd = tpc;
6531 pdcch->dci.u.format1bInfo.allocInfo.mcs = \
6532 rbAllocInfo->tbInfo[0].imcs;
/* First transmission of an SPS activation/reactivation uses HARQ proc ID 0;
 * otherwise the process's own ID is signalled. */
6534 if ((!(hqP->tbInfo[0].txCntr)) &&
6535 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6536 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6537 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6540 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = 0;
6544 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
/* NOTE(review): second assignment likely belongs to an elided #else (non-SPS
 * build) — confirm against the full file. */
6547 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
6549 pdcch->dci.u.format1bInfo.allocInfo.ndi = \
6550 rbAllocInfo->tbInfo[0].tbCb->ndi;
6551 pdcch->dci.u.format1bInfo.allocInfo.rv = \
6552 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6553 /* As of now, we do not support Distributed allocations */
6554 pdcch->dci.u.format1bInfo.allocInfo.isLocal = TRUE;
6555 pdcch->dci.u.format1bInfo.allocInfo.nGap2.pres = NOTPRSNT;
6556 pdcch->dci.u.format1bInfo.allocInfo.alloc.type =
/* Localized type-2 allocation: RIV derived from start RB and RB count. */
6558 pdcch->dci.u.format1bInfo.allocInfo.alloc.u.riv =
6559 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6560 rbAllocInfo->allocInfo.raType2.rbStart,
6561 rbAllocInfo->allocInfo.raType2.numRb);
6562 /* Fill precoding Info */
/* precIdxInfo packs PMI confirmation in the high nibble, TPMI in the low. */
6563 pdcch->dci.u.format1bInfo.allocInfo.pmiCfm = \
6564 rbAllocInfo->mimoAllocInfo.precIdxInfo >> 4;
6565 pdcch->dci.u.format1bInfo.allocInfo.tPmi = \
6566 rbAllocInfo->mimoAllocInfo.precIdxInfo & 0x0F;
/* DAI handling (presumably TDD-only — enclosing #ifdef elided; confirm).
 * Carrier-aggregation builds look up the serving-cell index first. */
6568 if(hqP->hqE->ue != NULLP)
6571 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6572 hqP->hqE->cell->cellId,
6574 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6575 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6577 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6578 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6583 pdcch->dci.u.format1bInfo.dai =
6584 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
/* Fallback when no A/N feedback info exists: use max DAI and log. */
6588 pdcch->dci.u.format1bInfo.dai = RG_SCH_MAX_DAI_IDX;
6589 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6590 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6601 * @brief This function fills the PDCCH DCI format 2 information from dlProc.
6605 * Function: rgSCHCmnFillHqPPdcchDciFrmt2
6606 * Purpose: This function fills in the PDCCH information
6607 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6608 * for dedicated service scheduling. It also
6609 * obtains TPC to be filled in from the power module.
6610 * Assign the PDCCH to HQProc.
6612 * Invoked by: Downlink Scheduler
6614 * @param[in] RgSchCellCb* cell
6615 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6616 * @param[in] RgDlHqProc* hqP
6617 * @param[out] RgSchPdcch *pdcch
/*
 * rgSCHCmnFillHqPPdcchDciFrmt2
 * Fills the DCI format 2 (spatial-multiplexing, up to 2 TBs) fields of
 * 'pdcch' from the DL HARQ process and RB allocation for dedicated
 * scheduling; 'tpc' comes from the power module.
 * NOTE(review): extraction is lossy — #ifdefs/braces elided; hedged comments.
 */
6623 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2
6626 RgSchDlRbAlloc *rbAllocInfo,
6627 RgSchDlHqProcCb *hqP,
6632 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2(cell, rbAllocInfo, hqP, pdcch, tpc)
6634 RgSchDlRbAlloc *rbAllocInfo;
6635 RgSchDlHqProcCb *hqP;
6642 RgSchTddANInfo *anInfo;
6646 /* ccpu00119023-ADD-For activation or reactivation,
6647 * Harq ProcId should be 0 */
6648 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6651 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2)
6653 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6654 /*ccpu00120365:-ADD-call also if tb is disabled */
6655 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6656 rbAllocInfo->tbInfo[1].isDisabled)
6658 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6660 pdcch->dci.u.format2Info.tpcCmd = tpc;
6661 /* Avoiding this check,as we dont support Type1 RA */
/* Type-0 RA: 32-bit RBG bitmask serialized big-endian into 4 map bytes. */
6663 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6666 pdcch->dci.u.format2Info.allocInfo.isAllocType0 = TRUE;
6667 pdcch->dci.u.format2Info.allocInfo.resAllocMap[0] =
6668 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6670 pdcch->dci.u.format2Info.allocInfo.resAllocMap[1] =
6671 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6673 pdcch->dci.u.format2Info.allocInfo.resAllocMap[2] =
6674 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6676 pdcch->dci.u.format2Info.allocInfo.resAllocMap[3] =
6677 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
/* SPS activation/reactivation on first Tx signals HARQ proc ID 0. */
6682 if ((!(hqP->tbInfo[0].txCntr)) &&
6683 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6684 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6685 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6688 pdcch->dci.u.format2Info.allocInfo.harqProcId = 0;
6692 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6695 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6697 /* Initialize the TB info for both the TBs */
/* mcs=0 / rv=1 are the "TB disabled" defaults; overwritten below for
 * whichever TB(s) are actually scheduled. */
6698 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].mcs = 0;
6699 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].rv = 1;
6700 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].mcs = 0;
6701 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].rv = 1;
6702 /* Fill tbInfo for scheduled TBs */
/* Indexed by the TB's own tbIdx so a swapped mapping is preserved. */
6703 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6704 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6705 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6706 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6707 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6708 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6709 /* If we reach this function. It is safely assumed that
6710 * rbAllocInfo->tbInfo[0] always has non default valid values.
6711 * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6712 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6714 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6715 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6716 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6717 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6718 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6719 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6721 pdcch->dci.u.format2Info.allocInfo.transSwap =
6722 rbAllocInfo->mimoAllocInfo.swpFlg;
6723 pdcch->dci.u.format2Info.allocInfo.precoding =
6724 rbAllocInfo->mimoAllocInfo.precIdxInfo;
/* DAI fill (presumably TDD-only — enclosing #ifdef elided; confirm). */
6726 if(hqP->hqE->ue != NULLP)
6730 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6731 hqP->hqE->cell->cellId,
6733 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6734 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6736 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6737 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6742 pdcch->dci.u.format2Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
/* No A/N info available: fall back to max DAI index and log an error. */
6746 pdcch->dci.u.format2Info.dai = RG_SCH_MAX_DAI_IDX;
6747 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6748 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6758 * @brief This function fills the PDCCH DCI format 2A information from dlProc.
6762 * Function: rgSCHCmnFillHqPPdcchDciFrmt2A
6763 * Purpose: This function fills in the PDCCH information
6764 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6765 * for dedicated service scheduling. It also
6766 * obtains TPC to be filled in from the power module.
6767 * Assign the PDCCH to HQProc.
6769 * Invoked by: Downlink Scheduler
6771 * @param[in] RgSchCellCb* cell
6772 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6773 * @param[in] RgDlHqProc* hqP
6774 * @param[out] RgSchPdcch *pdcch
/*
 * rgSCHCmnFillHqPPdcchDciFrmt2A
 * Fills the DCI format 2A (open-loop spatial multiplexing, up to 2 TBs)
 * fields of 'pdcch' from the DL HARQ process and RB allocation; mirrors
 * rgSCHCmnFillHqPPdcchDciFrmt2 except for the format2AInfo union member.
 * NOTE(review): extraction is lossy — #ifdefs/braces elided; hedged comments.
 */
6780 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A
6783 RgSchDlRbAlloc *rbAllocInfo,
6784 RgSchDlHqProcCb *hqP,
6789 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A(cell, rbAllocInfo, hqP, pdcch, tpc)
6791 RgSchDlRbAlloc *rbAllocInfo;
6792 RgSchDlHqProcCb *hqP;
6798 RgSchTddANInfo *anInfo;
6802 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6805 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2A)
6807 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6808 /*ccpu00120365:-ADD-call also if tb is disabled */
6809 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6810 rbAllocInfo->tbInfo[1].isDisabled)
6813 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6816 pdcch->dci.u.format2AInfo.tpcCmd = tpc;
6817 /* Avoiding this check,as we dont support Type1 RA */
/* Type-0 RA: 32-bit RBG bitmask serialized big-endian into 4 map bytes. */
6819 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6822 pdcch->dci.u.format2AInfo.allocInfo.isAllocType0 = TRUE;
6823 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[0] =
6824 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6826 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[1] =
6827 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6829 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[2] =
6830 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6832 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[3] =
6833 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
/* SPS activation/reactivation on first Tx signals HARQ proc ID 0. */
6838 if ((!(hqP->tbInfo[0].txCntr)) &&
6839 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6840 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6841 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6844 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = 0;
6848 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6851 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6853 /* Initialize the TB info for both the TBs */
/* mcs=0 / rv=1 are the "TB disabled" defaults; overwritten for scheduled TBs. */
6854 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].mcs = 0;
6855 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].rv = 1;
6856 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].mcs = 0;
6857 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].rv = 1;
6858 /* Fill tbInfo for scheduled TBs */
6859 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6860 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6861 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6862 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6863 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6864 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6865 /* If we reach this function. It is safely assumed that
6866 * rbAllocInfo->tbInfo[0] always has non default valid values.
6867 * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6869 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6871 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6872 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6873 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6874 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6875 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6876 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6879 pdcch->dci.u.format2AInfo.allocInfo.transSwap =
6880 rbAllocInfo->mimoAllocInfo.swpFlg;
6881 pdcch->dci.u.format2AInfo.allocInfo.precoding =
6882 rbAllocInfo->mimoAllocInfo.precIdxInfo;
/* DAI fill (presumably TDD-only — enclosing #ifdef elided; confirm). */
6884 if(hqP->hqE->ue != NULLP)
6887 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6888 hqP->hqE->cell->cellId,
6890 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6891 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6893 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6894 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6899 pdcch->dci.u.format2AInfo.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
/* No A/N info available: fall back to max DAI index and log an error. */
6903 pdcch->dci.u.format2AInfo.dai = RG_SCH_MAX_DAI_IDX;
6904 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6905 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6917 * @brief init of Sch vars.
6921 * Function: rgSCHCmnInitVars
6922 Purpose: Initialization of various UL subframe indices
6924 * @param[in] RgSchCellCb *cell
/*
 * rgSCHCmnInitVars
 * Resets every per-cell UL subframe/HARQ bookkeeping index in the common
 * UL cell control block to RGSCH_INVALID_INFO; the real values are
 * recomputed each TTI by rgSCHCmnUpdVars().
 */
6929 PRIVATE Void rgSCHCmnInitVars
6934 PRIVATE Void rgSCHCmnInitVars(cell)
6938 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6940 TRC2(rgSCHCmnInitVars);
6942 cellUl->idx = RGSCH_INVALID_INFO;
6943 cellUl->schdIdx = RGSCH_INVALID_INFO;
6944 cellUl->schdHqProcIdx = RGSCH_INVALID_INFO;
6945 cellUl->msg3SchdIdx = RGSCH_INVALID_INFO;
/* eMTC Msg3 index (presumably under an elided #ifdef EMTC_ENABLE — confirm) */
6947 cellUl->emtcMsg3SchdIdx = RGSCH_INVALID_INFO;
6949 cellUl->msg3SchdHqProcIdx = RGSCH_INVALID_INFO;
6950 cellUl->rcpReqIdx = RGSCH_INVALID_INFO;
6951 cellUl->hqFdbkIdx[0] = RGSCH_INVALID_INFO;
6952 cellUl->hqFdbkIdx[1] = RGSCH_INVALID_INFO;
6953 cellUl->reTxIdx[0] = RGSCH_INVALID_INFO;
6954 cellUl->reTxIdx[1] = RGSCH_INVALID_INFO;
6955 /* Stack Crash problem for TRACE5 Changes. Added the return below */
6962 * @brief Updation of Sch vars per TTI.
6966 * Function: rgSCHCmnUpdVars
6967 * Purpose: Updation of Sch vars per TTI.
6969 * @param[in] RgSchCellCb *cell
/*
 * rgSCHCmnUpdVars
 * Per-TTI update of the common UL cell indices: current UL subframe index,
 * PUSCH scheduling index (+ its HARQ proc), Msg3 scheduling index, reception
 * request index, HARQ feedback index and retransmission index — all derived
 * from cell->crntTime plus the various TFU/RGSCH delta constants, modulo
 * RG_SCH_CMN_UL_NUM_SF.
 * NOTE(review): this hot path contains raw printf() debug traces (content
 * lines 6992/7026/7034 below) — candidates for removal or a debug guard;
 * left untouched here.
 */
6974 PUBLIC Void rgSCHCmnUpdVars
6979 PUBLIC Void rgSCHCmnUpdVars(cell)
6983 CmLteTimingInfo timeInfo;
6984 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6987 TRC2(rgSCHCmnUpdVars);
/* Absolute subframe count from (sfn, slot), wrapped into the UL SF ring. */
6989 idx = (cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.slot);
6990 cellUl->idx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6992 printf("idx %d cellUl->idx %d RGSCH_NUM_SUB_FRAMES_5G %d time(%d %d) \n",idx,cellUl->idx ,RGSCH_NUM_SUB_FRAMES_5G,cell->crntTime.sfn,cell->crntTime.slot);
6994 /* Need to scheduler for after SCHED_DELTA */
6995 /* UL allocation has been advanced by 1 subframe
6996 * so that we do not wrap around and send feedback
6997 * before the data is even received by the PHY */
6998 /* Introduced timing delta for UL control */
6999 idx = (cellUl->idx + TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA);
7000 cellUl->schdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
7002 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
7003 TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA)
7004 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
7006 /* ccpu00127193 filling schdTime for logging and enhancement purpose*/
7007 cellUl->schdTime = timeInfo;
7009 /* msg3 scheduling two subframes after general scheduling */
7010 idx = (cellUl->idx + RG_SCH_CMN_DL_DELTA + RGSCH_RARSP_MSG3_DELTA);
7011 cellUl->msg3SchdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
7013 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
7014 RG_SCH_CMN_DL_DELTA+ RGSCH_RARSP_MSG3_DELTA)
7015 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
7017 idx = (cellUl->idx + TFU_RECPREQ_DLDELTA);
7019 cellUl->rcpReqIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
7021 /* Downlink harq feedback is sometime after data reception / harq failure */
7022 /* Since feedback happens prior to scheduling being called, we add 1 to */
7023 /* take care of getting the correct subframe for feedback */
/* + RG_SCH_CMN_UL_NUM_SF keeps the subtraction non-negative before mod. */
7024 idx = (cellUl->idx - TFU_CRCIND_ULDELTA + RG_SCH_CMN_UL_NUM_SF);
7026 printf("Finally setting cellUl->hqFdbkIdx[0] = %d TFU_CRCIND_ULDELTA %d RG_SCH_CMN_UL_NUM_SF %d\n",idx,TFU_CRCIND_ULDELTA,RG_SCH_CMN_UL_NUM_SF);
7028 cellUl->hqFdbkIdx[0] = (idx % (RG_SCH_CMN_UL_NUM_SF));
7030 idx = ((cellUl->schdIdx) % (RG_SCH_CMN_UL_NUM_SF));
7032 cellUl->reTxIdx[0] = (U8) idx;
7034 printf("cellUl->hqFdbkIdx[0] %d cellUl->reTxIdx[0] %d \n",cellUl->hqFdbkIdx[0], cellUl->reTxIdx[0] );
7036 /* RACHO: update cmn sched specific RACH variables,
7037 * mainly the prachMaskIndex */
7038 rgSCHCmnUpdRachParam(cell);
7047 * @brief To get uplink subframe index associated with current PHICH
7052 * Function: rgSCHCmnGetPhichUlSfIdx
7053 * Purpose: Gets uplink subframe index associated with current PHICH
7054 * transmission based on SFN and subframe no
7056 * @param[in] CmLteTimingInfo *timeInfo
7057 * @param[in] RgSchCellCb *cell
/*
 * rgSCHCmnGetPhichUlSfIdx
 * Returns the UL subframe index (within the cell's UL subframe ring)
 * associated with the PHICH transmitted at *timeInfo, derived from the DL
 * subframe's stored PHICH offset; returns RGSCH_INVALID_INFO when the
 * offset is not populated. TDD-specific (uses rgSchTddNumUlSubfrmTbl).
 */
7062 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx
7064 CmLteTimingInfo *timeInfo,
7068 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx(timeInfo, cell)
7069 CmLteTimingInfo *timeInfo;
7073 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7075 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7081 TRC2(rgSCHCmnGetPhichUlSfIdx);
7083 dlsf = rgSCHUtlSubFrmGet(cell, *timeInfo);
7085 if(dlsf->phichOffInfo.sfnOffset == RGSCH_INVALID_INFO)
7087 RETVALUE(RGSCH_INVALID_INFO);
7089 subframe = dlsf->phichOffInfo.subframe;
/* Subtract the stored SFN offset with wrap-around protection. */
7091 sfn = (RGSCH_MAX_SFN + timeInfo->sfn -
7092 dlsf->phichOffInfo.sfnOffset) % RGSCH_MAX_SFN;
7094 /* ccpu00130980: numUlSf(U16) parameter added to avoid integer
7095 * wrap case such that idx will be proper*/
/* UL SFs per frame * sfn + UL SFs elapsed within the frame, zero-based. */
7096 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7097 numUlSf = ((numUlSf * sfn) + rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subframe]) - 1;
7098 idx = numUlSf % (cellUl->numUlSubfrms);
7104 * @brief To get uplink subframe index.
7109 * Function: rgSCHCmnGetUlSfIdx
7110 * Purpose: Gets uplink subframe index based on SFN and subframe number.
7112 * @param[in] CmLteTimingInfo *timeInfo
7113 * @param[in] U8 ulDlCfgIdx
/*
 * rgSCHCmnGetUlSfIdx
 * Maps an absolute (sfn, subframe) time to an index in the cell's UL
 * subframe array for the configured TDD UL/DL configuration. Same counting
 * scheme as rgSCHCmnGetPhichUlSfIdx but driven directly by *timeInfo.
 */
7118 PUBLIC U8 rgSCHCmnGetUlSfIdx
7120 CmLteTimingInfo *timeInfo,
7124 PUBLIC U8 rgSCHCmnGetUlSfIdx(timeInfo, cell)
7125 CmLteTimingInfo *timeInfo;
7129 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7130 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7134 TRC2(rgSCHCmnGetUlSfIdx);
7136 /* ccpu00130980: numUlSf(U16) parameter added to avoid integer
7137 * wrap case such that idx will be proper*/
/* Total UL subframes elapsed (zero-based), wrapped into the UL SF ring. */
7138 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7139 numUlSf = ((numUlSf * timeInfo->sfn) + \
7140 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->subframe]) - 1;
7141 idx = numUlSf % (cellUl->numUlSubfrms);
7149 * @brief To get uplink hq index.
7154 * Function: rgSCHCmnGetUlHqProcIdx
7155 * Purpose: Gets uplink subframe index based on SFN and subframe number.
7157 * @param[in] CmLteTimingInfo *timeInfo
7158 * @param[in] U8 ulDlCfgIdx
/*
 * rgSCHCmnGetUlHqProcIdx
 * Returns the UL HARQ process index for the given time.
 * NOTE(review): two alternative computations are visible — a slot-based one
 * (RGSCH_NUM_SUB_FRAMES_5G / RGSCH_NUM_UL_HQ_PROC) and a TDD one using
 * rgSchTddUlNumHarqProcTbl with SFN-cycle tracking. The selecting #ifdefs
 * are elided in this extract; confirm against the full file.
 */
7163 PUBLIC U8 rgSCHCmnGetUlHqProcIdx
7165 CmLteTimingInfo *timeInfo,
7169 PUBLIC U8 rgSCHCmnGetUlHqProcIdx(timeInfo, cell)
7170 CmLteTimingInfo *timeInfo;
/* Variant 1: absolute slot count modulo the number of UL HARQ processes. */
7178 numUlSf = (timeInfo->sfn * RGSCH_NUM_SUB_FRAMES_5G + timeInfo->slot);
7179 procId = numUlSf % RGSCH_NUM_UL_HQ_PROC;
/* Variant 2 (TDD): per-config HARQ count with SFN wrap-around handling. */
7181 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7182 /*ccpu00130639 - MOD - To get correct UL HARQ Proc IDs for all UL/DL Configs*/
7184 S8 sfnCycle = cell->tddHqSfnCycle;
7185 U8 numUlHarq = rgSchTddUlNumHarqProcTbl[ulDlCfgIdx]
7187 /* TRACE 5 Changes */
7188 TRC2(rgSCHCmnGetUlHqProcIdx);
7190 /* Calculate the number of UL SF in one SFN */
7191 numUlSfInSfn = RGSCH_NUM_SUB_FRAMES -
7192 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7194 /* Check for the SFN wrap around case */
7195 if(cell->crntTime.sfn == 1023 && timeInfo->sfn == 0)
7199 else if(cell->crntTime.sfn == 0 && timeInfo->sfn == 1023)
7201 /* sfnCycle decremented by 1 */
/* Modular decrement keeps sfnCycle in [0, numUlHarq). */
7202 sfnCycle = (sfnCycle + numUlHarq-1) % numUlHarq;
7204 /* Calculate the total number of UL sf */
7205 /* -1 is done since uplink sf are counted from 0 */
7206 numUlSf = numUlSfInSfn * (timeInfo->sfn + (sfnCycle*1024)) +
7207 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->slot] - 1;
7209 procId = numUlSf % numUlHarq;
7215 /* UL_ALLOC_CHANGES */
7216 /***********************************************************
7218 * Func : rgSCHCmnUlFreeAlloc
7220 * Desc : Free an allocation - invokes UHM and releases
7221 * alloc for the scheduler
7222 * Doest need subframe as argument
7230 **********************************************************/
/*
 * rgSCHCmnUlFreeAlloc
 * Frees a UL allocation without needing the subframe argument: releases the
 * allocation back to the scheduler and frees its UL HARQ process via UHM.
 * For a Msg3 allocation whose HARQ retransmissions are exhausted without a
 * CRC indication, the whole RA control block (and its RNTI) is deleted.
 * Fix: TRC2 previously named rgSCHCmnUlFreeAllocation (the sibling
 * function), corrupting function-trace output; corrected to this function.
 */
7232 PUBLIC Void rgSCHCmnUlFreeAlloc
7238 PUBLIC Void rgSCHCmnUlFreeAlloc(cell, alloc)
7240 RgSchUlAlloc *alloc;
7243 RgSchUlHqProcCb *hqProc;
7244 TRC2(rgSCHCmnUlFreeAlloc);
7248 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7249 if ((alloc->hqProc->remTx == 0) &&
7250 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7253 RgSchRaCb *raCb = alloc->raCb;
7254 rgSCHUhmFreeProc(alloc->hqProc, cell);
7255 rgSCHUtlUlAllocRelease(alloc);
7256 rgSCHRamDelRaCb(cell, raCb, TRUE);
/* Normal path: cache hqProc first — 'alloc' is invalid after release. */
7261 hqProc = alloc->hqProc;
7262 rgSCHUtlUlAllocRelease(alloc);
7263 rgSCHUhmFreeProc(hqProc, cell);
7268 /***********************************************************
7270 * Func : rgSCHCmnUlFreeAllocation
7272 * Desc : Free an allocation - invokes UHM and releases
7273 * alloc for the scheduler
7281 **********************************************************/
/*
 * rgSCHCmnUlFreeAllocation
 * Frees a UL allocation belonging to UL subframe 'sf': frees the HARQ
 * process via UHM and releases the allocation from the subframe. For a
 * Msg3 allocation whose retransmissions are exhausted without CRC success,
 * the RA control block (and its RNTI) is deleted as well.
 * NOTE(review): unlike rgSCHCmnUlFreeAlloc, the normal path here frees the
 * HARQ proc BEFORE releasing the alloc — presumably safe because 'alloc'
 * stays valid until rgSCHUtlUlAllocRls; confirm intent.
 */
7283 PUBLIC Void rgSCHCmnUlFreeAllocation
7290 PUBLIC Void rgSCHCmnUlFreeAllocation(cell, sf, alloc)
7293 RgSchUlAlloc *alloc;
7296 RgSchUlHqProcCb *hqProc;
7298 TRC2(rgSCHCmnUlFreeAllocation);
7302 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7303 if ((alloc->hqProc->remTx == 0) &&
7304 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7307 RgSchRaCb *raCb = alloc->raCb;
7308 rgSCHUhmFreeProc(alloc->hqProc, cell);
7309 rgSCHUtlUlAllocRls(sf, alloc);
7310 rgSCHRamDelRaCb(cell, raCb, TRUE);
7315 hqProc = alloc->hqProc;
7316 rgSCHUhmFreeProc(hqProc, cell);
7318 /* re-setting the PRB count while freeing the allocations */
7321 rgSCHUtlUlAllocRls(sf, alloc);
7327 * @brief This function implements PDCCH allocation for an UE
7328 * in the currently running subframe.
7332 * Function: rgSCHCmnPdcchAllocCrntSf
7333 * Purpose: This function determines current DL subframe
7334 * and UE DL CQI to call the actual pdcch allocator
7336 * Note that this function is called only
7337 * when PDCCH request needs to be made during
7338 * uplink scheduling.
7340 * Invoked by: Scheduler
7342 * @param[in] RgSchCellCb *cell
7343 * @param[in] RgSchUeCb *ue
7344 * @return RgSchPdcch *
7345 * -# NULLP when unsuccessful
/*
 * rgSCHCmnPdcchAllocCrntSf
 * Allocates a PDCCH for 'ue' in the DL subframe currently being scheduled
 * for UL grants (crntTime advanced by TFU_ULCNTRL_DLDELTA). Uses the common
 * search space while ue->allocCmnUlPdcch is set (pre-SCell-ready), otherwise
 * the UE search space sized by the CW0 CQI. Returns NULLP on failure.
 */
7348 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf
7354 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf(cell, ue)
7359 CmLteTimingInfo frm = cell->crntTime;
7360 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7362 RgSchPdcch *pdcch = NULLP;
7364 TRC2(rgSCHCmnPdcchAllocCrntSf);
7365 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7366 sf = rgSCHUtlSubFrmGet(cell, frm);
7369 if (ue->allocCmnUlPdcch)
7371 pdcch = rgSCHCmnCmnPdcchAlloc(cell, sf);
7372 /* Since CRNTI Scrambled */
7375 pdcch->dciNumOfBits = ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
/* UE search space: 5GTF path requests DCI format A1 (DCI-0 call retired). */
7381 //pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, y, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_0, FALSE);
7382 pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_A1, FALSE);
7387 /***********************************************************
7389 * Func : rgSCHCmnUlAllocFillNdmrs
7391 * Desc : Determines and fills N_dmrs for a UE uplink
7396 * Notes: N_dmrs determination is straightforward, so
7397 * it is configured per subband
7401 **********************************************************/
/*
 * rgSCHCmnUlAllocFillNdmrs
 * Fills the UL grant's N_dmrs from the per-subband cyclic-shift table,
 * indexed by the allocation's starting subband.
 */
7403 PUBLIC Void rgSCHCmnUlAllocFillNdmrs
7405 RgSchCmnUlCell *cellUl,
7409 PUBLIC Void rgSCHCmnUlAllocFillNdmrs(cellUl, alloc)
7410 RgSchCmnUlCell *cellUl;
7411 RgSchUlAlloc *alloc;
7414 TRC2(rgSCHCmnUlAllocFillNdmrs);
7415 alloc->grnt.nDmrs = cellUl->dmrsArr[alloc->sbStart];
7419 /***********************************************************
7421 * Func : rgSCHCmnUlAllocLnkHqProc
7423 * Desc : Links a new allocation for an UE with the
7424 * appropriate HARQ process of the UE.
7432 **********************************************************/
/*
 * rgSCHCmnUlAllocLnkHqProc
 * Links a new UL allocation to the UE's UL HARQ process: adaptive
 * retransmission path when isRetx is set, otherwise a fresh transmission
 * registered with UHM using the HARQ entity's max-retx limit.
 * NOTE(review): the isRetx branch structure is partially elided here.
 */
7434 PUBLIC Void rgSCHCmnUlAllocLnkHqProc
7437 RgSchUlAlloc *alloc,
7438 RgSchUlHqProcCb *proc,
7442 PUBLIC Void rgSCHCmnUlAllocLnkHqProc(ue, alloc, proc, isRetx)
7444 RgSchUlAlloc *alloc;
7445 RgSchUlHqProcCb *proc;
7449 TRC2(rgSCHCmnUlAllocLnkHqProc);
7453 rgSCHCmnUlAdapRetx(alloc, proc);
7457 #ifdef LTE_L2_MEAS /* L2_COUNTERS */
7460 rgSCHUhmNewTx(proc, (((RgUeUlHqCb*)proc->hqEnt)->maxHqRetx), alloc);
7466 * @brief This function releases a PDCCH in the subframe that is
7467 * currently being allocated for.
7471 * Function: rgSCHCmnPdcchRlsCrntSf
7472 * Purpose: This function determines current DL subframe
7473 * which is considered for PDCCH allocation,
7474 * and then calls the actual function that
7475 * releases a PDCCH in a specific subframe.
7476 * Note that this function is called only
7477 * when PDCCH release needs to be made during
7478 * uplink scheduling.
7480 * Invoked by: Scheduler
7482 * @param[in] RgSchCellCb *cell
7483 * @param[in] RgSchPdcch *pdcch
/*
 * rgSCHCmnPdcchRlsCrntSf
 * Returns 'pdcch' to the pool of the DL subframe currently considered for
 * UL-scheduling PDCCH allocation (crntTime + TFU_ULCNTRL_DLDELTA) — the
 * counterpart of rgSCHCmnPdcchAllocCrntSf.
 */
7487 PUBLIC Void rgSCHCmnPdcchRlsCrntSf
7493 PUBLIC Void rgSCHCmnPdcchRlsCrntSf(cell, pdcch)
7498 CmLteTimingInfo frm = cell->crntTime;
7501 TRC2(rgSCHCmnPdcchRlsCrntSf);
7503 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7504 sf = rgSCHUtlSubFrmGet(cell, frm);
7505 rgSCHUtlPdcchPut(cell, &sf->pdcchInfo, pdcch);
7508 /***********************************************************
7510 * Func : rgSCHCmnUlFillPdcchWithAlloc
7512 * Desc : Fills a PDCCH with format 0 information.
7520 **********************************************************/
/*
 * rgSCHCmnUlFillPdcchWithAlloc
 * Fills 'pdcch' with the UL grant carried by 'alloc' using the 5GTF DCI
 * formats: A1 (formatType 0) or A2 (formatType 1) as selected by
 * alloc->grnt.dciFrmt. Grant-derived fields (RB assignment, MCS, HARQ proc
 * ID, NDI, TPC, SCID, PMI, xPUSCH range, UCI-on-xPUSCH) come from the
 * allocation; the remaining CSI/SRS/beam fields are hardcoded to 0 for now.
 * Logs an error for any other DCI format.
 * Fix: corrected typos in the error log text ("icorrect"->"incorrect",
 * "dciForamt"->"dciFormat") so the message is searchable and readable.
 */
7522 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc
7525 RgSchUlAlloc *alloc,
7529 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc(pdcch, alloc, ue)
7531 RgSchUlAlloc *alloc;
7536 TRC2(rgSCHCmnUlFillPdcchWithAlloc);
7539 pdcch->rnti = alloc->rnti;
7540 //pdcch->dci.dciFormat = TFU_DCI_FORMAT_A2;
7541 pdcch->dci.dciFormat = alloc->grnt.dciFrmt;
7543 //Currently hardcoding values here.
7544 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
7545 switch(pdcch->dci.dciFormat)
7547 case TFU_DCI_FORMAT_A1:
7549 pdcch->dci.u.formatA1Info.formatType = 0;
7550 pdcch->dci.u.formatA1Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7551 pdcch->dci.u.formatA1Info.xPUSCH_TxTiming = 0;
7552 pdcch->dci.u.formatA1Info.RBAssign = alloc->grnt.rbAssign;
7553 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7554 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7555 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7556 pdcch->dci.u.formatA1Info.CSI_BSI_BRI_Req = 0;
7557 pdcch->dci.u.formatA1Info.CSIRS_BRRS_TxTiming = 0;
7558 pdcch->dci.u.formatA1Info.CSIRS_BRRS_SymbIdx = 0;
7559 pdcch->dci.u.formatA1Info.CSIRS_BRRS_ProcInd = 0;
7560 pdcch->dci.u.formatA1Info.numBSI_Reports = 0;
7561 pdcch->dci.u.formatA1Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7562 pdcch->dci.u.formatA1Info.beamSwitch = 0;
7563 pdcch->dci.u.formatA1Info.SRS_Config = 0;
7564 pdcch->dci.u.formatA1Info.SRS_Symbol = 0;
7565 pdcch->dci.u.formatA1Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7566 pdcch->dci.u.formatA1Info.SCID = alloc->grnt.SCID;
7567 pdcch->dci.u.formatA1Info.PMI = alloc->grnt.PMI;
7568 pdcch->dci.u.formatA1Info.UL_PCRS = 0;
7569 pdcch->dci.u.formatA1Info.tpcCmd = alloc->grnt.tpc;
/* Format A2 mirrors A1 field-for-field; only formatType differs (1 vs 0). */
7572 case TFU_DCI_FORMAT_A2:
7574 pdcch->dci.u.formatA2Info.formatType = 1;
7575 pdcch->dci.u.formatA2Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7576 pdcch->dci.u.formatA2Info.xPUSCH_TxTiming = 0;
7577 pdcch->dci.u.formatA2Info.RBAssign = alloc->grnt.rbAssign;
7578 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7579 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7580 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7581 pdcch->dci.u.formatA2Info.CSI_BSI_BRI_Req = 0;
7582 pdcch->dci.u.formatA2Info.CSIRS_BRRS_TxTiming = 0;
7583 pdcch->dci.u.formatA2Info.CSIRS_BRRS_SymbIdx = 0;
7584 pdcch->dci.u.formatA2Info.CSIRS_BRRS_ProcInd = 0;
7585 pdcch->dci.u.formatA2Info.numBSI_Reports = 0;
7586 pdcch->dci.u.formatA2Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7587 pdcch->dci.u.formatA2Info.beamSwitch = 0;
7588 pdcch->dci.u.formatA2Info.SRS_Config = 0;
7589 pdcch->dci.u.formatA2Info.SRS_Symbol = 0;
7590 pdcch->dci.u.formatA2Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7591 pdcch->dci.u.formatA2Info.SCID = alloc->grnt.SCID;
7592 pdcch->dci.u.formatA2Info.PMI = alloc->grnt.PMI;
7593 pdcch->dci.u.formatA2Info.UL_PCRS = 0;
7594 pdcch->dci.u.formatA2Info.tpcCmd = alloc->grnt.tpc;
7598 RLOG1(L_ERROR," 5GTF_ERROR UL Allocator's incorrect "
7599 "dciFormat Fill RNTI:%d",alloc->rnti);
7607 /***********************************************************
7609 * Func : rgSCHCmnUlAllocFillTpc
7611 * Desc : Determines and fills TPC for an UE allocation.
7619 **********************************************************/
/*
 * rgSCHCmnUlAllocFillTpc
 * Fills the UL grant's TPC command for the UE from the power-control
 * module (PUSCH TPC).
 */
7621 PUBLIC Void rgSCHCmnUlAllocFillTpc
7628 PUBLIC Void rgSCHCmnUlAllocFillTpc(cell, ue, alloc)
7631 RgSchUlAlloc *alloc;
7634 TRC2(rgSCHCmnUlAllocFillTpc);
7635 alloc->grnt.tpc = rgSCHPwrPuschTpcForUe(cell, ue);
7640 /***********************************************************
7642 * Func : rgSCHCmnAddUeToRefreshQ
7644 * Desc : Adds a UE to refresh queue, so that the UE is
7645 * periodically triggered to refresh it's GBR and
7654 **********************************************************/
/*
 * rgSCHCmnAddUeToRefreshQ
 * Arms the UE's refresh timer in the scheduler's timing-queue so the UE is
 * periodically triggered (RG_SCH_CMN_EVNT_UE_REFRESH) to refresh its
 * GBR/AMBR accounting; 'wait' is the delay (timer-queue ticks, presumably —
 * confirm units at the elided cmPrcTmr call).
 */
7656 PRIVATE Void rgSCHCmnAddUeToRefreshQ
7663 PRIVATE Void rgSCHCmnAddUeToRefreshQ(cell, ue, wait)
7669 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
7671 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
7673 TRC2(rgSCHCmnAddUeToRefreshQ);
7676 cmMemset((U8 *)&arg, 0, sizeof(arg));
7677 arg.tqCp = &sched->tmrTqCp;
7678 arg.tq = sched->tmrTq;
7679 arg.timers = &ueSchd->tmr;
7683 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
7690 * @brief Perform UE reset procedure.
7694 * Function : rgSCHCmnUlUeReset
7696 * This functions performs BSR resetting and
7697 * triggers UL specific scheduler
7698 * to Perform UE reset procedure.
7700 * @param[in] RgSchCellCb *cell
7701 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnUlUeReset
 * UL part of the UE reset procedure: clears BSR/AMBR bookkeeping, resets
 * every LCG's reported BS and effective GBR/MBR back to configured values,
 * deletes outstanding UL allocation records, clears the SR-grant flag, and
 * finally delegates to the UL-specific scheduler's reset hook.
 */
7705 PRIVATE Void rgSCHCmnUlUeReset
7711 PRIVATE Void rgSCHCmnUlUeReset(cell, ue)
7716 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7717 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7719 RgSchCmnLcg *lcgCmn;
7721 RgSchCmnAllocRecord *allRcd;
7722 TRC2(rgSCHCmnUlUeReset);
7724 ue->ul.minReqBytes = 0;
7725 ue->ul.totalBsr = 0;
7727 ue->ul.nonGbrLcgBs = 0;
7728 ue->ul.effAmbr = ue->ul.cfgdAmbr;
/* Walk the UE's UL allocation records (loop body partially elided). */
7730 node = ueUl->ulAllocLst.first;
7733 allRcd = (RgSchCmnAllocRecord *)node->node;
/* Restore each LCG's effective rates to its configured values. */
7737 for(lcgCnt = 0; lcgCnt < RGSCH_MAX_LCG_PER_UE; lcgCnt++)
7739 lcgCmn = RG_SCH_CMN_GET_UL_LCG(&ue->ul.lcgArr[lcgCnt]);
7741 lcgCmn->reportedBs = 0;
7742 lcgCmn->effGbr = lcgCmn->cfgdGbr;
7743 lcgCmn->effDeltaMbr = lcgCmn->deltaMbr;
7745 rgSCHCmnUlUeDelAllocs(cell, ue);
7747 ue->isSrGrant = FALSE;
/* Scheduler-specific (e.g. RR/PFS) UL reset hook. */
7749 cellSchd->apisUl->rgSCHUlUeReset(cell, ue);
7751 /* Stack Crash problem for TRACE5 changes. Added the return below */
7757 * @brief RESET UL CQI and DL CQI&RI to conservative values
7758 * for a reestablishing UE.
7762 * Function : rgSCHCmnResetRiCqi
7764 * RESET UL CQI and DL CQI&RI to conservative values
7765 * for a reestablishing UE
7767 * @param[in] RgSchCellCb *cell
7768 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnResetRiCqi
 * For a re-establishing UE, resets UL CQI info and forces DL CQI (both
 * codewords) to the conservative CCCH CQI with RI=1; forces transmit
 * diversity for TM3/TM4/TM6 until fresh PMI/RI reports arrive, recomputes
 * the UE's DL allocation limits, and requests an early aperiodic CQI if
 * one is configured.
 */
7772 PRIVATE Void rgSCHCmnResetRiCqi
7778 PRIVATE Void rgSCHCmnResetRiCqi(cell, ue)
7783 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7784 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
7785 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7786 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7788 TRC2(rgSCHCmnResetRiCqi);
7790 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
7791 cell->isCpUlExtend);
/* Conservative DL starting point: CCCH CQI on both codewords, rank 1. */
7793 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
7794 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
7795 ueDl->mimoInfo.ri = 1;
/* Closed-loop modes lose their PMI on reset -> force TD without PMI. */
7796 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
7797 (ue->mimoInfo.txMode == RGR_UE_TM_6))
7799 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
7801 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
7803 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
/* Two variants — eMTC-aware vs plain; selecting #ifdef elided here. */
7806 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
7808 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
7812 /* Request for an early Aper CQI in case of reest */
7813 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
7814 if(acqiCb && acqiCb->aCqiCfg.pres)
7816 acqiCb->aCqiTrigWt = 0;
7824 * @brief Perform UE reset procedure.
7828 * Function : rgSCHCmnDlUeReset
7830 * This functions performs BO resetting and
7831 * triggers DL specific scheduler
7832 * to Perform UE reset procedure.
7834 * @param[in] RgSchCellCb *cell
7835 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnDlUeReset
 * DL part of the UE reset procedure: removes the UE from the PDCCH-order
 * queue (if queued), removes it from the TA list or stops a running TA
 * timer, then delegates to the DL-specific scheduler reset hook and (in
 * carrier-aggregation builds) the SCell DL reset.
 */
7839 PRIVATE Void rgSCHCmnDlUeReset
7845 PRIVATE Void rgSCHCmnDlUeReset(cell, ue)
7850 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7851 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
7852 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7854 TRC2(rgSCHCmnDlUeReset);
7856 if (ueDl->rachInfo.poLnk.node != NULLP)
7858 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
7861 /* Fix: syed Remove from TA List if this UE is there.
7862 * If TA Timer is running. Stop it */
7863 if (ue->dlTaLnk.node)
7865 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
7866 ue->dlTaLnk.node = (PTR)NULLP;
7868 else if (ue->taTmr.tmrEvnt != TMR_NONE)
7870 rgSCHTmrStopTmr(cell, ue->taTmr.tmrEvnt, ue);
7873 cellSchd->apisDl->rgSCHDlUeReset(cell, ue);
/* SCell reset (presumably under an elided #ifdef LTE_ADV — confirm). */
7877 rgSCHSCellDlUeReset(cell,ue);
7883 * @brief Perform UE reset procedure.
7887 * Function : rgSCHCmnUeReset
7889 * This functions triggers specific scheduler
7890 * to Perform UE reset procedure.
7892 * @param[in] RgSchCellCb *cell
7893 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnUeReset: full UE reset procedure. Clears RACH/power/UL/DL
 * scheduler state, resets CQI/RI on every configured cell, triggers an
 * aperiodic CQI for the PCell if configured, and tells MAC to reset the
 * UE's HARQ entity.
 * NOTE(review): declarations of `pst` and `idx` are elided in this
 * extract, as are braces/guards between the visible statements.
 */
7899 PUBLIC Void rgSCHCmnUeReset
7905 PUBLIC Void rgSCHCmnUeReset(cell, ue)
7912 RgInfResetHqEnt hqEntRstInfo;
7914 TRC2(rgSCHCmnUeReset);
7915 /* RACHO: remove UE from pdcch, handover and rapId assoc Qs */
7916 rgSCHCmnDelRachInfo(cell, ue);
/* Reset power control, then UL and DL scheduler state for this UE */
7918 rgSCHPwrUeReset(cell, ue);
7920 rgSCHCmnUlUeReset(cell, ue);
7921 rgSCHCmnDlUeReset(cell, ue);
7924 /* Making allocCmnUlPdcch TRUE to allocate DCI0/1A from Common search space.
7925 As because multiple cells are added hence 2 bits CqiReq is there
7926 This flag will be set to FALSE once we will get Scell READY */
7927 ue->allocCmnUlPdcch = TRUE;
7930 /* Fix : syed RESET UL CQI and DL CQI&RI to conservative values
7931 * for a reestablishing UE */
7932 /*Reset Cqi Config for all the configured cells*/
7933 for (idx = 0;idx < CM_LTE_MAX_CELLS; idx++)
7935 if (ue->cellInfo[idx] != NULLP)
7937 rgSCHCmnResetRiCqi(ue->cellInfo[idx]->cell, ue);
7940 /*After Reset Trigger APCQI for Pcell*/
7941 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
7942 if(pCellInfo->acqiCb.aCqiCfg.pres)
7944 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
7947 /* sending HqEnt reset to MAC */
7948 hqEntRstInfo.cellId = cell->cellId;
7949 hqEntRstInfo.crnti = ue->ueId;
/* Build a post structure towards the MAC instance and send the reset */
7951 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
7952 RgSchMacRstHqEnt(&pst,&hqEntRstInfo);
7958 * @brief UE out of MeasGap or AckNackReptn.
7962 * Function : rgSCHCmnActvtUlUe
7964 * This functions triggers specific scheduler
7965 * to start considering it for scheduling.
7967 * @param[in] RgSchCellCb *cell
7968 * @param[in] RgSchUeCb *ue
7974 PUBLIC Void rgSCHCmnActvtUlUe
7980 PUBLIC Void rgSCHCmnActvtUlUe(cell, ue)
7985 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7986 TRC2(rgSCHCmnActvtUlUe);
7988 /* : take care of this in UL retransmission */
7989 cellSchd->apisUl->rgSCHUlActvtUe(cell, ue);
7994 * @brief UE out of MeasGap or AckNackReptn.
7998 * Function : rgSCHCmnActvtDlUe
8000 * This functions triggers specific scheduler
8001 * to start considering it for scheduling.
8003 * @param[in] RgSchCellCb *cell
8004 * @param[in] RgSchUeCb *ue
8010 PUBLIC Void rgSCHCmnActvtDlUe
8016 PUBLIC Void rgSCHCmnActvtDlUe(cell, ue)
8021 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8022 TRC2(rgSCHCmnActvtDlUe);
8024 cellSchd->apisDl->rgSCHDlActvtUe(cell, ue);
8029 * @brief This API is invoked to indicate scheduler of a CRC indication.
8033 * Function : rgSCHCmnHdlUlTransInd
8034 * This API is invoked to indicate scheduler of a CRC indication.
8036 * @param[in] RgSchCellCb *cell
8037 * @param[in] RgSchUeCb *ue
8038 * @param[in] CmLteTimingInfo timingInfo
8043 PUBLIC Void rgSCHCmnHdlUlTransInd
8047 CmLteTimingInfo timingInfo
8050 PUBLIC Void rgSCHCmnHdlUlTransInd(cell, ue, timingInfo)
8053 CmLteTimingInfo timingInfo;
8056 TRC2(rgSCHCmnHdlUlTransInd);
8058 /* Update the latest UL dat/sig transmission time */
8059 RGSCHCPYTIMEINFO(timingInfo, ue->ul.ulTransTime);
8060 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
8062 /* Some UL Transmission from this UE.
8063 * Activate this UE if it was inactive */
8064 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8065 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8073 * @brief Compute the minimum Rank based on Codebook subset
8074 * restriction configuration for 4 Tx Ports and Tx Mode 4.
8078 * Function : rgSCHCmnComp4TxMode4
8080 * Depending on BitMap set at CBSR during Configuration
8081 * - return the least possible Rank
8084 * @param[in] U32 *pmiBitMap
8085 * @return RgSchCmnRank
8088 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4
8093 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4(pmiBitMap)
8097 U32 bitMap0, bitMap1;
8098 TRC2(rgSCHCmnComp4TxMode4);
8099 bitMap0 = pmiBitMap[0];
8100 bitMap1 = pmiBitMap[1];
8101 if((bitMap1) & 0xFFFF)
8103 RETVALUE (RG_SCH_CMN_RANK_1);
8105 else if((bitMap1>>16) & 0xFFFF)
8107 RETVALUE (RG_SCH_CMN_RANK_2);
8109 else if((bitMap0) & 0xFFFF)
8111 RETVALUE (RG_SCH_CMN_RANK_3);
8113 else if((bitMap0>>16) & 0xFFFF)
8115 RETVALUE (RG_SCH_CMN_RANK_4);
8119 RETVALUE (RG_SCH_CMN_RANK_1);
8125 * @brief Compute the minimum Rank based on Codebook subset
8126 * restriction configuration for 2 Tx Ports and Tx Mode 4.
8130 * Function : rgSCHCmnComp2TxMode4
8132 * Depending on BitMap set at CBSR during Configuration
8133 * - return the least possible Rank
8136 * @param[in] U32 *pmiBitMap
8137 * @return RgSchCmnRank
8140 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4
8145 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4(pmiBitMap)
8150 TRC2(rgSCHCmnComp2TxMode4);
8151 bitMap0 = pmiBitMap[0];
8152 if((bitMap0>>26)& 0x0F)
8154 RETVALUE (RG_SCH_CMN_RANK_1);
8156 else if((bitMap0>>30) & 3)
8158 RETVALUE (RG_SCH_CMN_RANK_2);
8162 RETVALUE (RG_SCH_CMN_RANK_1);
8167 * @brief Compute the minimum Rank based on Codebook subset
8168 * restriction configuration for 4 Tx Ports and Tx Mode 3.
8172 * Function : rgSCHCmnComp4TxMode3
8174 * Depending on BitMap set at CBSR during Configuration
8175 * - return the least possible Rank
8178 * @param[in] U32 *pmiBitMap
8179 * @return RgSchCmnRank
8182 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3
8187 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3(pmiBitMap)
8192 TRC2(rgSCHCmnComp4TxMode3);
8193 bitMap0 = pmiBitMap[0];
8194 if((bitMap0>>28)& 1)
8196 RETVALUE (RG_SCH_CMN_RANK_1);
8198 else if((bitMap0>>29) &1)
8200 RETVALUE (RG_SCH_CMN_RANK_2);
8202 else if((bitMap0>>30) &1)
8204 RETVALUE (RG_SCH_CMN_RANK_3);
8206 else if((bitMap0>>31) &1)
8208 RETVALUE (RG_SCH_CMN_RANK_4);
8212 RETVALUE (RG_SCH_CMN_RANK_1);
8217 * @brief Compute the minimum Rank based on Codebook subset
8218 * restriction configuration for 2 Tx Ports and Tx Mode 3.
8222 * Function : rgSCHCmnComp2TxMode3
8224 * Depending on BitMap set at CBSR during Configuration
8225 * - return the least possible Rank
8228 * @param[in] U32 *pmiBitMap
8229 * @return RgSchCmnRank
8232 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3
8237 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3(pmiBitMap)
8242 TRC2(rgSCHCmnComp2TxMode3);
8243 bitMap0 = pmiBitMap[0];
8244 if((bitMap0>>30)& 1)
8246 RETVALUE (RG_SCH_CMN_RANK_1);
8248 else if((bitMap0>>31) &1)
8250 RETVALUE (RG_SCH_CMN_RANK_2);
8254 RETVALUE (RG_SCH_CMN_RANK_1);
8259 * @brief Compute the minimum Rank based on Codebook subset
8260 * restriction configuration.
8264 * Function : rgSCHCmnComputeRank
8266 * Depending on Num Tx Ports and Transmission mode
8267 * - return the least possible Rank
8270 * @param[in] RgrTxMode txMode
8271 * @param[in] U32 *pmiBitMap
8272 * @param[in] U8 numTxPorts
8273 * @return RgSchCmnRank
8276 PRIVATE RgSchCmnRank rgSCHCmnComputeRank
8283 PRIVATE RgSchCmnRank rgSCHCmnComputeRank(txMode, pmiBitMap, numTxPorts)
8289 TRC2(rgSCHCmnComputeRank);
8291 if (numTxPorts ==2 && txMode == RGR_UE_TM_3)
8293 RETVALUE (rgSCHCmnComp2TxMode3(pmiBitMap));
8295 else if (numTxPorts ==4 && txMode == RGR_UE_TM_3)
8297 RETVALUE (rgSCHCmnComp4TxMode3(pmiBitMap));
8299 else if (numTxPorts ==2 && txMode == RGR_UE_TM_4)
8301 RETVALUE (rgSCHCmnComp2TxMode4(pmiBitMap));
8303 else if (numTxPorts ==4 && txMode == RGR_UE_TM_4)
8305 RETVALUE (rgSCHCmnComp4TxMode4(pmiBitMap));
8309 RETVALUE (RG_SCH_CMN_RANK_1);
8316 * @brief Harq Entity Deinitialization for CMN SCH.
8320 * Function : rgSCHCmnDlDeInitHqEnt
8322 * Harq Entity Deinitialization for CMN SCH
8324 * @param[in] RgSchCellCb *cell
8325 * @param[in] RgSchDlHqEnt *hqE
8328 /*KWORK_FIX:Changed function return type to void */
/*
 * rgSCHCmnDlDeInitHqEnt: HARQ entity de-initialization for the common
 * scheduler — lets the DL scheduler de-init first, then frees the common
 * per-process control blocks.
 * NOTE(review): declarations of `ret`/`cnt`, the condition guarding the
 * free loop (see "Free only If ..." comment) and the #ifdef around the
 * LAA call are elided from this extract.
 */
8330 PUBLIC Void rgSCHCmnDlDeInitHqEnt
8336 PUBLIC Void rgSCHCmnDlDeInitHqEnt(cell, hqE)
8341 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8342 RgSchDlHqProcCb *hqP;
8346 TRC2(rgSCHCmnDlDeInitHqEnt);
/* Give the DL scheduler a chance to release its own per-HqEnt state */
8348 ret = cellSchd->apisDl->rgSCHDlUeHqEntDeInit(cell, hqE);
8349 /* Free only If the Harq proc are created*/
8354 for(cnt = 0; cnt < hqE->numHqPrcs; cnt++)
8356 hqP = &hqE->procs[cnt];
/* Free the common-scheduler control block attached to each process */
8357 if ((RG_SCH_CMN_GET_DL_HQP(hqP)))
8359 rgSCHUtlFreeSBuf(cell->instIdx,
8360 (Data**)(&(hqP->sch)), (sizeof(RgSchCmnDlHqProc)));
/* LAA-specific HARQ proc teardown (guarding #ifdef appears elided) */
8364 rgSCHLaaDeInitDlHqProcCb (cell, hqE);
8371 * @brief Harq Entity initialization for CMN SCH.
8375 * Function : rgSCHCmnDlInitHqEnt
8377 * Harq Entity initialization for CMN SCH
8379 * @param[in] RgSchCellCb *cell
8380 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnDlInitHqEnt: HARQ entity initialization for the common
 * scheduler — allocates a common-scheduler control block per DL HARQ
 * process, then invokes the (EMTC or regular) DL scheduler's init hook.
 * Returns ROK/RFAILED (S16); the failure-path RETVALUE lines are elided
 * from this extract.
 */
8386 PUBLIC S16 rgSCHCmnDlInitHqEnt
8392 PUBLIC S16 rgSCHCmnDlInitHqEnt(cell, hqEnt)
8394 RgSchDlHqEnt *hqEnt;
8398 RgSchDlHqProcCb *hqP;
8401 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8402 TRC2(rgSCHCmnDlInitHqEnt);
/* One RgSchCmnDlHqProc per HARQ process */
8404 for(cnt = 0; cnt < hqEnt->numHqPrcs; cnt++)
8406 hqP = &hqEnt->procs[cnt];
8407 if (rgSCHUtlAllocSBuf(cell->instIdx,
8408 (Data**)&(hqP->sch), (sizeof(RgSchCmnDlHqProc))) != ROK)
/* EMTC UEs go through the EMTC DL scheduler's init; others through the
 * regular DL scheduler (the #else between the two calls is elided) */
8414 if((cell->emtcEnable) &&(hqEnt->ue->isEmtcUe))
8416 if(ROK != cellSchd->apisEmtcDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8425 if(ROK != cellSchd->apisDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8432 } /* rgSCHCmnDlInitHqEnt */
8435 * @brief This function computes distribution of refresh period
8439 * Function: rgSCHCmnGetRefreshDist
8440 * Purpose: This function computes distribution of refresh period
8441 * This is required to align set of UEs refresh
8442 * around the different consecutive subframe.
8444 * Invoked by: rgSCHCmnGetRefreshPerDist
8446 * @param[in] RgSchCellCb *cell
8447 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnGetRefreshDist: picks a refresh offset for the UE so that QoS
 * refreshes of different UEs are spread over consecutive subframes.
 * Scans the per-offset UE counters for the first offset with spare
 * capacity; if all groups are full, falls back to the last offset.
 * Returns the chosen offset (U8).
 * NOTE(review): the in-loop return after a successful assignment appears
 * elided from this extract, along with braces and local declarations.
 */
8452 PRIVATE U8 rgSCHCmnGetRefreshDist
8458 PRIVATE U8 rgSCHCmnGetRefreshDist(cell, ue)
8465 Inst inst = cell->instIdx;
8467 TRC2(rgSCHCmnGetRefreshDist);
8469 for(refOffst = 0; refOffst < RGSCH_MAX_REFRESH_OFFSET; refOffst++)
/* First offset whose group is not yet full takes this UE */
8471 if(cell->refreshUeCnt[refOffst] < RGSCH_MAX_REFRESH_GRPSZ)
8473 cell->refreshUeCnt[refOffst]++;
8474 ue->refreshOffset = refOffst;
8475 /* printf("UE[%d] refresh offset[%d]. Cell refresh ue count[%d].\n", ue->ueId, refOffst, cell->refreshUeCnt[refOffst]); */
8480 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Allocation of refresh distribution failed\n"));
8481 /* We should not enter here normally, but incase of failure, allocating from last offset*/
8482 cell->refreshUeCnt[refOffst-1]++;
8483 ue->refreshOffset = refOffst-1;
8485 RETVALUE(refOffst-1);
8488 * @brief This function computes initial Refresh Wait Period.
8492 * Function: rgSCHCmnGetRefreshPer
8493 * Purpose: This function computes initial Refresh Wait Period.
8494 * This is required to align multiple UEs refresh
8495 * around the same time.
8497 * Invoked by: rgSCHCmnGetRefreshPer
8499 * @param[in] RgSchCellCb *cell
8500 * @param[in] RgSchUeCb *ue
8501 * @param[in] U32 *waitPer
8506 PRIVATE Void rgSCHCmnGetRefreshPer
8513 PRIVATE Void rgSCHCmnGetRefreshPer(cell, ue, waitPer)
8522 TRC2(rgSCHCmnGetRefreshPer);
8524 refreshPer = RG_SCH_CMN_REFRESH_TIME * RG_SCH_CMN_REFRESH_TIMERES;
8525 crntSubFrm = cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.slot;
8526 /* Fix: syed align multiple UEs to refresh at same time */
8527 *waitPer = refreshPer - (crntSubFrm % refreshPer);
8528 *waitPer = RGSCH_CEIL(*waitPer, RG_SCH_CMN_REFRESH_TIMERES);
8529 *waitPer = *waitPer + rgSCHCmnGetRefreshDist(cell, ue);
8537 * @brief UE initialisation for scheduler.
8541 * Function : rgSCHCmnRgrSCellUeCfg
8543 * This functions intialises UE specific scheduler
8544 * information for SCELL
8545 * 0. Perform basic validations
8546 * 1. Allocate common sched UE cntrl blk
8547 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
8549 * 4. Perform DLFS cfg
8551 * @param[in] RgSchCellCb *cell
8552 * @param[in] RgSchUeCb *ue
8553 * @param[out] RgSchErrInfo *err
/*
 * rgSCHCmnRgrSCellUeCfg: per-UE scheduler initialisation for a secondary
 * cell (SCell): allocates the common scheduler UE control block, seeds the
 * SCell DL state from the PCell's, configures DL/DLFS scheduling, then
 * initialises the UL side (HARQ entity, allocation tracking, power, UL
 * scheduler). Returns ROK/RFAILED.
 * NOTE(review): this extract is missing many lines — braces, #ifdef
 * guards (EMTC/TDD/DLFS/UL-CA), several local declarations (ueDl, ueUl,
 * ret, ri, i, cnt) and the failure-path RETVALUE statements. Comments
 * below describe only what is visible.
 */
8559 PUBLIC S16 rgSCHCmnRgrSCellUeCfg
8563 RgrUeSecCellCfg *sCellInfoCfg,
8567 PUBLIC S16 rgSCHCmnRgrSCellUeCfg(sCell, ue, sCellInfoCfg, err)
8570 RgrUeSecCellCfg *sCellInfoCfg;
8577 RgSchCmnAllocRecord *allRcd;
8578 RgSchDlRbAlloc *allocInfo;
8579 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8581 RgSchCmnUlUe *ueUlPcell;
8582 RgSchCmnUe *pCellUeSchCmn;
8583 RgSchCmnUe *ueSchCmn;
8585 RgSchCmnDlUe *pCellUeDl;
8587 Inst inst = ue->cell->instIdx;
/* Map the SCell id into the UE's cellInfo index space */
8589 U32 idx = (U8)((sCell->cellId - rgSchCb[sCell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8590 TRC2(rgSCHCmnRgrSCellUeCfg);
8592 pCellUeSchCmn = RG_SCH_CMN_GET_UE(ue,ue->cell);
8593 pCellUeDl = &pCellUeSchCmn->dl;
8595 /* 1. Allocate Common sched control block */
8596 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8597 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8599 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Memory allocation FAILED\n"));
8600 err->errCause = RGSCHERR_SCH_CFG;
8603 ueSchCmn = RG_SCH_CMN_GET_UE(ue,sCell);
8605 /*2. Perform UEs downlink configuration */
8606 ueDl = &ueSchCmn->dl;
/* Start the SCell with the PCell's MIMO state */
8609 ueDl->mimoInfo = pCellUeDl->mimoInfo;
/* No PMI/RI feedback for the SCell yet: force TD for TM3/TM4/TM6 */
8611 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
8612 (ue->mimoInfo.txMode == RGR_UE_TM_6))
8614 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_NO_PMI);
8616 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
8618 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_RI_1);
8620 RGSCH_ARRAY_BOUND_CHECK(sCell->instIdx, rgUeCatTbl, pCellUeSchCmn->cmn.ueCat);
8621 ueDl->maxTbBits = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlTbBits;
/* Cat 6/7 UEs with 4 layers get the larger per-TB limit */
8624 ri = RGSCH_MIN(ri, sCell->numTxAntPorts);
8625 if(((CM_LTE_UE_CAT_6 == pCellUeSchCmn->cmn.ueCat )
8626 ||(CM_LTE_UE_CAT_7 == pCellUeSchCmn->cmn.ueCat))
8629 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[1];
8633 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[0];
8636 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
/* Soft-buffer split per HARQ process: TDD and FDD variants — the
 * selecting #ifdef LTE_TDD appears elided */
8638 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8639 rgSchTddDlNumHarqProcTbl[sCell->ulDlCfgIdx]);
8641 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8642 RGSCH_NUM_DL_HQ_PROC);
8645 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, ue->isEmtcUe);
8647 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, FALSE);
8651 /* ambrCfgd config moved to ueCb.dl, as it's not needed for per cell wise*/
8653 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, sCell);
8654 allocInfo->rnti = ue->ueId;
8656 /* Initializing the lastCfi value to current cfi value */
8657 ueDl->lastCfi = cellSchd->dl.currCfi;
/* DL scheduler-specific SCell configuration */
8659 if ((cellSchd->apisDl->rgSCHRgrSCellDlUeCfg(sCell, ue, err)) != ROK)
8661 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Spec Sched DL UE CFG FAILED\n"));
8665 /* TODO: enhance for DLFS RB Allocation for SCELLs in future dev */
8667 /* DLFS UE Config */
8668 if (cellSchd->dl.isDlFreqSel)
8670 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeCfg(sCell, ue, sCellInfoCfg, err)) != ROK)
8672 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS UE config FAILED\n"));
8677 /* TODO: Do UL SCELL CFG during UL CA dev */
8679 ueUl = RG_SCH_CMN_GET_UL_UE(ue, sCell);
8681 /* TODO_ULCA: SRS for SCELL needs to be handled in the below function call */
8682 rgSCHCmnUpdUeUlCqiInfo(sCell, ue, ueUl, ueSchCmn, cellSchd,
8683 sCell->isCpUlExtend);
/* UL HARQ entity for the SCell */
8685 ret = rgSCHUhmHqEntInit(sCell, ue);
8688 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL UHM HARQ Ent Init "
8689 "Failed for CRNTI:%d", ue->ueId);
8693 ueUlPcell = RG_SCH_CMN_GET_UL_UE(ue, ue->cell);
8694 /* Initialize uplink HARQ related information for UE */
8695 ueUl->hqEnt.maxHqRetx = ueUlPcell->hqEnt.maxHqRetx;
8696 cmLListInit(&ueUl->hqEnt.free);
8697 cmLListInit(&ueUl->hqEnt.inUse);
/* All UL HARQ processes start on the free list */
8698 for(i=0; i < ueUl->hqEnt.numHqPrcs; i++)
8700 ueUl->hqEnt.hqProcCb[i].hqEnt = (void*)(&ueUl->hqEnt);
8701 ueUl->hqEnt.hqProcCb[i].procId = i;
8702 ueUl->hqEnt.hqProcCb[i].ulSfIdx = RGSCH_INVALID_INFO;
8703 ueUl->hqEnt.hqProcCb[i].alloc = NULLP;
8705 /* ccpu00139513- Initializing SPS flags*/
8706 ueUl->hqEnt.hqProcCb[i].isSpsActvnHqP = FALSE;
8707 ueUl->hqEnt.hqProcCb[i].isSpsOccnHqP = FALSE;
8709 cmLListAdd2Tail(&ueUl->hqEnt.free, &ueUl->hqEnt.hqProcCb[i].lnk);
8710 ueUl->hqEnt.hqProcCb[i].lnk.node = (PTR)&ueUl->hqEnt.hqProcCb[i];
8713 /* Allocate UL BSR allocation tracking List */
8714 cmLListInit(&ueUl->ulAllocLst);
8716 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
8718 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8719 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord) != ROK))
8721 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL Memory allocation FAILED"
8722 "for CRNTI:%d",ue->ueId);
8723 err->errCause = RGSCHERR_SCH_CFG;
8726 allRcd->allocTime = sCell->crntTime;
8727 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
8728 allRcd->lnk.node = (PTR)allRcd;
8731 /* After initialising UL part, do power related init */
8732 ret = rgSCHPwrUeSCellCfg(sCell, ue, sCellInfoCfg);
8735 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Could not do "
8736 "power config for UE CRNTI:%d",ue->ueId);
/* EMTC vs regular UL scheduler SCell configuration (#else elided) */
8741 if(TRUE == ue->isEmtcUe)
8743 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8745 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8746 "for CRNTI:%d",ue->ueId);
8753 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8755 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8756 "for CRNTI:%d",ue->ueId);
8761 ue->ul.isUlCaEnabled = TRUE;
8765 } /* rgSCHCmnRgrSCellUeCfg */
8769 * @brief UE initialisation for scheduler.
8773 * Function : rgSCHCmnRgrSCellUeDel
8775 * This functions Delete UE specific scheduler
8776 * information for SCELL
8778 * @param[in] RgSchCellCb *cell
8779 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnRgrSCellUeDel: tears down the per-UE scheduler state of a
 * secondary cell — DL scheduler del, UL allocation cleanup, UL scheduler
 * free (EMTC or regular), DLFS del, and finally frees the common UE
 * control block allocated in rgSCHCmnRgrSCellUeCfg. Returns S16.
 * NOTE(review): braces and #ifdef guards are elided from this extract.
 */
8785 PUBLIC S16 rgSCHCmnRgrSCellUeDel
8787 RgSchUeCellInfo *sCellInfo,
8791 PUBLIC S16 rgSCHCmnRgrSCellUeDel(sCellInfo, ue)
8792 RgSchUeCellInfo *sCellInfo;
8796 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8797 Inst inst = ue->cell->instIdx;
8799 TRC2(rgSCHCmnRgrSCellUeDel);
8801 cellSchd->apisDl->rgSCHRgrSCellDlUeDel(sCellInfo, ue);
/* Release the UL allocation-tracking records for this SCell */
8804 rgSCHCmnUlUeDelAllocs(sCellInfo->cell, ue);
/* EMTC vs regular UL scheduler free (#else between calls elided) */
8807 if(TRUE == ue->isEmtcUe)
8809 cellSchd->apisEmtcUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8814 cellSchd->apisUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8817 /* DLFS UE Config */
8818 if (cellSchd->dl.isDlFreqSel)
8820 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeDel(sCellInfo->cell, ue)) != ROK)
8822 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS Scell del FAILED\n"));
/* Free the common scheduler control block for this SCell */
8827 rgSCHUtlFreeSBuf(sCellInfo->cell->instIdx,
8828 (Data**)(&(sCellInfo->sch)), (sizeof(RgSchCmnUe)));
8832 } /* rgSCHCmnRgrSCellUeDel */
8838 * @brief Handles 5gtf configuration for a UE
8842 * Function : rgSCHCmn5gtfUeCfg
8848 * @param[in] RgSchCellCb *cell
8849 * @param[in] RgSchUeCb *ue
8850 * @param[in] RgrUeCfg *cfg
8856 PUBLIC S16 rgSCHCmn5gtfUeCfg
8863 PUBLIC S16 rgSCHCmn5gtfUeCfg(cell, ue, cfg)
8869 TRC2(rgSCHCmnRgrUeCfg);
8871 RgSchUeGrp *ue5gtfGrp;
8872 ue->ue5gtfCb.grpId = cfg->ue5gtfCfg.grpId;
8873 ue->ue5gtfCb.BeamId = cfg->ue5gtfCfg.BeamId;
8874 ue->ue5gtfCb.numCC = cfg->ue5gtfCfg.numCC;
8875 ue->ue5gtfCb.mcs = cfg->ue5gtfCfg.mcs;
8876 ue->ue5gtfCb.maxPrb = cfg->ue5gtfCfg.maxPrb;
8878 ue->ue5gtfCb.cqiRiPer = 100;
8879 /* 5gtf TODO: CQIs to start from (10,0)*/
8880 ue->ue5gtfCb.nxtCqiRiOccn.sfn = 10;
8881 ue->ue5gtfCb.nxtCqiRiOccn.slot = 0;
8882 ue->ue5gtfCb.rank = 1;
8884 printf("\nschd cfg at mac,%u,%u,%u,%u,%u\n",ue->ue5gtfCb.grpId,ue->ue5gtfCb.BeamId,ue->ue5gtfCb.numCC,
8885 ue->ue5gtfCb.mcs,ue->ue5gtfCb.maxPrb);
8887 ue5gtfGrp = &(cell->cell5gtfCb.ueGrp5gConf[ue->ue5gtfCb.BeamId]);
8889 /* TODO_5GTF: Currently handling 1 group only. Need to update when multi group
8890 scheduling comes into picture */
8891 if(ue5gtfGrp->beamBitMask & (1 << ue->ue5gtfCb.BeamId))
8893 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8894 "5GTF_ERROR Invalid beam id CRNTI:%d",cfg->crnti);
8897 ue5gtfGrp->beamBitMask |= (1 << ue->ue5gtfCb.BeamId);
8904 * @brief UE initialisation for scheduler.
8908 * Function : rgSCHCmnRgrUeCfg
8910 * This functions intialises UE specific scheduler
8912 * 0. Perform basic validations
8913 * 1. Allocate common sched UE cntrl blk
8914 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
8916 * 4. Perform DLFS cfg
8918 * @param[in] RgSchCellCb *cell
8919 * @param[in] RgSchUeCb *ue
8920 * @param[int] RgrUeCfg *ueCfg
8921 * @param[out] RgSchErrInfo *err
/*
 * rgSCHCmnRgrUeCfg: top-level per-UE scheduler configuration for the
 * primary cell. Allocates the common scheduler UE control block, applies
 * CQI/category/MIMO configuration, sets DL allocation limits, initialises
 * the UL side (CQI, AMBR, allocation tracking, LCG blocks, power, SPS),
 * invokes the specific DL/UL/DLFS scheduler config hooks, and schedules
 * the UE's QoS refresh. Returns ROK/RFAILED.
 * NOTE(review): this extract is heavily elided — braces, #ifdef guards
 * (EMTC/TDD/LTEMAC_SPS/RG_5GTF), local declarations (ret, ueDl, ueUl,
 * cnt, ri, waitPer) and most RETVALUE(RFAILED) lines are missing.
 * Comments below describe only what is visible.
 */
8927 PUBLIC S16 rgSCHCmnRgrUeCfg
8935 PUBLIC S16 rgSCHCmnRgrUeCfg(cell, ue, ueCfg, err)
8942 RgSchDlRbAlloc *allocInfo;
8944 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8945 RgSchCmnUe *ueSchCmn;
8949 RgSchCmnAllocRecord *allRcd;
/* Map the cell id into the UE's cellInfo index space */
8951 U32 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8952 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
8953 TRC2(rgSCHCmnRgrUeCfg);
8956 /* 1. Allocate Common sched control block */
8957 if((rgSCHUtlAllocSBuf(cell->instIdx,
8958 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8960 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8961 "Memory allocation FAILED for CRNTI:%d",ueCfg->crnti);
8962 err->errCause = RGSCHERR_SCH_CFG;
8965 ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
/* Store DL CQI config; aperiodic CQI config is kept on the PCell info */
8966 ue->dl.ueDlCqiCfg = ueCfg->ueDlCqiCfg;
8967 pCellInfo->acqiCb.aCqiCfg = ueCfg->ueDlCqiCfg.aprdCqiCfg;
/* RGR category enum is 1-based; internal table index is 0-based */
8968 if(ueCfg->ueCatEnum > 0 )
8970 /*KWORK_FIX removed NULL chk for ueSchCmn*/
8971 ueSchCmn->cmn.ueCat = ueCfg->ueCatEnum - 1;
8975 ueSchCmn->cmn.ueCat = 0; /* Assuming enum values correctly set */
8977 cmInitTimers(&ueSchCmn->cmn.tmr, 1);
8979 /*2. Perform UEs downlink configuration */
8980 ueDl = &ueSchCmn->dl;
8981 /* RACHO : store the rapId assigned for HandOver UE.
8982 * Append UE to handover list of cmnCell */
8983 if (ueCfg->dedPreambleId.pres == PRSNT_NODEF)
8985 rgSCHCmnDelDedPreamble(cell, ueCfg->dedPreambleId.val);
8986 ueDl->rachInfo.hoRapId = ueCfg->dedPreambleId.val;
8987 cmLListAdd2Tail(&cellSchd->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
8988 ueDl->rachInfo.hoLnk.node = (PTR)ue;
8991 rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd);
/* No feedback yet: force transmit diversity for TM3/TM4/TM6 */
8993 if (ueCfg->txMode.pres == TRUE)
8995 if ((ueCfg->txMode.txModeEnum == RGR_UE_TM_4) ||
8996 (ueCfg->txMode.txModeEnum == RGR_UE_TM_6))
8998 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9000 if (ueCfg->txMode.txModeEnum == RGR_UE_TM_3)
9002 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9005 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
9006 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
/* Cat 6/7 UEs with 4 layers get the larger per-TB limit */
9009 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
9010 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
9011 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
9014 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
9018 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
9021 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
/* Soft-buffer split per HARQ process: TDD and FDD variants — the
 * selecting #ifdef LTE_TDD appears elided */
9023 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9024 rgSchTddDlNumHarqProcTbl[cell->ulDlCfgIdx]);
9026 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9027 RGSCH_NUM_DL_HQ_PROC);
9030 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
9032 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
9034 /* if none of the DL and UL AMBR are configured then fail the configuration
9036 if((ueCfg->ueQosCfg.dlAmbr == 0) && (ueCfg->ueQosCfg.ueBr == 0))
9038 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"UL Ambr and DL Ambr are"
9039 "configured as 0 for CRNTI:%d",ueCfg->crnti);
9040 err->errCause = RGSCHERR_SCH_CFG;
/* AMBR is pro-rated over the QoS refresh period */
9044 ue->dl.ambrCfgd = (ueCfg->ueQosCfg.dlAmbr * RG_SCH_CMN_REFRESH_TIME)/100;
9046 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
9047 allocInfo->rnti = ue->ueId;
9049 /* Initializing the lastCfi value to current cfi value */
9050 ueDl->lastCfi = cellSchd->dl.currCfi;
/* EMTC vs regular DL scheduler config (#else between branches elided) */
9052 if(cell->emtcEnable && ue->isEmtcUe)
9054 if ((cellSchd->apisEmtcDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9056 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9057 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9065 if ((cellSchd->apisDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9067 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9068 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9075 /* 3. Initialize ul part */
9076 ueUl = &ueSchCmn->ul;
9078 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
9079 cell->isCpUlExtend);
/* Per-TTI UL byte cap derived from the category's max UL bits */
9081 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9082 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
9084 ue->ul.cfgdAmbr = (ueCfg->ueQosCfg.ueBr * RG_SCH_CMN_REFRESH_TIME)/100;
9085 ue->ul.effAmbr = ue->ul.cfgdAmbr;
9086 RGSCHCPYTIMEINFO(cell->crntTime, ue->ul.ulTransTime);
9088 /* Allocate UL BSR allocation tracking List */
9089 cmLListInit(&ueUl->ulAllocLst);
9091 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
9093 if((rgSCHUtlAllocSBuf(cell->instIdx,
9094 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord)) != ROK))
9096 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation FAILED"
9097 "for CRNTI:%d",ueCfg->crnti);
9098 err->errCause = RGSCHERR_SCH_CFG;
9101 allRcd->allocTime = cell->crntTime;
9102 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
9103 allRcd->lnk.node = (PTR)allRcd;
9105 /* Allocate common sch cntrl blocks for LCGs */
9106 for (cnt=0; cnt<RGSCH_MAX_LCG_PER_UE; cnt++)
9108 ret = rgSCHUtlAllocSBuf(cell->instIdx,
9109 (Data**)&(ue->ul.lcgArr[cnt].sch), (sizeof(RgSchCmnLcg)));
9112 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9113 "SCH struct alloc failed for CRNTI:%d",ueCfg->crnti);
9114 err->errCause = RGSCHERR_SCH_CFG;
9118 /* After initialising UL part, do power related init */
9119 ret = rgSCHPwrUeCfg(cell, ue, ueCfg);
9122 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9123 "power config for UE CRNTI:%d",ueCfg->crnti);
/* SPS configuration (#ifdef LTEMAC_SPS start elided; #endif visible) */
9127 ret = rgSCHCmnSpsUeCfg(cell, ue, ueCfg, err);
9130 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9131 "SPS config for CRNTI:%d",ueCfg->crnti);
9134 #endif /* LTEMAC_SPS */
/* EMTC vs regular UL scheduler config (#else between branches elided) */
9137 if(TRUE == ue->isEmtcUe)
9139 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9141 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9142 "for CRNTI:%d",ueCfg->crnti);
9149 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9151 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9152 "for CRNTI:%d",ueCfg->crnti);
9157 /* DLFS UE Config */
9158 if (cellSchd->dl.isDlFreqSel)
9160 if ((cellSchd->apisDlfs->rgSCHDlfsUeCfg(cell, ue, ueCfg, err)) != ROK)
9162 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "DLFS UE config FAILED"
9163 "for CRNTI:%d",ueCfg->crnti);
9168 /* Fix: syed align multiple UEs to refresh at same time */
9169 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9170 /* Start UE Qos Refresh Timer */
9171 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
/* 5gtf-specific UE config (guarding #ifdef appears elided) */
9173 rgSCHCmn5gtfUeCfg(cell, ue, ueCfg);
9177 } /* rgSCHCmnRgrUeCfg */
9180 * @brief UE TX mode reconfiguration handler.
9184 * Function : rgSCHCmnDlHdlTxModeRecfg
9186 * This functions updates UE specific scheduler
9187 * information upon UE reconfiguration.
9189 * @param[in] RgSchUeCb *ue
9190 * @param[in] RgrUeRecfg *ueRecfg
/*
 * rgSCHCmnDlHdlTxModeRecfg: handles a transmission-mode reconfiguration
 * for a UE. On RECFG_CMPLT it clears the TXMODE force-TD mask, resets RI
 * to 1 and re-applies the per-mode force-TD flags; on RECFG_START it
 * re-initialises force-TD masking and (re)computes RI from the codebook
 * subset restriction where present.
 * NOTE(review): two build variants of this function exist (with/without
 * a numTxPorts parameter, selected by #ifdef TFU_UPGRADE) and both
 * prototypes appear below; the #ifdef lines, braces and several #else
 * branches are elided, so the repeated `ueDl->mimoInfo.ri = 1;` lines
 * belong to different elided conditional branches.
 */
9195 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9199 RgrUeRecfg *ueRecfg,
9203 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, numTxPorts)
9206 RgrUeRecfg *ueRecfg;
9211 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9218 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg)
9221 RgrUeRecfg *ueRecfg;
9225 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9226 TRC2(rgSCHCmnDlHdlTxModeRecfg);
/* Nothing to do when no new TX mode is present */
9228 if (ueRecfg->txMode.pres != PRSNT_NODEF)
9232 /* ccpu00140894- Starting Timer for TxMode Transition Completion*/
9233 ue->txModeTransCmplt =FALSE;
9234 rgSCHTmrStartTmr (ue->cell, ue, RG_SCH_TMR_TXMODE_TRNSTN, RG_SCH_TXMODE_TRANS_TIMER);
9235 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_CMPLT)
9237 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell,
9238 RG_SCH_CMN_TD_TXMODE_RECFG);
9239 /* MS_WORKAROUND for ccpu00123186 MIMO Fix Start: need to set FORCE TD bitmap based on TX mode */
9240 ueDl->mimoInfo.ri = 1;
9241 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9242 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9244 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9246 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9248 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9250 /* MIMO Fix End: need to set FORCE TD bitmap based on TX mode */
9253 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_START)
9255 /* start afresh forceTD masking */
9256 RG_SCH_CMN_INIT_FORCE_TD(ue, cell, 0);
9257 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_TXMODE_RECFG);
9258 /* Intialize MIMO related parameters of UE */
/* For TM3/TM4 with a codebook subset restriction, derive the minimum RI */
9261 if(ueRecfg->txMode.pres)
9263 if((ueRecfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9264 (ueRecfg->txMode.txModeEnum ==RGR_UE_TM_4))
9266 if(ueRecfg->ueCodeBookRstRecfg.pres)
9269 rgSCHCmnComputeRank(ueRecfg->txMode.txModeEnum,
9270 ueRecfg->ueCodeBookRstRecfg.pmiBitMap, numTxPorts);
9274 ueDl->mimoInfo.ri = 1;
9279 ueDl->mimoInfo.ri = 1;
9284 ueDl->mimoInfo.ri = 1;
9287 ueDl->mimoInfo.ri = 1;
9288 #endif /* TFU_UPGRADE */
9289 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9290 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9292 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9294 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9296 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9301 /***********************************************************
9303 * Func : rgSCHCmnUpdUeMimoInfo
9305 * Desc : Updates UL and DL Ue Information
9313 **********************************************************/
/***********************************************************
 *
 *   Func : rgSCHCmnUpdUeMimoInfo
 *
 *   Desc : Initialises the UE's DL MIMO state at configuration time:
 *          derives the minimum RI from the codebook subset restriction
 *          for TM3/TM4 (RI = 1 otherwise) and seeds both codeword CQIs
 *          with the cell's CCCH CQI.
 *
 *   NOTE(review): #ifdef TFU_UPGRADE structure and #else branches are
 *   elided here; the repeated `ri = 1` lines belong to different
 *   elided conditional branches.
 *
 **********************************************************/
9315 PRIVATE Void rgSCHCmnUpdUeMimoInfo
9320 RgSchCmnCell *cellSchd
9323 PRIVATE Void rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd)
9327 RgSchCmnCell *cellSchd;
9330 TRC2(rgSCHCmnUpdUeMimoInfo)
9332 if(ueCfg->txMode.pres)
9334 if((ueCfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9335 (ueCfg->txMode.txModeEnum ==RGR_UE_TM_4))
/* CBSR present: minimum rank comes from the restriction bitmap */
9337 if(ueCfg->ueCodeBookRstCfg.pres)
9340 rgSCHCmnComputeRank(ueCfg->txMode.txModeEnum,
9341 ueCfg->ueCodeBookRstCfg.pmiBitMap, cell->numTxAntPorts);
9345 ueDl->mimoInfo.ri = 1;
9350 ueDl->mimoInfo.ri = 1;
9355 ueDl->mimoInfo.ri = 1;
9359 ueDl->mimoInfo.ri = 1;
9360 #endif /*TFU_UPGRADE */
/* Both codewords start from the conservative CCCH CQI */
9361 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
9362 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
9366 /***********************************************************
9368 * Func : rgSCHCmnUpdUeUlCqiInfo
9370 * Desc : Updates UL and DL Ue Information
9378 **********************************************************/
/***********************************************************
 *
 *   Func : rgSCHCmnUpdUeUlCqiInfo
 *
 *   Desc : Seeds the UE's UL CQI state with the cell's default UL CQI
 *          (respecting SRS antenna selection when configured), primes
 *          the UL link-adaptation iTbs from the CQI-to-TBS table, and
 *          caps maxUlCqi for categories without UL 64QAM support.
 *
 *   NOTE(review): the #ifdef TFU_UPGRADE split and several braces/#else
 *   lines are elided from this extract.
 *
 **********************************************************/
9380 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo
9385 RgSchCmnUe *ueSchCmn,
9386 RgSchCmnCell *cellSchd,
9390 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd, isEcp)
9394 RgSchCmnUe *ueSchCmn;
9395 RgSchCmnCell *cellSchd;
9400 TRC2(rgSCHCmnUpdUeUlCqiInfo)
/* With SRS configured, the CQI is tracked per selected antenna */
9403 if(ue->srsCb.srsCfg.type == RGR_SCH_SRS_SETUP)
9405 if(ue->ul.ulTxAntSel.pres)
9407 ueUl->crntUlCqi[ue->srsCb.selectedAnt] = cellSchd->ul.dfltUlCqi;
9408 ueUl->validUlCqi = ueUl->crntUlCqi[ue->srsCb.selectedAnt];
9412 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9413 ueUl->validUlCqi = ueUl->crntUlCqi[0];
9415 ue->validTxAnt = ue->srsCb.selectedAnt;
9419 ueUl->validUlCqi = cellSchd->ul.dfltUlCqi;
/* Prime UL link adaptation: iTbs scaled by 100, delta starts at 0.
 * isEcp selects the extended-CP row of the CQI-to-TBS table. */
9423 ueUl->ulLaCb.cqiBasediTbs = rgSchCmnUlCqiToTbsTbl[isEcp]
9424 [ueUl->validUlCqi] * 100;
9425 ueUl->ulLaCb.deltaiTbs = 0;
9429 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9430 #endif /*TFU_UPGRADE */
9431 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
/* Categories without UL 64QAM are capped at the 16QAM CQI ceiling */
9432 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9434 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9438 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
9443 /***********************************************************
9445 * Func : rgSCHCmnUpdUeCatCfg
9447 * Desc : Updates UL and DL Ue Information
9455 **********************************************************/
/***********************************************************
 *
 *   Func : rgSCHCmnUpdUeCatCfg
 *
 *   Desc : Re-derives the category-dependent UE limits after a UE
 *          category reconfiguration: DL max TB bits/size, DL soft-buffer
 *          split, UL max CQI (capped for non-64QAM categories) and the
 *          per-TTI UL byte cap.
 *
 *   NOTE(review): the divisor of the maxSbSz computation and several
 *   braces/#else lines are elided from this extract.
 *
 **********************************************************/
9457 PRIVATE Void rgSCHCmnUpdUeCatCfg
9463 PRIVATE Void rgSCHCmnUpdUeCatCfg(ue, cell)
9468 RgSchDlHqEnt *hqE = NULLP;
9469 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9470 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9471 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
9472 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
9474 TRC2(rgSCHCmnUpdUeCatCfg)
9476 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
9478 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
/* Cat 6/7 UEs with 4 layers get the larger per-TB limit */
9481 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
9482 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
9483 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
9484 && (RG_SCH_MAX_TX_LYRS_4 == ri))
9486 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
9490 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
/* Soft-buffer bits divided per HARQ process (divisor line elided) */
9493 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9495 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9497 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9501 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
9503 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9504 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
9509 * @brief UE reconfiguration for scheduler.
9513 * Function : rgSChCmnRgrUeRecfg
9515 * This function updates UE-specific scheduler
9516 * information upon UE reconfiguration.
9518 * @param[in] RgSchCellCb *cell
9519 * @param[in] RgSchUeCb *ue
9520 * @param[int] RgrUeRecfg *ueRecfg
9521 * @param[out] RgSchErrInfo *err
/* Applies an RGR UE reconfiguration to the common scheduler. Each recfg
 * type flag in ueRecfg->ueRecfgTypes is handled independently: Tx mode,
 * CSG membership, UE category, aperiodic/periodic DL CQI, UL power, QoS
 * (AMBR/GBR refresh), then the UL/DL specific-scheduler hooks (EMTC or
 * legacy), DLFS, and SPS. On validation failure err->errCause is set to
 * RGSCHERR_SCH_CFG and the function bails out (returns elided in listing).
 * NOTE(review): listing is partial; interior lines are elided. */
9527 PUBLIC S16 rgSCHCmnRgrUeRecfg
9531 RgrUeRecfg *ueRecfg,
/* K&R-style alternate definition (pre-ANSI build variant). */
9535 PUBLIC S16 rgSCHCmnRgrUeRecfg(cell, ue, ueRecfg, err)
9538 RgrUeRecfg *ueRecfg;
9542 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9545 TRC2(rgSCHCmnRgrUeRecfg);
9546 /* Basic validations */
9547 if (ueRecfg->ueRecfgTypes & RGR_UE_TXMODE_RECFG)
/* Tx-mode handler signature differs per TFU_UPGRADE build flag. */
9550 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, cell->numTxAntPorts);
9552 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg);
9553 #endif /* TFU_UPGRADE */
9555 if(ueRecfg->ueRecfgTypes & RGR_UE_CSG_PARAM_RECFG)
9557 ue->csgMmbrSta = ueRecfg->csgMmbrSta;
9559 /* Changes for UE Category reconfiguration feature */
9560 if(ueRecfg->ueRecfgTypes & RGR_UE_UECAT_RECFG)
9562 rgSCHCmnUpdUeCatCfg(ue, cell);
9564 if (ueRecfg->ueRecfgTypes & RGR_UE_APRD_DLCQI_RECFG)
/* Aperiodic CQI config is stored on the primary cell info. */
9566 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
9567 pCellInfo->acqiCb.aCqiCfg = ueRecfg->aprdDlCqiRecfg;
9570 if (ueRecfg->ueRecfgTypes & RGR_UE_PRD_DLCQI_RECFG)
/* Only periodic CQI modes MOD10/MOD20 are supported here. */
9572 if ((ueRecfg->prdDlCqiRecfg.pres == TRUE)
9573 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD10)
9574 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD20))
9576 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unsupported periodic CQI "
9577 "reporting mode %d for old CRNIT:%d",
9578 (int)ueRecfg->prdDlCqiRecfg.prdModeEnum,ueRecfg->oldCrnti);
9579 err->errCause = RGSCHERR_SCH_CFG;
9582 ue->dl.ueDlCqiCfg.prdCqiCfg = ueRecfg->prdDlCqiRecfg;
9586 if (ueRecfg->ueRecfgTypes & RGR_UE_ULPWR_RECFG)
9588 if (rgSCHPwrUeRecfg(cell, ue, ueRecfg) != ROK)
9590 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9591 "Power Reconfiguration Failed for OLD CRNTI:%d",ueRecfg->oldCrnti);
9596 if (ueRecfg->ueRecfgTypes & RGR_UE_QOS_RECFG)
9598 /* Uplink Sched related Initialization */
/* Both AMBRs zero is treated as an invalid QoS configuration. */
9599 if ((ueRecfg->ueQosRecfg.dlAmbr == 0) && (ueRecfg->ueQosRecfg.ueBr == 0))
9601 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Ul Ambr and DL Ambr "
9602 "configured as 0 for OLD CRNTI:%d",ueRecfg->oldCrnti);
9603 err->errCause = RGSCHERR_SCH_CFG;
/* AMBR converted to a per-refresh-period budget (/100 scaling). */
9606 ue->ul.cfgdAmbr = (ueRecfg->ueQosRecfg.ueBr * \
9607 RG_SCH_CMN_REFRESH_TIME)/100;
9608 /* Downlink Sched related Initialization */
9609 ue->dl.ambrCfgd = (ueRecfg->ueQosRecfg.dlAmbr * \
9610 RG_SCH_CMN_REFRESH_TIME)/100;
9611 /* Fix: syed Update the effAmbr and effUeBR fields w.r.t the
9612 * new QOS configuration */
9613 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9614 /* Fix: syed align multiple UEs to refresh at same time */
9615 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9616 rgSCHCmnApplyUeRefresh(cell, ue);
9617 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
/* EMTC UEs use the EMTC-specific UL/DL scheduler APIs; others
 * use the legacy apisUl/apisDl function tables. */
9620 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9622 if ((cellSchCmn->apisEmtcUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9624 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9625 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9628 if ((cellSchCmn->apisEmtcDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9630 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9631 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9638 if ((cellSchCmn->apisUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9640 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9641 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9644 if ((cellSchCmn->apisDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9646 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9647 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9651 /* DLFS UE Config */
9652 if (cellSchCmn->dl.isDlFreqSel)
9654 if ((cellSchCmn->apisDlfs->rgSCHDlfsUeRecfg(cell, ue, \
9655 ueRecfg, err)) != ROK)
9657 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9658 "DLFS UE re-config FAILED for CRNTI:%d",ue->ueId);
9664 /* Invoke re-configuration on SPS module */
9665 if (rgSCHCmnSpsUeRecfg(cell, ue, ueRecfg, err) != ROK)
9667 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9668 "DL SPS ReCFG FAILED for UE CRNTI:%d", ue->ueId);
9674 } /* rgSCHCmnRgrUeRecfg*/
9676 /***********************************************************
9678 * Func : rgSCHCmnUlUeDelAllocs
9680 * Desc : Deletion of all UE allocations.
9688 **********************************************************/
/* Releases every uplink allocation held by the UE's HARQ processes, as
 * part of UE deletion. For each UL HARQ process: clear any stale SPS
 * current-allocation pointers referencing it, free the allocation back to
 * its UL subframe, and unlink the process from the cell's adaptive
 * retransmission list. NOTE(review): listing is partial; interior lines
 * (loop braces, #ifdef guards) are elided. */
9690 PRIVATE Void rgSCHCmnUlUeDelAllocs
/* K&R-style alternate definition (pre-ANSI build variant). */
9696 PRIVATE Void rgSCHCmnUlUeDelAllocs(cell, ue)
9701 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
9702 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
9705 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
9707 TRC2(rgSCHCmnUlUeDelAllocs);
/* Walk all UL HARQ processes of this UE. */
9709 for (i = 0; i < ueUl->hqEnt.numHqPrcs; ++i)
9711 RgSchUlHqProcCb *proc = rgSCHUhmGetUlHqProc(cell, ue, i);
9714 /* proc can't be NULL here */
9722 /* Added Insure Fixes Of reading Dangling memory.NULLed crntAlloc */
/* If SPS still points at this allocation, null it out first so no
 * dangling reference survives the free below. */
9724 if(proc->alloc == ulSpsUe->ulSpsSchdInfo.crntAlloc)
9726 ulSpsUe->ulSpsSchdInfo.crntAlloc = NULLP;
9727 ulSpsUe->ulSpsSchdInfo.crntAllocSf = NULLP;
/* Free the allocation from its owning UL subframe; the two call
 * variants correspond to different build configurations. */
9731 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9732 proc->alloc,ue->isEmtcUe);
9734 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9737 /* PHY probably needn't be intimated since
9738 * whatever intimation it needs happens at the last minute
9741 /* Fix: syed Adaptive Msg3 Retx crash. Remove the harqProc
9742 * from adaptive retx List. */
9743 if (proc->reTxLnk.node)
9746 //TODO_SID: Need to take care
9747 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
9748 proc->reTxLnk.node = (PTR)NULLP;
9756 /***********************************************************
9758 * Func : rgSCHCmnDelUeFrmRefreshQ
9760 * Desc : Removes a UE from the refresh queue, so that the UE is
9761 * no longer periodically triggered to refresh its GBR and
9770 **********************************************************/
/* Removes a UE from the periodic GBR/AMBR refresh timer queue. Under
 * RGL_SPECIFIC_CHANGES it also decrements the per-offset refresh UE
 * counter; then the common timer entry is deregistered via the cell
 * scheduler's timer queue (deregistration call itself elided in this
 * listing). NOTE(review): listing is partial. */
9772 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ
/* K&R-style alternate definition (pre-ANSI build variant). */
9778 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ(cell, ue)
9783 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
9785 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
9787 TRC2(rgSCHCmnDelUeFrmRefreshQ);
9789 #ifdef RGL_SPECIFIC_CHANGES
/* Keep the per-offset refresh UE count consistent (guarded against
 * underflow and out-of-range offsets). */
9790 if(ue->refreshOffset < RGSCH_MAX_REFRESH_GRPSZ)
9792 if(cell->refreshUeCnt[ue->refreshOffset])
9794 cell->refreshUeCnt[ue->refreshOffset]--;
/* Build the timer-deregistration argument block. */
9800 cmMemset((U8 *)&arg, 0, sizeof(arg));
9801 arg.tqCp = &sched->tmrTqCp;
9802 arg.tq = sched->tmrTq;
9803 arg.timers = &ueSchd->tmr;
9807 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
9813 /***********************************************************
9815 * Func : rgSCHCmnUeCcchSduDel
9817 * Desc : Clear CCCH SDU scheduling context.
9825 **********************************************************/
/* Clears any pending CCCH-SDU scheduling context for the UE: unlinks it
 * from the cell's CCCH SDU pending list, or — if a CCCH SDU HARQ process
 * exists — releases its PDCCH, removes it from the retransmission list or
 * the subframe Tx list, and releases the HARQ TB. NOTE(review): listing is
 * partial; interior lines are elided. */
9827 PRIVATE Void rgSCHCmnUeCcchSduDel
/* K&R-style alternate definition (pre-ANSI build variant). */
9833 PRIVATE Void rgSCHCmnUeCcchSduDel(cell, ueCb)
9838 RgSchDlHqEnt *hqE = NULLP;
9839 RgSchDlHqProcCb *ccchSduHqP = NULLP;
9840 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
9842 TRC2(rgSCHCmnUeCcchSduDel);
9844 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
9849 ccchSduHqP = hqE->ccchSduProc;
/* Case 1: CCCH SDU still queued for first transmission — just
 * unlink the UE from the cell's pending list. */
9850 if(ueCb->ccchSduLnk.node != NULLP)
9852 /* Remove the ccchSduProc if it is in the Tx list */
9853 cmLListDelFrm(&(cell->ccchSduUeLst), &(ueCb->ccchSduLnk));
9854 ueCb->ccchSduLnk.node = NULLP;
/* Case 2: a CCCH SDU HARQ process is active — tear it down. */
9856 else if(ccchSduHqP != NULLP)
9858 /* Fix for crash due to stale pdcch. Release ccch pdcch*/
9859 if(ccchSduHqP->pdcch)
/* Move the PDCCH from the subframe's in-use list back to the
 * cell's free PDCCH pool. */
9861 cmLListDelFrm(&ccchSduHqP->subFrm->pdcchInfo.pdcchs,
9862 &ccchSduHqP->pdcch->lnk);
9863 cmLListAdd2Tail(&cell->pdcchLst, &ccchSduHqP->pdcch->lnk);
9864 ccchSduHqP->pdcch = NULLP;
9866 if(ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node != NULLP)
9868 /* Remove the ccchSduProc if it is in the retx list */
9869 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
9870 &ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk);
9871 /* ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node = NULLP; */
9872 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
/* Otherwise, if it is scheduled on a subframe, remove it from
 * the Tx queue before releasing the TB. */
9874 else if ((ccchSduHqP->subFrm != NULLP) &&
9875 (ccchSduHqP->hqPSfLnk.node != NULLP))
9877 rgSCHUtlDlHqPTbRmvFrmTx(ccchSduHqP->subFrm,
9878 ccchSduHqP, 0, FALSE);
9879 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
9889 * @brief UE deletion for scheduler.
9893 * Function : rgSCHCmnUeDel
9895 * This function deletes all scheduler information
9896 * pertaining to a UE.
9898 * @param[in] RgSchCellCb *cell
9899 * @param[in] RgSchUeCb *ue
/* Deletes all common-scheduler state for a UE: CCCH SDU context, refresh
 * queue entry, UL allocations, RACH info, UL/DL specific-scheduler UE
 * context (EMTC or legacy), secondary cells, power and SPS context, DL
 * subframe HARQ state, DLFS context, the UL allocation record list, LCG
 * blocks, and finally the per-cell scheduler UE block itself.
 * NOTE(review): listing is partial; interior lines are elided. */
9903 PUBLIC Void rgSCHCmnUeDel
/* K&R-style alternate definition (pre-ANSI build variant). */
9909 PUBLIC Void rgSCHCmnUeDel(cell, ue)
9914 RgSchDlHqEnt *hqE = NULLP;
9915 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9917 RgSchCmnAllocRecord *allRcd;
9919 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9921 TRC2(rgSCHCmnUeDel);
/* Nothing to clean up if common scheduler config never happened. */
9923 if (RG_SCH_CMN_GET_UE(ue,cell) == NULLP)
9925 /* Common scheduler config has not happened yet */
9928 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
9931 /* UE Free can be triggered before MSG4 done when dlHqE is not updated */
/* CCCH SDU context removal (EMTC vs legacy variant). */
9935 rgSCHEmtcCmnUeCcchSduDel(cell, ue);
9940 rgSCHCmnUeCcchSduDel(cell, ue);
9943 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9945 rgSCHCmnUlUeDelAllocs(cell, ue);
9947 rgSCHCmnDelRachInfo(cell, ue);
/* UL specific-scheduler UE teardown via the appropriate API table. */
9950 if(TRUE == ue->isEmtcUe)
9952 cellSchCmn->apisEmtcUl->rgSCHFreeUlUe(cell, ue);
9957 cellSchCmn->apisUl->rgSCHFreeUlUe(cell, ue);
/* Release all configured secondary cells (index 0 is the PCell). */
9962 for(idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
9964 if(ue->cellInfo[idx] != NULLP)
9966 rgSCHSCellDelUeSCell(cell,ue,idx);
/* DL specific-scheduler UE teardown (EMTC vs legacy). */
9973 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9975 cellSchCmn->apisEmtcDl->rgSCHFreeDlUe(cell, ue);
9980 cellSchCmn->apisDl->rgSCHFreeDlUe(cell, ue);
9982 rgSCHPwrUeDel(cell, ue);
9985 rgSCHCmnSpsUeDel(cell, ue);
9986 #endif /* LTEMAC_SPS*/
9989 rgSchCmnDlSfHqDel(ue, cell);
9991 /* DLFS UE delete */
9992 if (cellSchCmn->dl.isDlFreqSel)
9994 cellSchCmn->apisDlfs->rgSCHDlfsUeDel(cell, ue);
/* Drain and free the UL allocation record list. */
9996 node = ueUl->ulAllocLst.first;
9998 /* ccpu00117052 - MOD - Passing double pointer in all the places of
9999 rgSCHUtlFreeSBuf function call for proper NULLP assignment*/
10002 allRcd = (RgSchCmnAllocRecord *)node->node;
10004 cmLListDelFrm(&ueUl->ulAllocLst, &allRcd->lnk);
10005 rgSCHUtlFreeSBuf(cell->instIdx,
10006 (Data**)(&allRcd), (sizeof(RgSchCmnAllocRecord)));
/* Free any allocated LCG scheduler blocks. */
10009 for(cnt = 0; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
10011 if (ue->ul.lcgArr[cnt].sch != NULLP)
10013 rgSCHUtlFreeSBuf(cell->instIdx,
10014 (Data**)(&(ue->ul.lcgArr[cnt].sch)), (sizeof(RgSchCmnLcg)));
10018 /* Fix : syed Moved hqEnt deinit to rgSCHCmnDlDeInitHqEnt */
/* Map cellId to the UE's cell index, then free the per-cell
 * scheduler UE context. */
10019 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId) & (CM_LTE_MAX_CELLS - 1));
10020 rgSCHUtlFreeSBuf(cell->instIdx,
10021 (Data**)(&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch))), (sizeof(RgSchCmnUe)));
10023 } /* rgSCHCmnUeDel */
10027 * @brief This function handles the common code rate configurations
10028 * done as part of RgrCellCfg/RgrCellRecfg.
10032 * Function: rgSCHCmnDlCnsdrCmnRt
10033 * Purpose: This function handles the common code rate configurations
10034 * done as part of RgrCellCfg/RgrCellRecfg.
10036 * Invoked by: Scheduler
10038 * @param[in] RgSchCellCb *cell
10039 * @param[in] RgrDlCmnCodeRateCfg *dlCmnCodeRate
/* Derives common-channel scheduling parameters from the configured DL
 * common code rates (RgrCellCfg/RgrCellRecfg): bits-per-RB for SI/P/RA
 * RNTI sizing, the iTbs to use for 2-RB and 3-RB common allocations, the
 * PDCCH aggregation level for common DCI, and the CCCH CQI.
 * NOTE(review): listing is partial; interior lines are elided. */
10044 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt
10047 RgrDlCmnCodeRateCfg *dlCmnCodeRate
/* K&R-style alternate definition (pre-ANSI build variant). */
10050 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt(cell, dlCmnCodeRate)
10052 RgrDlCmnCodeRateCfg *dlCmnCodeRate;
10055 RgSchCmnCell *cellDl = RG_SCH_CMN_GET_CELL(cell);
10062 TRC2(rgSCHCmnDlCnsdrCmnRt);
10064 /* code rate is bits per 1024 phy bits, since modl'n scheme is 2. it is
10065 * bits per 1024/2 REs */
/* bitsPerRb = codeRate * 2 (QPSK bits/RE) * REs per RB / 1024;
 * falls back to the default BCCH/PCCH code rate when unset. */
10066 if (dlCmnCodeRate->bcchPchRaCodeRate != 0)
10068 bitsPerRb = ((dlCmnCodeRate->bcchPchRaCodeRate * 2) *
10069 cellDl->dl.noResPerRb[3])/1024;
10073 bitsPerRb = ((RG_SCH_CMN_DEF_BCCHPCCH_CODERATE * 2) *
10074 cellDl->dl.noResPerRb[3])/1024;
10076 /* Store bitsPerRb in cellDl->dl to use later to determine
10077 * Number of RBs for UEs with SI-RNTI, P-RNTI and RA-RNTI */
10078 cellDl->dl.bitsPerRb = bitsPerRb;
10079 /* ccpu00115595 end*/
10080 /* calculate the ITbs for 2 RBs. Initialize ITbs to MAX value */
/* Scan rgTbSzTbl (single-layer table) for the largest iTbs whose
 * TB size fits within bitsPerRb * rbNum; clamp at 0. */
10083 bitsPer2Rb = bitsPerRb * rbNum;
10084 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer2Rb))
10087 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs2Rbs = 0) :
10088 (cellDl->dl.cmnChITbs.iTbs2Rbs = i-1);
10090 /* calculate the ITbs for 3 RBs. Initialize ITbs to MAX value */
10093 bitsPer3Rb = bitsPerRb * rbNum;
10094 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer3Rb))
10097 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs3Rbs = 0) :
10098 (cellDl->dl.cmnChITbs.iTbs3Rbs = i-1);
/* Total common DCI payload bits (format 1A style fields; DAI/Harq
 * field widths differ between FDD/TDD build variants). */
10101 pdcchBits = 1 + /* Flag for format0/format1a differentiation */
10102 1 + /* Localized/distributed VRB assignment flag */
10105 3 + /* Harq process Id */
10107 4 + /* Harq process Id */
10108 2 + /* UL Index or DAI */
10110 1 + /* New Data Indicator */
10113 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
10114 (cell->bwCfg.dlTotalBw + 1))/2);
10115 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
10116 Since VRB is local */
10117 /* For TDD consider DAI */
10119 /* Convert the pdcchBits to actual pdcchBits required for transmission */
/* Choose aggregation level 4 or 8 based on coded PDCCH size
 * (288/576 bits per 36.211 CCE sizing). */
10120 if (dlCmnCodeRate->pdcchCodeRate != 0)
10122 pdcchBits = (pdcchBits * 1024)/dlCmnCodeRate->pdcchCodeRate;
10123 if (pdcchBits <= 288) /* 288 : Num of pdcch bits for aggrLvl=4 */
10125 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
10127 else /* 576 : Num of pdcch bits for aggrLvl=8 */
10129 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL8;
10134 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
/* CCCH CQI: keep default when unconfigured (zero). */
10136 if (dlCmnCodeRate->ccchCqi == 0)
10142 cellDl->dl.ccchCqi = dlCmnCodeRate->ccchCqi;
10149 * @brief This function handles the configuration of cell for the first
10150 * time by the scheduler.
10154 * Function: rgSCHCmnDlRgrCellCfg
10155 * Purpose: Configuration received is stored into the data structures
10156 * Also, update the scheduler with the number of frames of
10157 * RACH preamble transmission.
10159 * Invoked by: BO and Scheduler
10161 * @param[in] RgSchCellCb* cell
10162 * @param[in] RgrCellCfg* cfg
/* TDD variant (selected by LTE_TDD; the FDD variant follows the #else):
 * first-time DL cell configuration for the common scheduler. Stores the
 * RGR cell configuration, derives RACH/Msg4 timing, classifies each DL
 * subframe (normal / special with or without data) per the UL-DL config
 * and special-subframe config, computes nCce per subframe, initializes
 * RACH response bookkeeping, PHICH offsets, HARQ feedback tables,
 * DwPTS/UpPTS durations, CQI->TBS/efficiency tables, CFI state, per-CFI
 * REs per RB (CRS deducted), scheduler caps, common code rates, DLFS and
 * power sub-modules. NOTE(review): listing is partial; many interior
 * lines (braces, declarations, RETVALUEs) are elided. */
10167 PRIVATE S16 rgSCHCmnDlRgrCellCfg
/* K&R-style alternate definition (pre-ANSI build variant). */
10174 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10180 RgSchCmnCell *cellSch;
10185 U8 maxDlSubfrms = cell->numDlSubfrms;
10186 U8 splSubfrmIdx = cfg->spclSfCfgIdx;
10189 RgSchTddSubfrmInfo subfrmInfo = rgSchTddMaxUlSubfrmTbl[cell->ulDlCfgIdx];
10200 TRC2(rgSCHCmnDlRgrCellCfg);
10203 cellSch = RG_SCH_CMN_GET_CELL(cell);
/* Number of RA subframes depends on the RACH preamble format. */
10204 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->\
10205 rachCfg.preambleFormat];
10206 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10207 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10209 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10210 3 TTI (MAX L1+L2 processing delay at the UE) */
10211 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10212 rgSchCmnHarqRtt[cell->ulDlCfgIdx] + 3;
/* Per-subframe UE caps; 0 means "use the compiled-in default". */
10213 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10214 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10215 if (cfg->maxUePerDlSf == 0)
10217 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10219 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10225 if (cell->bwCfg.dlTotalBw <= 10)
10235 /* DwPTS Scheduling Changes Start */
10236 cellSch->dl.splSfCfg = splSubfrmIdx;
/* Whether DL data can be scheduled in the special subframe depends
 * on the special-subframe config index and the CP type (36.213 7.1.7). */
10238 if (cfg->isCpDlExtend == TRUE)
10240 if((0 == splSubfrmIdx) || (4 == splSubfrmIdx) ||
10241 (7 == splSubfrmIdx) || (8 == splSubfrmIdx)
10244 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10248 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10253 /* Refer to 36.213 Section 7.1.7 */
10254 if((0 == splSubfrmIdx) || (5 == splSubfrmIdx))
10256 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10260 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10263 /* DwPTS Scheduling Changes End */
10265 splSfCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10266 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
/* Classify every DL subframe and size its CCE space. */
10268 for (sfCount = 0; sfCount < maxDlSubfrms; sfCount++)
10270 sf = cell->subFrms[sfCount];
10271 /* Sfcount matches the first special subframe occurs at Index 0
10272 * or subsequent special subframes */
10273 if(subfrmInfo.switchPoints == 1)
10275 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10276 RG_SCH_CMN_10_MS_PRD, &subfrmInfo);
10280 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10281 RG_SCH_CMN_5_MS_PRD, &subfrmInfo);
10283 if(isSplfrm == TRUE)
10286 /* DwPTS Scheduling Changes Start */
10287 if (cell->splSubfrmCfg.isDlDataAllowed == TRUE)
10289 sf->sfType = RG_SCH_SPL_SF_DATA;
10293 sf->sfType = RG_SCH_SPL_SF_NO_DATA;
10295 /* DwPTS Scheduling Changes End */
10299 /* DwPTS Scheduling Changes Start */
10300 if (sf->sfNum != 0)
10302 sf->sfType = RG_SCH_DL_SF;
10306 sf->sfType = RG_SCH_DL_SF_0;
10308 /* DwPTS Scheduling Changes End */
10311 /* Calculate the number of CCEs per subframe in the cell */
10312 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][sf->sfNum];
10313 if(cell->dynCfiCb.isDynCfiEnb == TRUE)
10315 /* In case if Dynamic CFI feature is enabled, default CFI
10316 * value 1 is used */
10317 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][1];
10321 if (sf->sfType == RG_SCH_SPL_SF_DATA)
10323 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
10327 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi)];
10332 /* Intialize the RACH response scheduling related infromation */
10333 if(rgSCHCmnDlRachInfoInit(cell) != ROK)
10338 /* Allocate PRACH preamble list */
10339 rgSCHCmnDlCreateRachPrmLst(cell);
10341 /* Initialize PHICH offset information */
10342 rgSCHCmnDlPhichOffsetInit(cell);
10344 /* Update the size of HARQ ACK/NACK feedback table */
10345 /* The array size is increased by 2 to have enough free indices, where other
10346 * indices are busy waiting for HARQ feedback */
10347 cell->ackNackFdbkArrSize = rgSchTddANFdbkMapTbl[cell->ulDlCfgIdx] + 2;
10349 /* Initialize expected HARQ ACK/NACK feedback time */
10350 rgSCHCmnDlANFdbkInit(cell);
10352 /* Initialize UL association set index */
10353 if(cell->ulDlCfgIdx != 0)
10355 rgSCHCmnDlKdashUlAscInit(cell);
/* DwPTS/UpPTS durations come from rgSchTddSplSubfrmInfoTbl, chosen
 * by the DL and UL CP types. A zero DwPTS means the special subframe
 * carries no countable DL part. */
10358 if (cfg->isCpDlExtend == TRUE)
10360 cp = RG_SCH_CMN_EXT_CP;
10362 cell->splSubfrmCfg.dwPts =
10363 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlDwPts;
10365 if ( cell->splSubfrmCfg.dwPts == 0 )
10367 cell->isDwPtsCnted = FALSE;
10371 cell->isDwPtsCnted = TRUE;
10374 if(cfg->isCpUlExtend == TRUE)
10376 cell->splSubfrmCfg.upPts =
10377 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlExtUpPts;
10381 cell->splSubfrmCfg.upPts =
10382 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlNorUpPts;
10387 cp = RG_SCH_CMN_NOR_CP;
10389 cell->splSubfrmCfg.dwPts =
10390 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlDwPts;
10391 cell->isDwPtsCnted = TRUE;
10393 if(cfg->isCpUlExtend == TRUE)
10395 cell->splSubfrmCfg.upPts =
10396 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlExtUpPts;
10400 cell->splSubfrmCfg.upPts =
10401 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlNorUpPts;
10405 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10406 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++,cfiIdx++)
10408 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
10409 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10410 [cell->numTxAntPorts]][cfiIdx];
10411 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10412 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10413 [cell->numTxAntPorts]][cfiIdx];
10416 /* Initializing the values of CFI parameters */
10417 if(cell->dynCfiCb.isDynCfiEnb)
10419 /* If DCFI is enabled, current CFI value will start from 1 */
10420 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10424 /* If DCFI is disabled, current CFI value is set as default max allowed CFI value */
10425 cellSch->dl.currCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10426 cellSch->dl.newCfi = cellSch->dl.currCfi;
10429 /* Include CRS REs while calculating Efficiency
10430 * The number of Resource Elements occupied by CRS depends on Number of
10431 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10432 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10433 * details of the same. Please note that PDCCH overlap symbols would not
10434 * considered in CRS REs deduction */
10435 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10437 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10438 - numPdcchSym) *RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
10441 /* DwPTS Scheduling Changes Start */
/* Map antenna-port count {1,2,4} to table index {0,1,2}. */
10442 antPortIdx = (cell->numTxAntPorts == 1)? 0:
10443 ((cell->numTxAntPorts == 2)? 1: 2);
10445 if (cp == RG_SCH_CMN_NOR_CP)
10447 splSfIdx = (splSubfrmIdx == 4)? 1: 0;
10451 splSfIdx = (splSubfrmIdx == 3)? 1: 0;
10454 numCrs = rgSchCmnDwptsCrs[splSfIdx][antPortIdx];
10456 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI-1; cfi++)
10458 /* If CFI is 2 and Ant Port is 4, don't consider the sym 1 CRS REs */
10459 if (antPortIdx == 2 && cfi == 2)
10463 cellSch->dl.numReDwPts[cfi] = ((cell->splSubfrmCfg.dwPts - cfi)*
10464 RB_SCH_CMN_NUM_SCS_PER_RB) - numCrs;
10466 /* DwPTS Scheduling Changes End */
/* BW caps: zero in config means "use compiled-in default". */
10468 if (cfg->maxDlBwPerUe == 0)
10470 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10474 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10476 if (cfg->maxDlRetxBw == 0)
10478 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10482 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10484 /* Fix: MUE_PERTTI_DL*/
10485 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10486 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10487 if (cfg->maxUePerDlSf == 0)
10489 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10491 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10492 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10493 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10495 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10496 "Invalid configuration !: "
10497 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10498 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10502 else if (!cfg->maxCcchPerDlSf)
10504 /* ccpu00143032: maxCcchPerDlSf 0 means not configured by application
10505 * hence setting to maxUePerDlSf. If maxCcchPerDlSf is 0 then scheduler
10506 * does't consider CCCH allocation in MaxUePerTti cap. Hence more than
10507 * 4UEs getting schduled & SCH expects >16 Hq PDUs in a TTI which causes
10508 * FLE crash in PHY as PHY has limit of 16 max*/
10509 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10513 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
10515 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10520 /*ccpu00118273 - ADD - start */
10521 cmLListInit(&cellSch->dl.msg4RetxLst);
10523 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10526 #ifdef RG_PHASE2_SCHED
10527 if (cellSch->apisDlfs == NULLP) /* DFLS specific initialization */
10529 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10531 if (cfg->dlfsCfg.isDlFreqSel)
10533 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10539 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10542 /* Power related configuration */
10543 ret = rgSCHPwrCellCfg(cell, cfg);
10549 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10550 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10551 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10552 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10553 cellSch->dl.msg4pAVal = cfg->msg4pAVal;
10556 #else /* LTE_TDD */
10558 * @brief This function handles the configuration of cell for the first
10559 * time by the scheduler.
10563 * Function: rgSCHCmnDlRgrCellCfg
10564 * Purpose: Configuration received is stored into the data structures
10565 * Also, update the scheduler with the number of frames of
10566 * RACH preamble transmission.
10568 * Invoked by: BO and Scheduler
10570 * @param[in] RgSchCellCb* cell
10571 * @param[in] RgrCellCfg* cfg
10572 * @param[in] RgSchErrInfo* err
/* FDD variant (compiled when LTE_TDD is not defined): first-time DL cell
 * configuration for the common scheduler. Mirrors the TDD variant but
 * without special-subframe / DwPTS handling: RACH & Msg4 timing, CQI ->
 * TBS/efficiency tables (incl. EMTC variants under their build flag),
 * CFI state, per-CFI REs per RB with CRS deduction, BW and per-TTI UE
 * caps, CCCH cap validation, common code rates, retx lists, DLFS and
 * power sub-modules. NOTE(review): listing is partial; interior lines
 * (braces, declarations, RETVALUEs) are elided. */
10577 PRIVATE S16 rgSCHCmnDlRgrCellCfg
/* K&R-style alternate definition (pre-ANSI build variant). */
10584 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10591 RgSchCmnCell *cellSch;
10598 TRC2(rgSCHCmnDlRgrCellCfg);
10600 cellSch = RG_SCH_CMN_GET_CELL(cell);
10602 /* Initialize the parameters with the ones received in the */
10603 /* configuration. */
10605 /* Added matrix 'rgRaPrmblToRaFrmTbl' for computation of RA
10606 * sub-frames from preamble format */
10607 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->rachCfg.preambleFormat];
10609 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10610 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10612 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10613 3 TTI (MAX L1+L2 processing delay at the UE) */
/* FDD uses the fixed HARQ RTT entry (index 7 of the table). */
10614 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10615 rgSchCmnHarqRtt[7] + 3;
10617 if (cell->bwCfg.dlTotalBw <= 10)
/* Cyclic-prefix type selects the CQI/efficiency table rows. */
10628 if (cell->isCpDlExtend == TRUE)
10630 cp = RG_SCH_CMN_EXT_CP;
10635 cp = RG_SCH_CMN_NOR_CP;
10639 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10640 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, cfiIdx++)
10642 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
/* EMTC-specific tables populated only under the EMTC build flag. */
10644 cellSch->dl.emtcCqiToTbsTbl[0][cfi] = rgSchEmtcCmnCqiToTbs[0][cp][cfiIdx];
10646 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10647 [cell->numTxAntPorts]][cfiIdx];
10648 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10650 cellSch->dl.emtcCqiToTbsTbl[1][cfi] = rgSchEmtcCmnCqiToTbs[1][cp][cfiIdx];
10652 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10653 [cell->numTxAntPorts]][cfiIdx];
10656 /* Initializing the values of CFI parameters */
10657 if(cell->dynCfiCb.isDynCfiEnb)
10659 /* If DCFI is enabled, current CFI value will start from 1 */
10660 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10664 /* If DCFI is disabled, current CFI value is set as default CFI value */
10665 cellSch->dl.currCfi = cellSch->cfiCfg.cfi;
10666 cellSch->dl.newCfi = cellSch->dl.currCfi;
10669 /* Include CRS REs while calculating Efficiency
10670 * The number of Resource Elements occupied by CRS depends on Number of
10671 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10672 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10673 * details of the same. Please note that PDCCH overlap symbols would not
10674 * considered in CRS REs deduction */
10675 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10677 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10678 - numPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
/* BW caps: zero in config means "use compiled-in default". */
10681 if (cfg->maxDlBwPerUe == 0)
10683 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10687 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10689 if (cfg->maxDlRetxBw == 0)
10691 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10695 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10698 /* Fix: MUE_PERTTI_DL*/
10699 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10700 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10701 if (cfg->maxUePerDlSf == 0)
10703 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10705 /* Fix: MUE_PERTTI_DL syed validating Cell Configuration */
10706 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10708 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10709 "FAILED MaxUePerDlSf(%u) < MaxDlUeNewTxPerTti(%u)",
10710 cellSch->dl.maxUePerDlSf,
10711 cellSch->dl.maxUeNewTxPerTti);
10714 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10715 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10717 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid configuration !: "
10718 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10719 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10723 else if (!cfg->maxCcchPerDlSf)
10725 /* ccpu00143032: maxCcchPerDlSf 0 means not configured by application
10726 * hence setting to maxUePerDlSf. If maxCcchPerDlSf is 0 then scheduler
10727 * does't consider CCCH allocation in MaxUePerTti cap. Hence more than
10728 * 4UEs getting schduled & SCH expects >16 Hq PDUs in a TTI which causes
10729 * FLE crash in PHY as PHY has limit of 16 max*/
10730 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10734 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
10738 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10742 cmLListInit(&cellSch->dl.msg4RetxLst);
10744 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10747 #ifdef RG_PHASE2_SCHED
10748 if (cellSch->apisDlfs == NULLP) /* DFLS specific initialization */
10750 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10752 if (cfg->dlfsCfg.isDlFreqSel)
10754 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10760 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10763 /* Power related configuration */
10764 ret = rgSCHPwrCellCfg(cell, cfg);
10770 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10771 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10772 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10773 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10774 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10777 #endif /* LTE_TDD */
10779 /***********************************************************
10781 * Func : rgSCHCmnUlCalcReqRbCeil
10783 * Desc : Calculate RB required to satisfy 'bytes' for
10785 * Returns number of RBs such that requirement
10786 * is necessarily satisfied (does a 'ceiling'
10789 * Ret : Required RBs (U8)
10795 **********************************************************/
/* Returns the minimum number of UL RBs (ceiling) needed to carry 'bytes'
 * at the spectral efficiency of CQI 'cqi': REs = ceil(bits * 1024 / eff),
 * then RBs = ceil(REs / REs-per-RB for this cell). The 1024 factor
 * matches the fixed-point scaling of rgSchCmnUlCqiTbl[].eff. */
10797 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil
10801 RgSchCmnUlCell *cellUl
/* K&R-style alternate definition (pre-ANSI build variant). */
10804 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil(bytes, cqi, cellUl)
10807 RgSchCmnUlCell *cellUl;
10810 U32 numRe = RGSCH_CEIL((bytes * 8) * 1024, rgSchCmnUlCqiTbl[cqi].eff);
10811 TRC2(rgSCHCmnUlCalcReqRbCeil);
10812 RETVALUE((U8)RGSCH_CEIL(numRe, RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl)));
10815 /***********************************************************
10817 * Func : rgSCHCmnPrecompMsg3Vars
10819 * Desc : Precomputes the following for msg3 allocation:
10820 * 1. numSb and Imcs for msg size A
10821 * 2. numSb and Imcs otherwise
10825 * Notes: The corresponding vars in cellUl struct is filled
10830 **********************************************************/
/* Precomputes Msg3 grant parameters for RACH: the number of subbands and
 * Imcs for preamble group B (message size A) and group A (the minimum
 * Msg3 grant size). CCCH CQI is capped at the 16QAM limit, the MCS is
 * limited to fit the 4-bit RAR field, and subband counts are rounded up
 * to the nearest 2^a*3^b*5^c value (rgSchCmnMult235Tbl) as required for
 * PUSCH DFT sizes. NOTE(review): listing is partial; interior lines
 * are elided. */
10832 PRIVATE S16 rgSCHCmnPrecompMsg3Vars
10834 RgSchCmnUlCell *cellUl,
/* K&R-style alternate definition (pre-ANSI build variant). */
10841 PRIVATE S16 rgSCHCmnPrecompMsg3Vars(cellUl, ccchCqi, msgSzA, sbSize, isEcp)
10842 RgSchCmnUlCell *cellUl;
10854 U16 msg3GrntSz = 0;
10856 TRC2(rgSCHCmnPrecompMsg3Vars);
/* CCCH transmissions never exceed 16QAM capability. */
10858 if (ccchCqi > cellUl->max16qamCqi)
10860 ccchCqi = cellUl->max16qamCqi;
10862 /* #ifndef RG_SCH_CMN_EXP_CP_SUP For ECP Pick the index 1 */
10864 ccchTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10865 ccchMcs = rgSCHCmnUlGetIMcsFrmITbs(ccchTbs, CM_LTE_UE_CAT_1);
10867 /* MCS should fit in 4 bits in RAR */
10873 /* Limit the ccchMcs to 15 as it
10874 * can be inferred from 36.213, section 6.2 that msg3 imcs
10876 * Since, UE doesn't exist right now, we use CAT_1 for ue
/* Step the CQI down until the derived MCS fits the RAR limit. */
10878 while((ccchMcs = (rgSCHCmnUlGetIMcsFrmITbs(
10879 rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi],CM_LTE_UE_CAT_1))
10881 RG_SCH_CMN_MAX_MSG3_IMCS)
10886 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10888 if (msgSzA < RGSCH_MIN_MSG3_GRNT_SZ)
/* Preamble group B: size the grant for msgSzA bytes. */
10892 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(msgSzA, ccchCqi, cellUl), sbSize);
10894 numRb = numSb * sbSize;
10895 msg3GrntSz = 8 * msgSzA;
/* Grow the allocation until the TB size covers the grant bits. */
10897 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10900 numRb = numSb * sbSize;
/* Round numSb up to a valid 2^a*3^b*5^c PUSCH allocation size. */
10902 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10906 /* Reversed(Corrected) the assignment for preamble-GrpA
10907 * Refer- TG36.321- section- 5.1.2*/
10908 cellUl->ra.prmblBNumSb = numSb;
10909 cellUl->ra.prmblBIMcs = ccchMcs;
/* Preamble group A: same sizing against the minimum Msg3 grant. */
10910 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(RGSCH_MIN_MSG3_GRNT_SZ, \
10914 numRb = numSb * sbSize;
10915 msg3GrntSz = 8 * RGSCH_MIN_MSG3_GRNT_SZ;
10916 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10919 numRb = numSb * sbSize;
10921 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10925 /* Reversed(Corrected) the assignment for preamble-GrpA
10926 * Refer- TG36.321- section- 5.1.2*/
10927 cellUl->ra.prmblANumSb = numSb;
10928 cellUl->ra.prmblAIMcs = ccchMcs;
/* Debug flag: presumably enables printing of PUCCH details when non-zero —
 * TODO confirm at its usage site (not visible in this extract). */
10932 PUBLIC U32 gPrntPucchDet=0;
10935 /***********************************************************
10937 * Func : rgSCHCmnUlCalcAvailBw
10939 * Desc : Calculates bandwidth available for PUSCH scheduling.
10941 * Ret : S16 (ROK/RFAILED)
10947 **********************************************************/
/* NOTE(review): this is the first of two variants of rgSCHCmnUlCalcAvailBw;
 * the surrounding #ifdef (presumably LTE_TDD) is elided from the extract.
 * Lines below are kept verbatim. */
/* Purpose: for the given CFI, compute the PUSCH starting RB and available
 * bandwidth after excluding PUCCH regions (TDD variant using M table). */
10949 PRIVATE S16 rgSCHCmnUlCalcAvailBw
10952 RgrCellCfg *cellCfg,
10958 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
10960 RgrCellCfg *cellCfg;
/* PUCCH configuration inputs used by the 36.211 §5.4.3 exclusion formula. */
10967 U8 ulBw = cell->bwCfg.ulTotalBw;
10968 U8 n2Rb = cell->pucchCfg.resourceSize;
10969 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
10970 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
10971 U8 n1Cs = cell->pucchCfg.cyclicShift;
10978 U8 exclRb; /* RBs to exclude */
10981 /* To avoid PUCCH and PUSCH collision issue */
10985 /* Maximum value of M as per Table 10.1-1 */
10986 U8 M[RGSCH_MAX_TDD_UL_DL_CFG] = {1, 2, 4, 3, 4, 9, 1};
10988 TRC2(rgSCHCmnUlCalcAvailBw);
/* 'c' differs for extended CP (assignment lines elided here). */
10990 if (cell->isCpUlExtend)
10995 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
10997 /* Considering the max no. of CCEs for PUSCH BW calculation
10998 * based on min mi value */
10999 if (cell->ulDlCfgIdx == 0 || cell->ulDlCfgIdx == 6)
11008 totalCce = cell->dynCfiCb.cfi2NCceTbl[mi][cfi];
11010 P = rgSCHCmnGetPValFrmCCE(cell, totalCce-1);
11011 n1PlusOne = cell->rgSchTddNpValTbl[P + 1];
/* Worst-case n(1)_PUCCH resource index across M DL subframes (Table 10.1-1). */
11012 n1Max = (M[cell->ulDlCfgIdx] - 1)*n1PlusOne + (totalCce-1) + n1Pucch;
11014 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
11016 n1RbPart = (c*n1Cs)/pucchDeltaShft;
11017 n1Rb = (n1Max - n1RbPart)/ n1PerRb;
11018 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
11020 /* get the total Number of RB's to be excluded for PUSCH */
11022 if(n1Pucch < n1RbPart)
11028 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
/* PUCCH occupies band edges symmetrically, so PUSCH starts past half. */
11030 puschRbStart = exclRb/2 + 1;
11032 /* Num of PUCCH RBs = puschRbStart*2 */
11033 if (puschRbStart * 2 >= ulBw)
11035 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11039 *rbStartRef = puschRbStart;
11040 *bwAvailRef = ulBw - puschRbStart * 2;
/* If the PUCCH region exceeds its configured cap, restrict the max CFI. */
11042 if(cell->pucchCfg.maxPucchRb !=0 &&
11043 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11045 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11052 /***********************************************************
11054 * Func : rgSCHCmnUlCalcAvailBw
11056 * Desc : Calculates bandwidth available for PUSCH scheduling.
11058 * Ret : S16 (ROK/RFAILED)
11064 **********************************************************/
/* NOTE(review): second variant of rgSCHCmnUlCalcAvailBw (presumably the
 * FDD branch of an elided #ifdef/#else); lines kept verbatim. */
/* Purpose: for the given CFI, compute PUSCH start RB and available BW
 * after excluding PUCCH RBs, including PUCCH format 3 resources. */
11066 PRIVATE S16 rgSCHCmnUlCalcAvailBw
11069 RgrCellCfg *cellCfg,
11075 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
11077 RgrCellCfg *cellCfg;
/* PUCCH configuration inputs used by the 36.211 §5.4.3 exclusion formula. */
11084 U8 ulBw = cell->bwCfg.ulTotalBw;
11085 U8 n2Rb = cell->pucchCfg.resourceSize;
11086 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
11087 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
11088 U8 n1Cs = cell->pucchCfg.cyclicShift;
11094 U8 exclRb; /* RBs to exclude */
11098 U16 numOfN3PucchRb;
11099 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11102 TRC2(rgSCHCmnUlCalcAvailBw);
/* 'c' differs for extended CP (assignment lines elided here). */
11104 if (cell->isCpUlExtend)
11109 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
/* FDD: CCE count taken from row 0 of the CFI table (no 'mi' dimension). */
11111 totalCce = cell->dynCfiCb.cfi2NCceTbl[0][cfi];
11113 n1Max = n1Pucch + totalCce-1;
11115 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
11117 n1RbPart = (c*n1Cs)/pucchDeltaShft;
11118 n1Rb = (U8)((n1Max - n1RbPart) / n1PerRb);
11119 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
11121 /* get the total Number of RB's to be excluded for PUSCH */
11123 if(n1Pucch < n1RbPart)
11129 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
11131 /*Support for PUCCH Format 3*/
/* One format-3 RB serves up to 5 UEs, hence the ceil-by-5. */
11133 if (cell->isPucchFormat3Sptd)
11135 numOfN3PucchRb = RGSCH_CEIL(cellSch->dl.maxUePerDlSf,5);
11136 exclRb = exclRb + numOfN3PucchRb;
/* PUCCH occupies band edges symmetrically, so PUSCH starts past half. */
11139 puschRbStart = exclRb/2 + 1;
/* Debug print: %ld vs %d variant chosen by platform word size. */
11143 #ifndef ALIGN_64BIT
11144 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%ld:%d:%d:%d:%d:%d]\n",
11145 cell->crntTime.sfn, cell->crntTime.slot, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11147 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%d:%d:%d:%d:%d:%d]\n",
11148 cell->crntTime.sfn, cell->crntTime.slot, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11152 if (puschRbStart*2 >= ulBw)
11154 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11158 *rbStartRef = puschRbStart;
11159 *bwAvailRef = ulBw - puschRbStart * 2;
/* If the PUCCH region exceeds its configured cap, restrict the max CFI. */
11161 if(cell->pucchCfg.maxPucchRb !=0 &&
11162 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11164 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11173 /***********************************************************
11175 * Func : rgSCHCmnUlCellInit
11177 * Desc : Uplink scheduler initialisation for cell.
11185 **********************************************************/
/* NOTE(review): heavily elided extract — many #ifdef branches (TDD/FDD,
 * LTE_L2_MEAS, MAC_SCH_STATS, EMTC), braces and error-return lines are
 * missing; lines below are kept verbatim. */
/* Purpose: one-time uplink scheduler initialisation for a cell: per-SF
 * allocation buffers, subband sizing, CQI limits, msg3 precomputation,
 * per-CFI available-BW table, DMRS array and UL subframe structures. */
11187 PRIVATE S16 rgSCHCmnUlCellInit
11190 RgrCellCfg *cellCfg
11193 PRIVATE S16 rgSCHCmnUlCellInit(cell, cellCfg)
11195 RgrCellCfg *cellCfg;
11199 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11200 U8 maxUePerUlSf = cellCfg->maxUePerUlSf;
11202 /* Added configuration for maximum number of MSG3s */
11203 U8 maxMsg3PerUlSf = cellCfg->maxMsg3PerUlSf;
11205 U8 maxUlBwPerUe = cellCfg->maxUlBwPerUe;
11206 U8 sbSize = cellCfg->puschSubBand.size;
/* TDD-only locals: UL/DL config index and UL-to-DL subframe mapping. */
11214 U16 ulDlCfgIdx = cell->ulDlCfgIdx;
11215 /* [ccpu00127294]-MOD-Change the max Ul subfrms size in TDD */
11216 U8 maxSubfrms = 2 * rgSchTddNumUlSf[ulDlCfgIdx];
11217 U8 ulToDlMap[12] = {0}; /* maximum 6 Subframes in UL * 2 */
11218 U8 maxUlsubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
11219 [RGSCH_NUM_SUB_FRAMES-1];
/* FDD counterpart of maxSubfrms (other branch of an elided #ifdef). */
11223 U8 maxSubfrms = RG_SCH_CMN_UL_NUM_SF;
11229 #if (defined(LTE_L2_MEAS) )
11230 Inst inst = cell->instIdx;
11231 #endif /* #if (defined(LTE_L2_MEAS) || defined(DEBUGP) */
11232 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
11234 TRC2(rgSCHCmnUlCellInit);
11236 cellUl->maxUeNewTxPerTti = cellCfg->maxUlUeNewTxPerTti;
/* Zero-valued configuration fields fall back to compile-time defaults. */
11237 if (maxUePerUlSf == 0)
11239 maxUePerUlSf = RG_SCH_CMN_MAX_UE_PER_UL_SF;
11242 if (maxMsg3PerUlSf == 0)
11244 maxMsg3PerUlSf = RG_SCH_CMN_MAX_MSG3_PER_UL_SF;
11246 /* fixed the problem while sending raRsp
11247 * if maxMsg3PerUlSf is greater than
11248 * RGSCH_MAX_RNTI_PER_RARNTI
11250 if(maxMsg3PerUlSf > RGSCH_MAX_RNTI_PER_RARNTI)
11252 maxMsg3PerUlSf = RGSCH_MAX_RNTI_PER_RARNTI;
11255 if(maxMsg3PerUlSf > maxUePerUlSf)
11257 maxMsg3PerUlSf = maxUePerUlSf;
11260 /*cellUl->maxAllocPerUlSf = maxUePerUlSf + maxMsg3PerUlSf;*/
11261 /*Max MSG3 should be a subset of Max UEs*/
11262 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11263 cellUl->maxMsg3PerUlSf = maxMsg3PerUlSf;
/* Second assignment of maxAllocPerUlSf — presumably the other branch of
 * an elided #ifdef; TODO confirm against the full source. */
11265 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11267 /* Fix: MUE_PERTTI_UL syed validating Cell Configuration */
11268 if (cellUl->maxAllocPerUlSf < cellUl->maxUeNewTxPerTti)
11270 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
11271 "FAILED: MaxUePerUlSf(%u) < MaxUlUeNewTxPerTti(%u)",
11272 cellUl->maxAllocPerUlSf,
11273 cellUl->maxUeNewTxPerTti);
/* Per-subframe UL allocation-info buffers (loop bound differs per build). */
11279 for(idx = 0; idx < RGSCH_SF_ALLOC_SIZE; idx++)
11281 for(idx = 0; idx < RGSCH_NUM_SUB_FRAMES; idx++)
11285 ret = rgSCHUtlAllocSBuf(inst, (Data **)&(cell->sfAllocArr[idx].
11286 ulUeInfo.ulAllocInfo), (cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc)));
11289 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation failed ");
11294 if (maxUlBwPerUe == 0)
11296 /* ccpu00139362- Setting to configured UL BW instead of MAX BW(100)*/
11297 maxUlBwPerUe = cell->bwCfg.ulTotalBw;
11299 cellUl->maxUlBwPerUe = maxUlBwPerUe;
11301 /* FOR RG_SCH_CMN_EXT_CP_SUP */
/* REs per RB = 12 subcarriers * (symbols minus DMRS/SRS overhead);
 * 14 symbols for normal CP, 12 for extended CP. */
11302 if (!cellCfg->isCpUlExtend)
11304 cellUl->ulNumRePerRb = 12 * (14 - RGSCH_UL_SYM_DMRS_SRS);
11308 cellUl->ulNumRePerRb = 12 * (12 - RGSCH_UL_SYM_DMRS_SRS);
/* Subband size must be a 2^a*3^b*5^c product (DFT-s-OFDM constraint). */
11311 if (sbSize != rgSchCmnMult235Tbl[sbSize].match)
11313 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Invalid subband size %d", sbSize);
11316 //Setting the subband size to 4 which is size of VRBG in 5GTF
11318 sbSize = MAX_5GTF_VRBG_SIZE;
11321 maxSbPerUe = maxUlBwPerUe / sbSize;
11322 if (maxSbPerUe == 0)
11324 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnUlCellInit(): "
11325 "maxUlBwPerUe/sbSize is zero");
11328 cellUl->maxSbPerUe = rgSchCmnMult235Tbl[maxSbPerUe].prvMatch;
11330 /* CQI related updations */
11331 if ((!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->ulCmnCodeRate.ccchCqi))
11332 || (!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->trgUlCqi.trgCqi)))
11334 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnUlCellInit(): "
11338 cellUl->dfltUlCqi = cellCfg->ulCmnCodeRate.ccchCqi;
11340 /* Changed the logic to determine maxUlCqi.
11341 * For a 16qam UE, maxUlCqi is the CQI Index at which
11342 * efficiency is as close as possible to RG_SCH_MAX_CODE_RATE_16QAM
11343 * Refer to 36.213-8.6.1 */
11344 for (i = RG_SCH_CMN_UL_NUM_CQI - 1;i > 0; --i)
11346 RLOG_ARG2(L_INFO,DBG_CELLID,cell->cellId,
11349 rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i]);
11350 #ifdef MAC_SCH_STATS
11351 /* ccpu00128489 ADD Update mcs in hqFailStats here instead of at CRC
11352 * since CQI to MCS mapping does not change. The only exception is for
11353 * ITBS = 19 where the MCS can be 20 or 21 based on the UE cat. We
11354 * choose 20, instead of 21, ie UE_CAT_3 */
11355 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11356 RG_SCH_CMN_UL_TBS_TO_MCS(iTbs, hqFailStats.ulCqiStat[i - 1].mcs);
/* Find the highest CQI whose iTBS is still within the 16QAM range. */
11359 for (i = RG_SCH_CMN_UL_NUM_CQI - 1; i != 0; --i)
11361 /* Fix for ccpu00123912*/
11362 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11363 if (iTbs <= RGSCH_UL_16QAM_MAX_ITBS) /* corresponds to 16QAM */
11365 RLOG_ARG1(L_INFO,DBG_CELLID,cell->cellId,
11366 "16 QAM CQI %u", i);
11367 cellUl->max16qamCqi = i;
/* EMTC and legacy msg3 precomputation (alternate branches, elided). */
11373 /* Precompute useful values for RA msg3 */
11374 ret = rgSCHCmnPrecompEmtcMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11375 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11382 /* Precompute useful values for RA msg3 */
11383 ret = rgSCHCmnPrecompMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11384 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11390 cellUl->sbSize = sbSize;
11393 cellUl->numUlSubfrms = maxSubfrms;
11395 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->ulSfArr,
11396 cellUl->numUlSubfrms * sizeof(RgSchUlSf));
11400 cellUl->numUlSubfrms = 0;
11404 /* store the DL subframe corresponding to the PUSCH offset
11405 * in their respective UL subframe */
11406 for(i=0; i < RGSCH_NUM_SUB_FRAMES; i++)
11408 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][i] != 0)
11410 subfrm = (i + rgSchTddPuschTxKTbl[ulDlCfgIdx][i]) % \
11411 RGSCH_NUM_SUB_FRAMES;
11412 subfrm = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subfrm]-1;
11413 dlIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][i]-1;
11414 RGSCH_ARRAY_BOUND_CHECK( cell->instIdx, ulToDlMap, subfrm);
11415 ulToDlMap[subfrm] = dlIdx;
11418 /* Copy the information in the remaining UL subframes based
11419 * on number of HARQ processes */
11420 for(i=maxUlsubfrms; i < maxSubfrms; i++)
11422 subfrm = i-maxUlsubfrms;
11423 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, i);
11424 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, subfrm)
11425 ulToDlMap[i] = ulToDlMap[subfrm];
/* Build the per-CFI PUSCH start-RB / available-BW table. */
11429 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++)
11432 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11434 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11443 cell->ulAvailBw = bwAvail;
11446 numSb = bwAvail/sbSize;
11448 cell->dynCfiCb.bwInfo[cfi].startRb = rbStart;
11449 cell->dynCfiCb.bwInfo[cfi].numSb = numSb;
11452 if(0 == cell->dynCfiCb.maxCfi)
11454 RLOG_ARG3(L_ERROR,DBG_CELLID,cell->cellId,
11455 "Incorrect Default CFI(%u), maxCfi(%u), maxPucchRb(%d)",
11456 cellSch->cfiCfg.cfi, cell->dynCfiCb.maxCfi,
11457 cell->pucchCfg.maxPucchRb);
/* DMRS cyclic-shift array sized by the CFI=1 subband count. */
11463 cellUl->dmrsArrSize = cell->dynCfiCb.bwInfo[1].numSb;
11464 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->dmrsArr,
11465 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11470 for (i = 0; i < cellUl->dmrsArrSize; ++i)
11472 cellUl->dmrsArr[i] = cellCfg->puschSubBand.dmrs[i];
11475 /* Init subframes */
11476 for (i = 0; i < maxSubfrms; ++i)
11478 ret = rgSCHUtlUlSfInit(cell, &cellUl->ulSfArr[i], i,
11479 cellUl->maxAllocPerUlSf);
/* Failure path: unwind already-initialised subframes, then free the
 * DMRS array and the subframe array. */
11482 for (; i != 0; --i)
11484 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[i-1]);
11486 /* ccpu00117052 - MOD - Passing double pointer
11487 for proper NULLP assignment*/
11488 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)(&(cellUl->dmrsArr)),
11489 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11491 /* ccpu00117052 - MOD - Passing double pointer
11492 for proper NULLP assignment*/
11493 rgSCHUtlFreeSBuf(cell->instIdx,
11494 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11499 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cellUl);
11504 * @brief Scheduler processing on cell configuration.
11508 * Function : rgSCHCmnRgrCellCfg
11510 * This function does requisite initialisation
11511 * and setup for scheduler1 when a cell is
11514 * @param[in] RgSchCellCb *cell
11515 * @param[in] RgrCellCfg *cellCfg
11516 * @param[out] RgSchErrInfo *err
/* NOTE(review): elided extract — braces, error returns and several #ifdef
 * (LTE_TDD, EMTC_ENABLE, LTEMAC_SPS) markers are missing; verbatim lines. */
/* Purpose: scheduler-side cell configuration entry point — allocates the
 * common scheduler control block, initialises UL and DL scheduling, and
 * dispatches to the configured UL/DL (and EMTC) scheduler APIs. */
11522 PUBLIC S16 rgSCHCmnRgrCellCfg
11525 RgrCellCfg *cellCfg,
11529 PUBLIC S16 rgSCHCmnRgrCellCfg(cell, cellCfg, err)
11531 RgrCellCfg *cellCfg;
11536 RgSchCmnCell *cellSch;
11537 TRC2(rgSCHCmnRgrCellCfg);
11539 /* As part of RGR cell configuration, validate the CRGCellCfg
11540 * There is no trigger for crgCellCfg from SC1 */
11541 /* Removed failure check for Extended CP */
/* Allocate the per-cell common scheduler control block (freed in
 * rgSCHCmnCellDel). */
11543 if (((ret = rgSCHUtlAllocSBuf(cell->instIdx,
11544 (Data**)&(cell->sc.sch), (sizeof(RgSchCmnCell)))) != ROK))
11546 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
11547 "Memory allocation FAILED");
11548 err->errCause = RGSCHERR_SCH_CFG;
11551 cellSch = (RgSchCmnCell *)(cell->sc.sch);
11552 cellSch->cfiCfg = cellCfg->cfiCfg;
11553 cellSch->trgUlCqi.trgCqi = cellCfg->trgUlCqi.trgCqi;
11554 /* Initialize the scheduler refresh timer queues */
11555 cellSch->tmrTqCp.nxtEnt = 0;
11556 cellSch->tmrTqCp.tmrLen = RG_SCH_CMN_NUM_REFRESH_Q;
11558 /* RACHO Intialize the RACH ded Preamble Information */
11559 rgSCHCmnCfgRachDedPrm(cell);
11561 /* Initialize 'Np' value for each 'p' used for
11562 * HARQ ACK/NACK reception */
11563 rgSCHCmnDlNpValInit(cell);
/* Second rgSCHCmnDlNpValInit call — presumably the other branch of an
 * elided #ifdef (TDD/FDD); TODO confirm against full source. */
11566 /* Initialize 'Np' value for each 'p' used for
11567 * HARQ ACK/NACK reception */
11569 rgSCHCmnDlNpValInit(cell);
11572 /* Now perform uplink related initializations */
11573 ret = rgSCHCmnUlCellInit(cell, cellCfg);
11576 /* There is no downlink deinit to be performed */
11577 err->errCause = RGSCHERR_SCH_CFG;
11580 ret = rgSCHCmnDlRgrCellCfg(cell, cellCfg, err);
11583 err->errCause = RGSCHERR_SCH_CFG;
11586 /* DL scheduler has no initializations to make */
11587 /* As of now DL scheduler always returns ROK */
11589 rgSCHCmnGetDciFrmtSizes(cell);
11590 rgSCHCmnGetCqiDciFrmt2AggrLvl(cell);
/* EMTC DCI sizing (inside an elided #ifdef EMTC_ENABLE). */
11592 rgSCHCmnGetEmtcDciFrmtSizes(cell);
11593 rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl(cell);
11594 #endif /* EMTC_ENABLE */
/* Bind and invoke the UL scheduler API table (EMTC or legacy). */
11597 if(TRUE == cellCfg->emtcEnable)
11599 cellSch->apisEmtcUl = &rgSchEmtcUlSchdTbl[0];
11600 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
11607 cellSch->apisUl = &rgSchUlSchdTbl[RG_SCH_CMN_GET_UL_SCHED_TYPE(cell)];
11608 ret = cellSch->apisUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
/* Bind and invoke the DL scheduler API table (EMTC or legacy). */
11614 if(TRUE == cellCfg->emtcEnable)
11616 cellSch->apisEmtcDl = &rgSchEmtcDlSchdTbl[0];
11617 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11624 cellSch->apisDl = &rgSchDlSchdTbl[RG_SCH_CMN_GET_DL_SCHED_TYPE(cell)];
11626 /* Perform SPS specific initialization for the cell */
11627 ret = rgSCHCmnSpsCellCfg(cell, cellCfg, err);
11633 ret = cellSch->apisDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11638 rgSCHCmnInitVars(cell);
11641 } /* rgSCHCmnRgrCellCfg*/
11645 * @brief This function handles the reconfiguration of cell.
11649 * Function: rgSCHCmnRgrCellRecfg
11650 * Purpose: Update the reconfiguration parameters.
11652 * Invoked by: Scheduler
11654 * @param[in] RgSchCellCb* cell
/* NOTE(review): elided extract — braces and return lines missing. */
/* Purpose: apply cell reconfiguration — UL/DL common code rates, UL/DL
 * scheduler-specific recfg, DLFS and power reconfiguration, gated by
 * recfgTypes bit flags. */
11659 PUBLIC S16 rgSCHCmnRgrCellRecfg
11662 RgrCellRecfg *recfg,
11666 PUBLIC S16 rgSCHCmnRgrCellRecfg(cell, recfg, err)
11668 RgrCellRecfg *recfg;
11673 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11674 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11676 TRC2(rgSCHCmnRgrCellRecfg);
11678 if (recfg->recfgTypes & RGR_CELL_UL_CMNRATE_RECFG)
/* Remember the old CQI so it can be restored if recfg fails. */
11680 U8 oldCqi = cellUl->dfltUlCqi;
11681 if (!RG_SCH_CMN_UL_IS_CQI_VALID(recfg->ulCmnCodeRate.ccchCqi))
11683 err->errCause = RGSCHERR_SCH_CFG;
11684 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnRgrCellRecfg(): "
11688 cellUl->dfltUlCqi = recfg->ulCmnCodeRate.ccchCqi;
11689 ret = rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11690 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
/* NOTE(review): this looks like the failure-rollback path (dfltUlCqi is
 * restored to oldCqi), yet the re-precompute below still passes the NEW
 * recfg ccchCqi instead of oldCqi — likely a bug; verify against the
 * full source before changing (surrounding lines are elided here). */
11693 cellUl->dfltUlCqi = oldCqi;
11694 rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11695 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
11700 if (recfg->recfgTypes & RGR_CELL_DL_CMNRATE_RECFG)
11702 if (rgSCHCmnDlCnsdrCmnRt(cell, &recfg->dlCmnCodeRate) != ROK)
11704 err->errCause = RGSCHERR_SCH_CFG;
/* EMTC branch (elided #ifdef): delegate to EMTC UL/DL scheduler recfg. */
11710 if(TRUE == cell->emtcEnable)
11712 /* Invoke UL sched for cell Recfg */
11713 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11719 /* Invoke DL sched for cell Recfg */
11720 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
/* Legacy branch: delegate to the configured UL/DL scheduler recfg. */
11729 /* Invoke UL sched for cell Recfg */
11730 ret = cellSch->apisUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11736 /* Invoke DL sched for cell Recfg */
11737 ret = cellSch->apisDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
11744 if (recfg->recfgTypes & RGR_CELL_DLFS_RECFG)
11746 ret = cellSch->apisDlfs->rgSCHDlfsCellRecfg(cell, recfg, err);
11751 cellSch->dl.isDlFreqSel = recfg->dlfsRecfg.isDlFreqSel;
11754 if (recfg->recfgTypes & RGR_CELL_PWR_RECFG)
11756 ret = rgSCHPwrCellRecfg(cell, recfg);
11766 /***********************************************************
11768 * Func : rgSCHCmnUlCellDeinit
11770 * Desc : Uplink scheduler de-initialisation for cell.
11778 **********************************************************/
/* NOTE(review): elided extract; lines kept verbatim. */
/* Purpose: tear down all UL scheduler state for a cell — per-SF allocation
 * buffers, L2-measurement control blocks, DMRS array and UL subframes.
 * Mirrors the allocations done in rgSCHCmnUlCellInit. */
11780 PRIVATE Void rgSCHCmnUlCellDeinit
11785 PRIVATE Void rgSCHCmnUlCellDeinit(cell)
11789 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11792 U8 maxSubfrms = cellUl->numUlSubfrms;
11795 CmLList *lnk = NULLP;
11796 RgSchL2MeasCb *measCb;
11798 TRC2(rgSCHCmnUlCellDeinit);
/* Free per-subframe UL allocation buffers (loop bound differs per build,
 * alternate branch of an elided #ifdef). */
11801 for(ulSfIdx = 0; ulSfIdx < RGSCH_SF_ALLOC_SIZE; ulSfIdx++)
11803 for(ulSfIdx = 0; ulSfIdx < RGSCH_NUM_SUB_FRAMES; ulSfIdx++)
11806 if(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo != NULLP)
11808 /* ccpu00117052 - MOD - Passing double pointer
11809 for proper NULLP assignment*/
11810 rgSCHUtlFreeSBuf(cell->instIdx,
11811 (Data **)(&(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo)),
11812 cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc));
11814 /* ccpu00117052 - DEL - removed explicit NULLP assignment
11815 as it is done in above utility function */
11818 /* Free the memory allocated to measCb */
/* Drain the L2 measurement list, freeing each control block. */
11819 lnk = cell->l2mList.first;
11820 while(lnk != NULLP)
11822 measCb = (RgSchL2MeasCb *)lnk->node;
11823 cmLListDelFrm(&cell->l2mList, lnk);
11825 /* ccpu00117052 - MOD - Passing double pointer
11826 for proper NULLP assignment*/
11827 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&measCb,\
11828 sizeof(RgSchL2MeasCb));
11831 if (cellUl->dmrsArr != NULLP)
11833 /* ccpu00117052 - MOD - Passing double pointer
11834 for proper NULLP assignment*/
11835 rgSCHUtlFreeSBuf(cell->instIdx,(Data **)(&(cellUl->dmrsArr)),
11836 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11838 /* De-init subframes */
/* Two loop bounds — alternate branches of an elided #ifdef (TDD/FDD). */
11840 for (ulSfIdx = 0; ulSfIdx < maxSubfrms; ++ulSfIdx)
11842 for (ulSfIdx = 0; ulSfIdx < RG_SCH_CMN_UL_NUM_SF; ++ulSfIdx)
11845 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[ulSfIdx]);
11849 if (cellUl->ulSfArr != NULLP)
11851 /* ccpu00117052 - MOD - Passing double pointer
11852 for proper NULLP assignment*/
11853 rgSCHUtlFreeSBuf(cell->instIdx,
11854 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11862 * @brief Scheduler processing for cell delete.
11866 * Function : rgSCHCmnCellDel
11868 * This functions de-initialises and frees memory
11869 * taken up by scheduler1 for the entire cell.
11871 * @param[in] RgSchCellCb *cell
/* NOTE(review): elided extract; lines kept verbatim. */
/* Purpose: full scheduler teardown for a cell — UL deinit, UL/DL (and
 * EMTC) scheduler-specific frees, DLFS, power and SPS cleanup, and finally
 * release of the common scheduler control block allocated at cell cfg. */
11875 PUBLIC Void rgSCHCmnCellDel
11880 PUBLIC Void rgSCHCmnCellDel(cell)
11884 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11885 TRC2(rgSCHCmnCellDel);
/* Nothing to free if the scheduler control block was never allocated. */
11890 if (cellSch == NULLP)
11894 /* Perform the deinit for the UL scheduler */
11895 rgSCHCmnUlCellDeinit(cell);
11897 if(TRUE == cell->emtcEnable)
11899 if (cellSch->apisEmtcUl)
11901 cellSch->apisEmtcUl->rgSCHFreeUlCell(cell);
11905 if (cellSch->apisUl)
11907 /* api pointer checks added (here and below in
11908 * this function). pl check. - antriksh */
11909 cellSch->apisUl->rgSCHFreeUlCell(cell);
11912 /* Perform the deinit for the DL scheduler */
11913 cmLListInit(&cellSch->dl.taLst);
11914 if (cellSch->apisDl)
11916 cellSch->apisDl->rgSCHFreeDlCell(cell);
11919 if (cellSch->apisEmtcDl)
11921 rgSCHEmtcInitTaLst(&cellSch->dl);
11923 cellSch->apisEmtcDl->rgSCHFreeDlCell(cell);
11927 /* DLFS de-initialization */
11928 if (cellSch->dl.isDlFreqSel && cellSch->apisDlfs)
11930 cellSch->apisDlfs->rgSCHDlfsCellDel(cell);
11933 rgSCHPwrCellDel(cell);
/* SPS cleanup (inside an elided #ifdef LTEMAC_SPS). */
11935 rgSCHCmnSpsCellDel(cell);
11938 /* ccpu00117052 - MOD - Passing double pointer
11939 for proper NULLP assignment*/
11940 rgSCHUtlFreeSBuf(cell->instIdx,
11941 (Data**)(&(cell->sc.sch)), (sizeof(RgSchCmnCell)));
11943 } /* rgSCHCmnCellDel */
11947 * @brief This function validates QOS parameters for DL.
11951 * Function: rgSCHCmnValidateDlQos
11952 * Purpose: This function validates QOS parameters for DL.
11954 * Invoked by: Scheduler
11956 * @param[in] CrgLchQosCfg *dlQos
/* NOTE(review): elided extract — braces and RETVALUE lines missing. */
/* Purpose: validate DL QoS parameters — QCI must be in range, and for GBR
 * QCIs the MBR must be non-zero and not below the GBR. */
11961 PRIVATE S16 rgSCHCmnValidateDlQos
11963 RgrLchQosCfg *dlQos
11966 PRIVATE S16 rgSCHCmnValidateDlQos(dlQos)
11967 RgrLchQosCfg *dlQos;
11970 U8 qci = dlQos->qci;
11972 TRC2(rgSCHCmnValidateDlQos);
11974 if ( qci < RG_SCH_CMN_MIN_QCI || qci > RG_SCH_CMN_MAX_QCI )
/* GBR QCI range: enforce mbr != 0 and mbr >= gbr. */
11979 if ((qci >= RG_SCH_CMN_GBR_QCI_START) &&
11980 (qci <= RG_SCH_CMN_GBR_QCI_END))
11982 if ((dlQos->mbr == 0) || (dlQos->mbr < dlQos->gbr))
11991 * @brief Scheduler invocation on logical channel addition.
11995 * Function : rgSCHCmnRgrLchCfg
11997 * This functions does required processing when a new
11998 * (dedicated) logical channel is added. Assumes lcg
11999 * pointer in ulLc is set.
12001 * @param[in] RgSchCellCb *cell
12002 * @param[in] RgSchUeCb *ue
12003 * @param[in] RgSchDlLcCb *dlLc
12004 * @param[int] RgrLchCfg *lcCfg
12005 * @param[out] RgSchErrInfo *err
/* NOTE(review): elided extract — parameter lists, braces and return lines
 * missing; lines kept verbatim. */
/* Purpose: scheduler handling for a new dedicated logical channel —
 * allocate the DL service control block, validate/derive QoS, and register
 * the channel with DL and UL (and EMTC/SPS/SCell) schedulers. */
12011 PUBLIC S16 rgSCHCmnRgrLchCfg
12020 PUBLIC S16 rgSCHCmnRgrLchCfg(cell, ue, dlLc, lcCfg, err)
12030 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12032 TRC2(rgSCHCmnRgrLchCfg);
/* Per-LC scheduler control block; freed in rgSCHCmnFreeDlLc. */
12034 ret = rgSCHUtlAllocSBuf(cell->instIdx,
12035 (Data**)&((dlLc)->sch), (sizeof(RgSchCmnDlSvc)));
12038 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRgrLchCfg(): "
12039 "SCH struct alloc failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12040 err->errCause = RGSCHERR_SCH_CFG;
/* Non-DCCH channels carry QoS; DCCH gets fixed highest priority below. */
12043 if(lcCfg->lcType != CM_LTE_LCH_DCCH)
12045 ret = rgSCHCmnValidateDlQos(&lcCfg->dlInfo.dlQos);
12048 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSchCmnCrgLcCfg(): "
12049 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12050 err->errCause = RGSCHERR_SCH_CFG;
12053 /* Perform DL service activation in the scheduler */
12054 ((RgSchCmnDlSvc *)(dlLc->sch))->qci = lcCfg->dlInfo.dlQos.qci;
12055 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = rgSchCmnDlQciPrio[lcCfg->dlInfo.dlQos.qci - 1];
/* gbr/mbr are rescaled to the scheduler refresh period (per-100ms units —
 * TODO confirm the unit convention of RG_SCH_CMN_REFRESH_TIME). */
12056 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcCfg->dlInfo.dlQos.gbr * \
12057 RG_SCH_CMN_REFRESH_TIME)/100;
12058 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcCfg->dlInfo.dlQos.mbr * \
12059 RG_SCH_CMN_REFRESH_TIME)/100;
12063 /*assigning highest priority to DCCH */
12064 ((RgSchCmnDlSvc *)(dlLc->sch))->prio=RG_SCH_CMN_DCCH_PRIO;
12067 dlLc->lcType=lcCfg->lcType;
/* Register with DL scheduler (EMTC or legacy branch, #ifdef elided). */
12070 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12072 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcCfg(cell, ue,dlLc ,lcCfg, err);
12081 ret = cellSch->apisDl->rgSCHRgrDlLcCfg(cell, ue, dlLc, lcCfg, err);
/* Register with UL scheduler (EMTC or legacy branch). */
12089 if(TRUE == ue->isEmtcUe)
12091 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12100 ret = cellSch->apisUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12110 rgSCHSCellDlLcCfg(cell, ue, dlLc);
/* SPS hookup (inside an elided #ifdef LTEMAC_SPS). */
12116 if(lcCfg->dlInfo.dlSpsCfg.isSpsEnabled)
12118 /* Invoke SPS module if SPS is enabled for the service */
12119 ret = rgSCHCmnSpsDlLcCfg(cell, ue, dlLc, lcCfg, err);
12122 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "rgSchCmnRgrLchCfg(): "
12123 "SPS configuration failed for DL LC for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12124 err->errCause = RGSCHERR_SCH_CFG;
12134 * @brief Scheduler invocation on logical channel addition.
12138 * Function : rgSCHCmnRgrLchRecfg
12140 * This functions does required processing when an existing
12141 * (dedicated) logical channel is reconfigured. Assumes lcg
12142 * pointer in ulLc is set to the old value.
12143 * Independent of whether new LCG is meant to be configured,
12144 * the new LCG scheduler information is accessed and possibly modified.
12146 * @param[in] RgSchCellCb *cell
12147 * @param[in] RgSchUeCb *ue
12148 * @param[in] RgSchDlLcCb *dlLc
12149 * @param[int] RgrLchRecfg *lcRecfg
12150 * @param[out] RgSchErrInfo *err
/* NOTE(review): elided extract; lines kept verbatim. */
/* Purpose: reconfigure an existing dedicated logical channel — re-validate
 * QoS (QCI change rejected), rescale gbr/mbr, and forward the recfg to the
 * DL/UL (or EMTC) schedulers and optionally SPS. */
12156 PUBLIC S16 rgSCHCmnRgrLchRecfg
12161 RgrLchRecfg *lcRecfg,
12165 PUBLIC S16 rgSCHCmnRgrLchRecfg(cell, ue, dlLc, lcRecfg, err)
12169 RgrLchRecfg *lcRecfg;
12174 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12176 TRC2(rgSCHCmnRgrLchRecfg)
12178 if(dlLc->lcType != CM_LTE_LCH_DCCH)
12180 ret = rgSCHCmnValidateDlQos(&lcRecfg->dlRecfg.dlQos);
12184 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
12185 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12186 err->errCause = RGSCHERR_SCH_CFG;
/* Changing QCI (hence priority) after setup is not supported. */
12189 if (((RgSchCmnDlSvc *)(dlLc->sch))->qci != lcRecfg->dlRecfg.dlQos.qci)
12191 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Qci, hence lc Priority change "
12192 "not supported for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12193 err->errCause = RGSCHERR_SCH_CFG;
/* Rescale gbr/mbr to the scheduler refresh period, as at LC cfg. */
12196 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcRecfg->dlRecfg.dlQos.gbr * \
12197 RG_SCH_CMN_REFRESH_TIME)/100;
12198 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcRecfg->dlRecfg.dlQos.mbr * \
12199 RG_SCH_CMN_REFRESH_TIME)/100;
12203 /*assigning highest priority to DCCH */
12204 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = RG_SCH_CMN_DCCH_PRIO;
/* Forward to EMTC or legacy DL/UL schedulers (#ifdef branches elided). */
12208 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12210 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12215 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
12224 ret = cellSch->apisDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12229 ret = cellSch->apisUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
/* SPS recfg on a DL LC is not supported — logged and ignored. */
12237 if (lcRecfg->recfgTypes & RGR_DL_LC_SPS_RECFG)
12239 /* Invoke SPS module if SPS is enabled for the service */
12240 if(lcRecfg->dlRecfg.dlSpsRecfg.isSpsEnabled)
12242 ret = rgSCHCmnSpsDlLcRecfg(cell, ue, dlLc, lcRecfg, err)
12245 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"SPS re-configuration not "
12246 "supported for dlLC Ignore this CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12257 * @brief Scheduler invocation on logical channel addition.
12261 * Function : rgSCHCmnRgrLcgCfg
12263 * This functions does required processing when a new
12264 * (dedicated) logical channel is added. Assumes lcg
12265 * pointer in ulLc is set.
12267 * @param[in] RgSchCellCb *cell,
12268 * @param[in] RgSchUeCb *ue,
12269 * @param[in] RgSchLcgCb *lcg,
12270 * @param[in] RgrLcgCfg *lcgCfg,
12271 * @param[out] RgSchErrInfo *err
/* NOTE(review): elided extract; lines kept verbatim. */
/* Purpose: configure an UL logical channel group — rescale gbr/mbr to the
 * refresh period, register with the UL (or EMTC) scheduler, and notify MAC
 * when the LCG is GBR. */
12277 PUBLIC S16 rgSCHCmnRgrLcgCfg
12286 PUBLIC S16 rgSCHCmnRgrLcgCfg(cell, ue, lcg, lcgCfg, err)
12295 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12296 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCfg->ulInfo.lcgId].sch));
12298 TRC2(rgSCHCmnRgrLcgCfg);
/* effGbr/effDeltaMbr start at the configured values; deltaMbr is the
 * MBR headroom above GBR, rescaled to the refresh period. */
12300 ulLcg->cfgdGbr = (lcgCfg->ulInfo.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12301 ulLcg->effGbr = ulLcg->cfgdGbr;
12302 ulLcg->deltaMbr = ((lcgCfg->ulInfo.mbr - lcgCfg->ulInfo.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12303 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12306 if(TRUE == ue->isEmtcUe)
12308 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12317 ret = cellSch->apisUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12323 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12325 /* Indicate MAC that this LCG is GBR LCG */
12326 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcgCfg->ulInfo.lcgId, TRUE);
12332 * @brief Scheduler invocation on logical channel addition.
12336 * Function : rgSCHCmnRgrLcgRecfg
12338 * This functions does required processing when a new
12339 * (dedicated) logical channel is added. Assumes lcg
12340 * pointer in ulLc is set.
12342 * @param[in] RgSchCellCb *cell,
12343 * @param[in] RgSchUeCb *ue,
12344 * @param[in] RgSchLcgCb *lcg,
12345 * @param[in] RgrLcgRecfg *reCfg,
12346 * @param[out] RgSchErrInfo *err
/* NOTE(review): elided extract; lines kept verbatim. */
/* Purpose: reconfigure an UL logical channel group — same rescaling as
 * rgSCHCmnRgrLcgCfg, then notify MAC whether the LCG is (still) GBR. */
12352 PUBLIC S16 rgSCHCmnRgrLcgRecfg
12357 RgrLcgRecfg *reCfg,
12361 PUBLIC S16 rgSCHCmnRgrLcgRecfg(cell, ue, lcg, reCfg, err)
12365 RgrLcgRecfg *reCfg;
12370 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12371 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[reCfg->ulRecfg.lcgId].sch));
12373 TRC2(rgSCHCmnRgrLcgRecfg);
/* Rescale gbr and the MBR-above-GBR headroom to the refresh period. */
12375 ulLcg->cfgdGbr = (reCfg->ulRecfg.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12376 ulLcg->effGbr = ulLcg->cfgdGbr;
12377 ulLcg->deltaMbr = ((reCfg->ulRecfg.mbr - reCfg->ulRecfg.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12378 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12381 if(TRUE == ue->isEmtcUe)
12383 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12392 ret = cellSch->apisUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12398 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12400 /* Indicate MAC that this LCG is GBR LCG */
12401 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, TRUE);
12405 /* In case of RAB modification */
/* De-register as GBR when the reconfigured LCG is no longer GBR. */
12406 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, FALSE);
12411 /***********************************************************
12413 * Func : rgSCHCmnRgrLchDel
12415 * Desc : Scheduler handling for a (dedicated)
12416 * uplink logical channel being deleted.
12423 **********************************************************/
/* NOTE(review): elided extract; lines kept verbatim. */
/* Purpose: delete a dedicated UL logical channel — delegates to the EMTC
 * or legacy UL scheduler's LCH-delete API. */
12425 PUBLIC S16 rgSCHCmnRgrLchDel
12433 PUBLIC S16 rgSCHCmnRgrLchDel(cell, ue, lcId, lcgId)
12440 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12441 TRC2(rgSCHCmnRgrLchDel);
12443 if(TRUE == ue->isEmtcUe)
12445 cellSch->apisEmtcUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12450 cellSch->apisUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12455 /***********************************************************
12457 * Func : rgSCHCmnLcgDel
12459 * Desc : Scheduler handling for a (dedicated)
12460 * uplink logical channel being deleted.
12468 **********************************************************/
/* NOTE(review): elided extract; lines kept verbatim. */
/* Purpose: delete an UL logical channel group — de-register GBR status
 * with MAC, clean up SPS state if enabled, zero the LCG accounting fields
 * and let the UL (or EMTC) scheduler free its part. The control block
 * itself is freed later at UE deletion (see comment below). */
12470 PUBLIC Void rgSCHCmnLcgDel
12477 PUBLIC Void rgSCHCmnLcgDel(cell, ue, lcg)
12483 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12484 RgSchCmnLcg *lcgCmn = RG_SCH_CMN_GET_UL_LCG(lcg);
12485 TRC2(rgSCHCmnLcgDel);
12487 if (lcgCmn == NULLP)
12492 if (RGSCH_IS_GBR_BEARER(lcgCmn->cfgdGbr))
12494 /* Indicate MAC that this LCG is GBR LCG */
12495 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcg->lcgId, FALSE);
12499 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
12501 rgSCHCmnSpsUlLcgDel(cell, ue, lcg);
12503 #endif /* LTEMAC_SPS */
12505 lcgCmn->effGbr = 0;
12506 lcgCmn->reportedBs = 0;
12507 lcgCmn->cfgdGbr = 0;
12508 /* set lcg bs to 0. Deletion of control block happens
12509 * at the time of UE deletion. */
12512 if(TRUE == ue->isEmtcUe)
12514 cellSch->apisEmtcUl->rgSCHFreeUlLcg(cell, ue, lcg);
12519 cellSch->apisUl->rgSCHFreeUlLcg(cell, ue, lcg);
12526 * @brief This function deletes a service from scheduler.
12530 * Function: rgSCHCmnFreeDlLc
12531 * Purpose: This function is made available through a FP for
12532 * making scheduler aware of a service being deleted from UE.
12534 * Invoked by: BO and Scheduler
12536 * @param[in] RgSchCellCb* cell
12537 * @param[in] RgSchUeCb* ue
12538 * @param[in] RgSchDlLcCb* svc
/* Releases all scheduler-side context tied to a DL logical channel:
 * type-specific scheduler free, SCell LC delete, DL SPS cleanup, the
 * svc->sch common control block, and LAA LCH de-init. */
12543 PUBLIC Void rgSCHCmnFreeDlLc
12550 PUBLIC Void rgSCHCmnFreeDlLc(cell, ue, svc)
12556 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12557 TRC2(rgSCHCmnFreeDlLc);
/* Guard: scheduler context was never allocated for this service */
12558 if (svc->sch == NULLP)
/* Dispatch to eMTC DL scheduler only when the cell supports eMTC AND the
 * UE is an eMTC UE; legacy DL scheduler otherwise */
12563 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12565 cellSch->apisEmtcDl->rgSCHFreeDlLc(cell, ue, svc);
12570 cellSch->apisDl->rgSCHFreeDlLc(cell, ue, svc);
/* Remove any secondary-cell association of this logical channel */
12576 rgSCHSCellDlLcDel(cell, ue, svc);
12581 /* If SPS service, invoke SPS module */
12582 if (svc->dlLcSpsCfg.isSpsEnabled)
12584 rgSCHCmnSpsDlLcDel(cell, ue, svc);
12588 /* ccpu00117052 - MOD - Passing double pointer
12589 for proper NULLP assignment*/
/* Free the common DL service context; svc->sch is NULLed by the utility */
12590 rgSCHUtlFreeSBuf(cell->instIdx,
12591 (Data**)(&(svc->sch)), (sizeof(RgSchCmnDlSvc)));
12594 rgSCHLaaDeInitDlLchCb(cell, svc);
12603 * @brief This function Processes the Final Allocations
12604 * made by the RB Allocator against the requested
12605 * CCCH SDURetx Allocations.
12609 * Function: rgSCHCmnDlCcchSduRetxFnlz
12610 * Purpose: This function Processes the Final Allocations
12611 * made by the RB Allocator against the requested
12612 * CCCH Retx Allocations.
12613 * Scans through the scheduled list of ccchSdu retrans
12614 * fills the corresponding pdcch, adds the hqProc to
12615 * the corresponding SubFrm and removes the hqP from
12618 * Invoked by: Common Scheduler
12620 * @param[in] RgSchCellCb *cell
12621 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12626 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz
12629 RgSchCmnDlRbAllocInfo *allocInfo
12632 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo)
12634 RgSchCmnDlRbAllocInfo *allocInfo;
12638 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12639 RgSchDlRbAlloc *rbAllocInfo;
12640 RgSchDlHqProcCb *hqP;
12642 TRC2(rgSCHCmnDlCcchSduRetxFnlz);
12644 /* Traverse through the Scheduled Retx List */
12645 node = allocInfo->ccchSduAlloc.schdCcchSduRetxLst.first;
/* For each scheduled CCCH-SDU retx HARQ process: fill PDCCH/HARQ info,
 * detach it from the cell-level retx pending list, and clear the UE's
 * temporary allocation state */
12648 hqP = (RgSchDlHqProcCb *)(node->node);
/* NOTE(review): `ue` used below is presumably derived from hqP->hqE->ue
 * on an elided line — confirm against the full source */
12650 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
12652 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12654 /* Remove the HqP from cell's ccchSduRetxLst */
12655 cmLListDelFrm(&cmnCellDl->ccchSduRetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12656 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12658 /* Fix: syed dlAllocCb reset should be performed.
12659 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12660 rgSCHCmnDlUeResetTemp(ue, hqP);
12662 /* Fix: syed dlAllocCb reset should be performed.
12663 * zombie info in dlAllocCb leading to crash rbNum wraparound */
/* Non-scheduled retx candidates: only reset their temporary allocation
 * state; they stay on the cell retx list for a later TTI */
12664 node = allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst.first;
12667 hqP = (RgSchDlHqProcCb *)(node->node);
12670 /* reset the UE allocation Information */
12671 rgSCHCmnDlUeResetTemp(ue, hqP);
12677 * @brief This function Processes the Final Allocations
12678 * made by the RB Allocator against the requested
12679 * CCCH Retx Allocations.
12683 * Function: rgSCHCmnDlCcchRetxFnlz
12684 * Purpose: This function Processes the Final Allocations
12685 * made by the RB Allocator against the requested
12686 * CCCH Retx Allocations.
12687 * Scans through the scheduled list of msg4 retrans
12688 * fills the corresponding pdcch, adds the hqProc to
12689 * the corresponding SubFrm and removes the hqP from
12692 * Invoked by: Common Scheduler
12694 * @param[in] RgSchCellCb *cell
12695 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12700 PRIVATE Void rgSCHCmnDlCcchRetxFnlz
12703 RgSchCmnDlRbAllocInfo *allocInfo
12706 PRIVATE Void rgSCHCmnDlCcchRetxFnlz(cell, allocInfo)
12708 RgSchCmnDlRbAllocInfo *allocInfo;
12712 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12713 RgSchDlRbAlloc *rbAllocInfo;
12714 RgSchDlHqProcCb *hqP;
12716 TRC2(rgSCHCmnDlCcchRetxFnlz);
12718 /* Traverse through the Scheduled Retx List */
12719 node = allocInfo->msg4Alloc.schdMsg4RetxLst.first;
/* Scheduled Msg4 retransmissions: the allocation CB lives in the RA
 * control block (raCb), not in a UE CB — the UE does not exist yet at
 * Msg4 time */
12722 hqP = (RgSchDlHqProcCb *)(node->node);
12723 raCb = hqP->hqE->raCb;
12724 rbAllocInfo = &raCb->rbAllocInfo;
12726 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12728 /* Remove the HqP from cell's msg4RetxLst */
12729 cmLListDelFrm(&cmnCellDl->msg4RetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12730 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12731 /* Fix: syed dlAllocCb reset should be performed.
12732 * zombie info in dlAllocCb leading to crash rbNum wraparound */
/* Wipe the per-RA allocation CB so stale rbNum etc. cannot leak into the
 * next scheduling round */
12733 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12734 rgSCHCmnDlHqPResetTemp(hqP);
12736 /* Fix: syed dlAllocCb reset should be performed.
12737 * zombie info in dlAllocCb leading to crash rbNum wraparound */
/* Non-scheduled Msg4 retx: reset state only; they remain queued for retx */
12738 node = allocInfo->msg4Alloc.nonSchdMsg4RetxLst.first;
12741 hqP = (RgSchDlHqProcCb *)(node->node);
12742 raCb = hqP->hqE->raCb;
12744 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12745 rgSCHCmnDlHqPResetTemp(hqP);
12752 * @brief This function Processes the Final Allocations
12753 * made by the RB Allocator against the requested
12754 * CCCH SDU tx Allocations.
12758 * Function: rgSCHCmnDlCcchSduTxFnlz
12759 * Purpose: This function Processes the Final Allocations
12760 * made by the RB Allocator against the requested
12761 * CCCH tx Allocations.
12762 * Scans through the scheduled list of CCCH SDU trans
12763 * fills the corresponding pdcch, adds the hqProc to
12764 * the corresponding SubFrm and removes the hqP from
12767 * Invoked by: Common Scheduler
12769 * @param[in] RgSchCellCb *cell
12770 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12775 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz
12778 RgSchCmnDlRbAllocInfo *allocInfo
12781 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo)
12783 RgSchCmnDlRbAllocInfo *allocInfo;
12788 RgSchDlRbAlloc *rbAllocInfo;
12789 RgSchDlHqProcCb *hqP;
12790 RgSchLchAllocInfo lchSchdData;
12791 TRC2(rgSCHCmnDlCcchSduTxFnlz);
12793 /* Traverse through the Scheduled Retx List */
12794 node = allocInfo->ccchSduAlloc.schdCcchSduTxLst.first;
/* Scheduled first-transmissions of CCCH SDU: a UE CB exists here (unlike
 * Msg4), so allocation info is fetched from the UE */
12797 hqP = (RgSchDlHqProcCb *)(node->node);
12798 ueCb = hqP->hqE->ue;
12800 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
12802 /* fill the pdcch and HqProc */
12803 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12805 /* Remove the raCb from cell's toBeSchdLst */
12806 cmLListDelFrm(&cell->ccchSduUeLst, &ueCb->ccchSduLnk);
12807 ueCb->ccchSduLnk.node = (PTR)NULLP;
12809 /* Fix : Resetting this required to avoid complication
12810 * in reestablishment case */
12811 ueCb->dlCcchInfo.bo = 0;
12813 /* Indicate DHM of the CCCH LC scheduling */
/* No contention-resolution CE on this path (plain CCCH SDU tx) */
12814 hqP->tbInfo[0].contResCe = NOTPRSNT;
/* lcId 0 identifies the CCCH logical channel toward DHM */
12815 lchSchdData.lcId = 0;
12816 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12817 (RGSCH_MSG4_HDRSIZE);
12818 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12820 /* Fix: syed dlAllocCb reset should be performed.
12821 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12822 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12824 /* Fix: syed dlAllocCb reset should be performed.
12825 * zombie info in dlAllocCb leading to crash rbNum wraparound */
/* Non-scheduled CCCH SDU tx: release the HARQ process (TB0 only) and
 * reset the UE's temporary allocation state */
12826 node = allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst.first;
12829 hqP = (RgSchDlHqProcCb *)(node->node);
12830 ueCb = hqP->hqE->ue;
12832 /* Release HqProc */
12833 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12834 /*Fix: Removing releasing of TB1 as it will not exist for CCCH SDU and hence caused a crash*/
12835 /*rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12836 /* reset the UE allocation Information */
12837 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12844 * @brief This function Processes the Final Allocations
12845 * made by the RB Allocator against the requested
12846 * CCCH tx Allocations.
12850 * Function: rgSCHCmnDlCcchTxFnlz
12851 * Purpose: This function Processes the Final Allocations
12852 * made by the RB Allocator against the requested
12853 * CCCH tx Allocations.
12854 * Scans through the scheduled list of msg4 trans
12855 * fills the corresponding pdcch, adds the hqProc to
12856 * the corresponding SubFrm and removes the hqP from
12859 * Invoked by: Common Scheduler
12861 * @param[in] RgSchCellCb *cell
12862 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12867 PRIVATE Void rgSCHCmnDlCcchTxFnlz
12870 RgSchCmnDlRbAllocInfo *allocInfo
12873 PRIVATE Void rgSCHCmnDlCcchTxFnlz(cell, allocInfo)
12875 RgSchCmnDlRbAllocInfo *allocInfo;
12880 RgSchDlRbAlloc *rbAllocInfo;
12881 RgSchDlHqProcCb *hqP;
12882 RgSchLchAllocInfo lchSchdData;
12883 TRC2(rgSCHCmnDlCcchTxFnlz);
12885 /* Traverse through the Scheduled Retx List */
12886 node = allocInfo->msg4Alloc.schdMsg4TxLst.first;
/* Scheduled first-transmissions of Msg4: allocation CB lives in the RA
 * control block since no UE CB exists yet */
12889 hqP = (RgSchDlHqProcCb *)(node->node);
12890 raCb = hqP->hqE->raCb;
12892 rbAllocInfo = &raCb->rbAllocInfo;
12894 /* fill the pdcch and HqProc */
12895 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12896 /* MSG4 Fix Start */
/* Msg4 is now committed — remove the RA CB from the to-be-scheduled list */
12898 rgSCHRamRmvFrmRaInfoSchdLst(cell, raCb);
12901 /* Indicate DHM of the CCCH LC scheduling */
12902 lchSchdData.lcId = 0;
/* Msg4 payload excludes the MAC header and contention-resolution CE size */
12903 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12904 (RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE);
12905 /* TRansmitting presence of cont Res CE across MAC-SCH interface to
12906 * identify CCCH SDU transmissions which need to be done
12908 * contention resolution CE*/
12909 hqP->tbInfo[0].contResCe = PRSNT_NODEF;
12910 /*Dont add lc if only cont res CE is being transmitted*/
12911 if(raCb->dlCcchInfo.bo)
12913 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12918 /* Fix: syed dlAllocCb reset should be performed.
12919 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12920 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12921 rgSCHCmnDlHqPResetTemp(hqP);
/* Non-scheduled Msg4 tx: release the HARQ process (TB0 only) and wipe the
 * RA allocation CB so no stale allocation state survives */
12923 node = allocInfo->msg4Alloc.nonSchdMsg4TxLst.first;
12926 hqP = (RgSchDlHqProcCb *)(node->node);
12927 raCb = hqP->hqE->raCb;
12929 rbAllocInfo = &raCb->rbAllocInfo;
12930 /* Release HqProc */
12931 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12932 /*Fix: Removing releasing of TB1 as it will not exist for MSG4 and hence caused a crash*/
12933 /* rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12934 /* reset the UE allocation Information */
12935 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12936 rgSCHCmnDlHqPResetTemp(hqP);
12943 * @brief This function calculates the BI Index to be sent in the Bi header
12947 * Function: rgSCHCmnGetBiIndex
12948 * Purpose: This function Processes utilizes the previous BI time value
12949 * calculated and the difference last BI sent time and current time. To
12950 * calculate the latest BI Index. It also considers the how many UE's
12951 * Unserved in this subframe.
12953 * Invoked by: Common Scheduler
12955 * @param[in] RgSchCellCb *cell
12956 * @param[in] U32 ueCount
12961 PUBLIC U8 rgSCHCmnGetBiIndex
12967 PUBLIC U8 rgSCHCmnGetBiIndex(cell, ueCount)
12972 S16 prevVal = 0; /* To Store Intermediate Value */
12973 U16 newBiVal = 0; /* To store Bi Value in millisecond */
12977 TRC2(rgSCHCmnGetBiIndex)
/* If a BI was sent before, decay the previous BI value by the elapsed
 * time (in subframes) since it was sent */
12979 if (cell->biInfo.prevBiTime != 0)
12982 if(cell->emtcEnable == TRUE)
12984 timeDiff =(RGSCH_CALC_SF_DIFF_EMTC(cell->crntTime, cell->biInfo.biTime));
12989 timeDiff =(RGSCH_CALC_SF_DIFF(cell->crntTime, cell->biInfo.biTime));
/* prevVal may go negative when more time elapsed than the previous BI;
 * signed S16 intentionally absorbs that */
12992 prevVal = cell->biInfo.prevBiTime - timeDiff;
/* Scale the residual backoff by the number of still-unserved UEs */
12998 newBiVal = RG_SCH_CMN_GET_BI_VAL(prevVal,ueCount);
12999 /* To be used next time when BI is calculated */
13001 if(cell->emtcEnable == TRUE)
13003 RGSCHCPYTIMEINFO_EMTC(cell->crntTime, cell->biInfo.biTime)
13008 RGSCHCPYTIMEINFO(cell->crntTime, cell->biInfo.biTime)
13011 /* Search the actual BI Index from table Backoff Parameters Value and
13012 * return that Index */
/* Linear scan of rgSchCmnBiTbl (36.321 Table 7.2-1 backoff values) for
 * the first entry >= newBiVal; loop-control lines elided in this extract */
13015 if (rgSchCmnBiTbl[idx] > newBiVal)
13020 }while(idx < RG_SCH_CMN_NUM_BI_VAL-1);
/* Remember the BI value (ms) actually advertised, for the next decay */
13021 cell->biInfo.prevBiTime = rgSchCmnBiTbl[idx];
13022 /* For 16 Entries in Table 7.2.1 36.321.880 - 3 reserved so total 13 Entries */
13023 RETVALUE(idx); /* Returning reserved value from table UE treats it has 960 ms */
13024 } /* rgSCHCmnGetBiIndex */
13028 * @brief This function Processes the Final Allocations
13029 * made by the RB Allocator against the requested
13030 * RAR allocations. Assumption: The reuqested
13031 * allocations are always satisfied completely.
13032 * Hence no roll back.
13036 * Function: rgSCHCmnDlRaRspFnlz
13037 * Purpose: This function Processes the Final Allocations
13038 * made by the RB Allocator against the requested.
13039 * Takes care of PDCCH filling.
13041 * Invoked by: Common Scheduler
13043 * @param[in] RgSchCellCb *cell
13044 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
13049 PRIVATE Void rgSCHCmnDlRaRspFnlz
13052 RgSchCmnDlRbAllocInfo *allocInfo
13055 PRIVATE Void rgSCHCmnDlRaRspFnlz(cell, allocInfo)
13057 RgSchCmnDlRbAllocInfo *allocInfo;
13061 RgSchDlRbAlloc *raRspAlloc;
13062 RgSchDlSf *subFrm = NULLP;
13066 RgSchRaReqInfo *raReq;
13068 RgSchUlAlloc *ulAllocRef=NULLP;
13069 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13070 U8 allocRapidCnt = 0;
13072 U32 msg3SchdIdx = 0;
13073 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
13077 TRC2(rgSCHCmnDlRaRspFnlz);
/* One iteration per potential common-PDCCH RAR allocation this TTI */
13079 for (rarCnt=0; rarCnt<RG_SCH_CMN_MAX_CMN_PDCCH; rarCnt++)
13081 raRspAlloc = &allocInfo->raRspAlloc[rarCnt];
13082 /* Having likely condition first for optimization */
/* No PDCCH granted for this RAR slot — nothing to finalize */
13083 if (!raRspAlloc->pdcch)
13089 subFrm = raRspAlloc->dlSf;
13090 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
13091 /* Corrected RACH handling for multiple RAPIDs per RARNTI */
/* Consume as many RA requests (RAPIDs) as were granted for this RA-RNTI */
13092 allocRapidCnt = raRspAlloc->numRapids;
13093 while (allocRapidCnt)
13095 raReq = (RgSchRaReqInfo *)(reqLst->first->node);
13096 /* RACHO: If dedicated preamble, then allocate UL Grant
13097 * (consequence of handover/pdcchOrder) and continue */
13098 if (RGSCH_IS_DEDPRM(cell, raReq->raReq.rapId))
13100 rgSCHCmnHdlHoPo(cell, &subFrm->raRsp[rarCnt].contFreeUeLst,
13102 cmLListDelFrm(reqLst, reqLst->first);
13104 /* ccpu00117052 - MOD - Passing double pointer
13105 for proper NULLP assignment*/
13106 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13107 sizeof(RgSchRaReqInfo));
/* RACH overload: drop the request without building a RAR entry; the
 * backoff indicator set further below throttles the UEs instead */
13111 if(cell->overLoadBackOffEnab)
13112 {/* rach Overlaod conrol is triggerd, Skipping this rach */
13113 cmLListDelFrm(reqLst, reqLst->first);
13115 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13116 sizeof(RgSchRaReqInfo));
13119 /* Attempt to include each RA request into the RSP */
13120 /* Any failure in the procedure is considered to */
13121 /* affect futher allocations in the same TTI. When */
13122 /* a failure happens, we break out and complete */
13123 /* the processing for random access */
13124 if (rgSCHRamCreateRaCb(cell, &raCb, &err) != ROK)
13128 /* Msg3 allocation request to USM */
/* Group A preamble check decides the Msg3 grant size class */
13129 if (raReq->raReq.rapId < cell->rachCfg.sizeRaPreambleGrpA)
13133 /*ccpu00128820 - MOD - Msg3 alloc double delete issue*/
13134 rgSCHCmnMsg3GrntReq(cell, raCb->tmpCrnti, preamGrpA, \
13135 &(raCb->msg3HqProc), &ulAllocRef, &raCb->msg3HqProcId);
/* No UL resources for Msg3 — roll back the freshly created RA CB */
13136 if (ulAllocRef == NULLP)
13138 rgSCHRamDelRaCb(cell, raCb, TRUE);
/* Use the CQI estimated from the preamble if reported, else the
 * cell-default CCCH CQI */
13141 if (raReq->raReq.cqiPres)
13143 raCb->ccchCqi = raReq->raReq.cqiIdx;
13147 raCb->ccchCqi = cellDl->ccchCqi;
13149 raCb->rapId = raReq->raReq.rapId;
13150 raCb->ta.pres = TRUE;
13151 raCb->ta.val = raReq->raReq.ta;
13152 raCb->msg3Grnt = ulAllocRef->grnt;
13153 /* Populating the tpc value received */
13154 raCb->msg3Grnt.tpc = raReq->raReq.tpc;
13155 /* PHR handling for MSG3 */
13156 ulAllocRef->raCb = raCb;
13158 /* To the crntTime, add the MIN time at which UE will
13159 * actually send MSG3 i.e DL_DELTA+6 */
13160 raCb->msg3AllocTime = cell->crntTime;
13161 RGSCH_INCR_SUB_FRAME(raCb->msg3AllocTime, RG_SCH_CMN_MIN_MSG3_RECP_INTRVL);
/* TDD-specific Msg3 timing: remap via the Msg3 subframe table when the
 * RAR lands on a non-UL subframe (guarded by a TDD #ifdef elided here) */
13163 msg3SchdIdx = (cell->crntTime.slot+RG_SCH_CMN_DL_DELTA) %
13164 RGSCH_NUM_SUB_FRAMES;
13165 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
13166 special subframe */
13167 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][msg3SchdIdx] !=
13168 RG_SCH_TDD_UL_SUBFRAME)
13170 RGSCHCMNADDTOCRNTTIME(cell->crntTime,raCb->msg3AllocTime,
13171 RG_SCH_CMN_DL_DELTA)
13172 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][
13173 raCb->msg3AllocTime.slot];
13174 RGSCHCMNADDTOCRNTTIME(raCb->msg3AllocTime, raCb->msg3AllocTime,
/* RA CB accepted: queue it on this RAR occasion and release the request */
13178 cmLListAdd2Tail(&subFrm->raRsp[rarCnt].raRspLst, &raCb->rspLnk);
13179 raCb->rspLnk.node = (PTR)raCb;
13180 cmLListDelFrm(reqLst, reqLst->first);
13182 /* ccpu00117052 - MOD - Passing double pointer
13183 for proper NULLP assignment*/
13184 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13185 sizeof(RgSchRaReqInfo));
13187 /* SR_RACH_STATS : RAR scheduled */
13192 /* Fill subframe data members */
13193 subFrm->raRsp[rarCnt].raRnti = raRspAlloc->rnti;
13194 subFrm->raRsp[rarCnt].pdcch = raRspAlloc->pdcch;
13195 subFrm->raRsp[rarCnt].tbSz = raRspAlloc->tbInfo[0].bytesAlloc;
13196 /* Fill PDCCH data members */
13197 rgSCHCmnFillPdcch(cell, subFrm->raRsp[rarCnt].pdcch, raRspAlloc);
/* Overload: advertise the configured backoff value in this RAR */
13200 if(cell->overLoadBackOffEnab)
13201 {/* rach Overlaod conrol is triggerd, Skipping this rach */
13202 subFrm->raRsp[rarCnt].backOffInd.pres = PRSNT_NODEF;
13203 subFrm->raRsp[rarCnt].backOffInd.val = cell->overLoadBackOffval;
13208 subFrm->raRsp[rarCnt].backOffInd.pres = NOTPRSNT;
13211 /*[ccpu00125212] Avoiding sending of empty RAR in case of RAR window
13212 is short and UE is sending unauthorised preamble.*/
13213 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
/* BI estimation requested and requests still pending: compute a dynamic
 * backoff index from the unserved-UE count */
13214 if ((raRspAlloc->biEstmt) && (reqLst->count))
13216 subFrm->raRsp[0].backOffInd.pres = PRSNT_NODEF;
13217 /* Added as part of Upgrade */
13218 subFrm->raRsp[0].backOffInd.val =
13219 rgSCHCmnGetBiIndex(cell, reqLst->count);
13221 /* SR_RACH_STATS : Back Off Inds */
/* Nothing ended up in this RAR (no RA responses, no contention-free
 * UEs): give the PDCCH back so it is not wasted */
13225 else if ((subFrm->raRsp[rarCnt].raRspLst.first == NULLP) &&
13226 (subFrm->raRsp[rarCnt].contFreeUeLst.first == NULLP))
13228 /* Return the grabbed PDCCH */
13229 rgSCHUtlPdcchPut(cell, &subFrm->pdcchInfo, raRspAlloc->pdcch);
13230 subFrm->raRsp[rarCnt].pdcch = NULLP;
13231 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRaRspAlloc(): "
13232 "Not even one RaReq.");
13236 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId,
13237 "RNTI:%d Scheduled RAR @ (%u,%u) ",
13239 cell->crntTime.sfn,
13240 cell->crntTime.slot);
13246 * @brief This function computes rv.
13250 * Function: rgSCHCmnDlCalcRvForBcch
13251 * Purpose: This function computes rv.
13253 * Invoked by: Common Scheduler
13255 * @param[in] RgSchCellCb *cell
13256 * @param[in] Bool si
/* Computes the DL-SCH redundancy version for BCCH transmissions using the
 * 36.213 rule RV_k = ceil(3/2 * k) mod 4. For SIB1 (si == FALSE) k is
 * derived from the SFN (k = (SFN/2) mod 4); the si/i parameters select
 * k for other SI messages on an elided branch — confirm in full source. */
13262 PRIVATE U8 rgSCHCmnDlCalcRvForBcch
13269 PRIVATE U8 rgSCHCmnDlCalcRvForBcch(cell, si, i)
13276 CmLteTimingInfo frm;
13277 TRC2(rgSCHCmnDlCalcRvForBcch);
/* Work on the transmission subframe, i.e. current time + DL delta */
13279 frm = cell->crntTime;
13280 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
13288 k = (frm.sfn/2) % 4;
13290 rv = RGSCH_CEIL(3*k, 2) % 4;
13295 * @brief This function Processes the Final Allocations
13296 * made by the RB Allocator against the requested
13297 * BCCH/PCCH allocations. Assumption: The reuqested
13298 * allocations are always satisfied completely.
13299 * Hence no roll back.
13303 * Function: rgSCHCmnDlBcchPcchFnlz
13304 * Purpose: This function Processes the Final Allocations
13305 * made by the RB Allocator against the requested.
13306 * Takes care of PDCCH filling.
13308 * Invoked by: Common Scheduler
13310 * @param[in] RgSchCellCb *cell
13311 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
13316 PRIVATE Void rgSCHCmnDlBcchPcchFnlz
13319 RgSchCmnDlRbAllocInfo *allocInfo
13322 PRIVATE Void rgSCHCmnDlBcchPcchFnlz(cell, allocInfo)
13324 RgSchCmnDlRbAllocInfo *allocInfo;
13327 RgSchDlRbAlloc *rbAllocInfo;
/* nextSfIdx variants below are alternatives under elided preprocessor
 * guards (TDD vs LTEMAC_HDFDD vs default) — only one is compiled in */
13331 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
13333 #ifdef LTEMAC_HDFDD
13334 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13336 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13340 /* Moving variables to available scope for optimization */
13341 RgSchClcDlLcCb *pcch;
13344 RgSchClcDlLcCb *bcch;
13347 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13349 TRC2(rgSCHCmnDlBcchPcchFnlz);
/* ---- PCCH (paging) finalization: only when a PDCCH was granted ---- */
13352 rbAllocInfo = &allocInfo->pcchAlloc;
13353 if (rbAllocInfo->pdcch)
13355 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13357 /* Added sfIdx calculation for TDD as well */
13359 #ifdef LTEMAC_HDFDD
13360 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13362 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13365 subFrm = rbAllocInfo->dlSf;
13366 pcch = rgSCHDbmGetPcch(cell);
/* NOTE(review): this log presumably sits under a NULL-check of `pcch`
 * (guard lines elided in this extract) — confirm */
13369 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnDlBcchPcchFnlz( ): "
13370 "No Pcch Present");
13374 /* Added Dl TB count for paging message transmission*/
13376 cell->dlUlTbCnt.tbTransDlTotalCnt++;
/* Paging BO report is one-shot: consume and free the head entry */
13378 bo = (RgSchClcBoRpt *)pcch->boLst.first->node;
13379 cmLListDelFrm(&pcch->boLst, &bo->boLstEnt);
13380 /* ccpu00117052 - MOD - Passing double pointer
13381 for proper NULLP assignment*/
13382 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13383 /* Fill subframe data members */
13384 subFrm->pcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13385 subFrm->pcch.pdcch = rbAllocInfo->pdcch;
13386 /* Fill PDCCH data members */
13387 rgSCHCmnFillPdcch(cell, subFrm->pcch.pdcch, rbAllocInfo);
13388 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, pcch->lcId, TRUE);
13389 /* ccpu00132314-ADD-Update the tx power allocation info
13390 TODO-Need to add a check for max tx power per symbol */
13391 subfrmAlloc->cmnLcInfo.pcchInfo.txPwrOffset = cellDl->pcchTxPwrOffset;
/* ---- BCCH (SIB1 / SI) finalization: only when a PDCCH was granted ---- */
13395 rbAllocInfo = &allocInfo->bcchAlloc;
13396 if (rbAllocInfo->pdcch)
13398 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13400 #ifdef LTEMAC_HDFDD
13401 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13403 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13406 subFrm = rbAllocInfo->dlSf;
13408 /* Fill subframe data members */
13409 subFrm->bcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13410 subFrm->bcch.pdcch = rbAllocInfo->pdcch;
13411 /* Fill PDCCH data members */
13412 rgSCHCmnFillPdcch(cell, subFrm->bcch.pdcch, rbAllocInfo);
/* schdFirst distinguishes SIB1 (first BCCH-on-DLSCH) from other SI */
13414 if(rbAllocInfo->schdFirst)
13417 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
13418 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
13420 /*Copy the SIB1 msg buff into interface buffer */
13421 SCpyMsgMsg(cell->siCb.crntSiInfo.sib1Info.sib1,
13422 rgSchCb[cell->instIdx].rgSchInit.region,
13423 rgSchCb[cell->instIdx].rgSchInit.pool,
13424 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13425 #endif/*RGR_SI_SCH*/
/* SIB1 RV follows the SFN-based pattern (si == FALSE, i unused) */
13426 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13427 rgSCHCmnDlCalcRvForBcch(cell, FALSE, 0);
/* Non-SIB1 SI path: use the SI scheduling context (siCtx) */
13435 i = cell->siCb.siCtx.i;
13436 /*Decrement the retransmission count */
13437 cell->siCb.siCtx.retxCntRem--;
13439 /*Copy the SI msg buff into interface buffer */
13440 if(cell->siCb.siCtx.warningSiFlag == FALSE)
13442 SCpyMsgMsg(cell->siCb.siArray[cell->siCb.siCtx.siId-1].si,
13443 rgSchCb[cell->instIdx].rgSchInit.region,
13444 rgSchCb[cell->instIdx].rgSchInit.pool,
13445 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
/* Warning SI (ETWS/CMAS-style) PDUs come from a separate queue */
13449 pdu = rgSCHUtlGetWarningSiPdu(cell);
13450 RGSCH_NULL_CHECK(cell->instIdx, pdu);
13452 rgSchCb[cell->instIdx].rgSchInit.region,
13453 rgSchCb[cell->instIdx].rgSchInit.pool,
13454 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
/* Last retransmission of the warning SI: release the PDU and clear flag */
13455 if(cell->siCb.siCtx.retxCntRem == 0)
13457 rgSCHUtlFreeWarningSiPdu(cell);
13458 cell->siCb.siCtx.warningSiFlag = FALSE;
13463 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
13464 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
/* Suppress the indication to MAC for pure retransmissions (sendInd set on
 * an elided line); only the first tx of an SI window is indicated */
13466 if(bo->retxCnt != cell->siCfg.retxCnt-1)
13471 #endif/*RGR_SI_SCH*/
13472 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13473 rgSCHCmnDlCalcRvForBcch(cell, TRUE, i);
13476 /* Added Dl TB count for SIB1 and SI messages transmission.
13477 * This counter will be incremented only for the first transmission
13478 * (with RV 0) of these messages*/
13480 if(subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv == 0)
13482 cell->dlUlTbCnt.tbTransDlTotalCnt++;
/* BO entry is freed only after its final (re)transmission */
13486 if(bo->retxCnt == 0)
13488 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
13489 /* ccpu00117052 - MOD - Passing double pointer
13490 for proper NULLP assignment*/
13491 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13493 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, bcch->lcId, sendInd);
13495 /*Fill the interface info */
13496 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, NULLD, NULLD);
13498 /* ccpu00132314-ADD-Update the tx power allocation info
13499 TODO-Need to add a check for max tx power per symbol */
13500 subfrmAlloc->cmnLcInfo.bcchInfo.txPwrOffset = cellDl->bcchTxPwrOffset;
13502 /*mBuf has been already copied above */
13503 #endif/*RGR_SI_SCH*/
13516 * Function: rgSCHCmnUlSetAllUnSched
13519 * Invoked by: Common Scheduler
13521 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
/* Moves every UE pending in the UL allocation lists (contention
 * resolution, retransmission, new transmission) to the corresponding
 * non-scheduled list — used when nothing can be scheduled this TTI.
 * Each loop re-reads `first` because the move unlinks the head node. */
13526 PRIVATE Void rgSCHCmnUlSetAllUnSched
13528 RgSchCmnUlRbAllocInfo *allocInfo
13531 PRIVATE Void rgSCHCmnUlSetAllUnSched(allocInfo)
13532 RgSchCmnUlRbAllocInfo *allocInfo;
13537 TRC2(rgSCHCmnUlSetAllUnSched);
13539 node = allocInfo->contResLst.first;
13542 rgSCHCmnUlMov2NonSchdCntResLst(allocInfo, (RgSchUeCb *)node->node);
13543 node = allocInfo->contResLst.first;
13546 node = allocInfo->retxUeLst.first;
13549 rgSCHCmnUlMov2NonSchdRetxUeLst(allocInfo, (RgSchUeCb *)node->node);
13550 node = allocInfo->retxUeLst.first;
13553 node = allocInfo->ueLst.first;
13556 rgSCHCmnUlMov2NonSchdUeLst(allocInfo, (RgSchUeCb *)node->node);
13557 node = allocInfo->ueLst.first;
13569 * Function: rgSCHCmnUlAdd2CntResLst
13572 * Invoked by: Common Scheduler
13574 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13575 * @param[in] RgSchUeCb *ue
/* Appends a UE to the UL contention-resolution allocation list, using the
 * UE's per-cell UL allocation link node (reqLnk). */
13580 PUBLIC Void rgSCHCmnUlAdd2CntResLst
13582 RgSchCmnUlRbAllocInfo *allocInfo,
13586 PUBLIC Void rgSCHCmnUlAdd2CntResLst(allocInfo, ue)
13587 RgSchCmnUlRbAllocInfo *allocInfo;
13591 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,ue->cell))->alloc);
13592 TRC2(rgSCHCmnUlAdd2CntResLst);
13593 cmLListAdd2Tail(&allocInfo->contResLst, &ulAllocInfo->reqLnk);
13594 ulAllocInfo->reqLnk.node = (PTR)ue;
13603 * Function: rgSCHCmnUlAdd2UeLst
13606 * Invoked by: Common Scheduler
13608 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13609 * @param[in] RgSchUeCb *ue
/* Appends a UE to the UL new-transmission allocation list. The
 * reqLnk.node == NULLP guard makes the add idempotent: a UE already
 * queued on some allocation list is not linked twice. */
13614 PUBLIC Void rgSCHCmnUlAdd2UeLst
13617 RgSchCmnUlRbAllocInfo *allocInfo,
13621 PUBLIC Void rgSCHCmnUlAdd2UeLst(cell, allocInfo, ue)
13623 RgSchCmnUlRbAllocInfo *allocInfo;
13627 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,cell))->alloc);
13628 TRC2(rgSCHCmnUlAdd2UeLst);
13629 if (ulAllocInfo->reqLnk.node == NULLP)
13631 cmLListAdd2Tail(&allocInfo->ueLst, &ulAllocInfo->reqLnk);
13632 ulAllocInfo->reqLnk.node = (PTR)ue;
13642 * Function: rgSCHCmnAllocUlRb
13643 * Purpose: To do RB allocations for uplink
13645 * Invoked by: Common Scheduler
13647 * @param[in] RgSchCellCb *cell
13648 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
/* Entry point for UL RB allocation in a TTI: delegates the per-UE work to
 * rgSCHCmnUlRbAllocForLst for the new-transmission UE list, splitting it
 * into scheduled and non-scheduled result lists. */
13652 PUBLIC Void rgSCHCmnAllocUlRb
13655 RgSchCmnUlRbAllocInfo *allocInfo
13658 PUBLIC Void rgSCHCmnAllocUlRb(cell, allocInfo)
13660 RgSchCmnUlRbAllocInfo *allocInfo;
13663 RgSchUlSf *sf = allocInfo->sf;
13664 TRC2(rgSCHCmnAllocUlRb);
13666 /* Schedule for new transmissions */
13667 rgSCHCmnUlRbAllocForLst(cell, sf, allocInfo->ueLst.count,
13668 &allocInfo->ueLst, &allocInfo->schdUeLst,
13669 &allocInfo->nonSchdUeLst, (Bool)TRUE);
13673 /***********************************************************
13675 * Func : rgSCHCmnUlRbAllocForLst
13677 * Desc : Allocate for a list in cmn rb alloc information passed
13686 **********************************************************/
/* Walks `count` UEs from reqLst, attempting an UL RB allocation for each
 * from the subframe's free-RB holes. Successfully allocated UEs go to
 * schdLst, failures to nonSchdLst. For new transmissions it also records
 * per-UE RNTI/PRB info and the PUSCH timing into the current sfAllocArr
 * entry. The second for-loop drains the remainder once holes run out. */
13688 PRIVATE Void rgSCHCmnUlRbAllocForLst
13694 CmLListCp *schdLst,
13695 CmLListCp *nonSchdLst,
13699 PRIVATE Void rgSCHCmnUlRbAllocForLst(cell, sf, count, reqLst, schdLst,
13700 nonSchdLst, isNewTx)
13705 CmLListCp *schdLst;
13706 CmLListCp *nonSchdLst;
13715 CmLteTimingInfo timeInfo;
13718 TRC2(rgSCHCmnUlRbAllocForLst);
/* Result lists are (re)initialized only when empty — retx entries queued
 * earlier in the TTI must not be wiped */
13720 if(schdLst->count == 0)
13722 cmLListInit(schdLst);
13725 cmLListInit(nonSchdLst);
13727 if(isNewTx == TRUE)
13729 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.numUes = (U8) count;
/* TDD path (guard elided): PUSCH tx time = ctrl time + k from the PUSCH
 * k-table; FDD path below uses a fixed PDCCH->PUSCH delta */
13731 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime, timeInfo, TFU_ULCNTRL_DLDELTA);
13732 k = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][timeInfo.subframe];
13733 RG_SCH_ADD_TO_CRNT_TIME(timeInfo,
13734 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo, k);
13736 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime,cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo,
13737 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
13742 for (lnk = reqLst->first; count; lnk = lnk->next, --count)
13744 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13745 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
/* No free hole left in the subframe: fall through to the drain loop */
13750 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
13755 ueUl->subbandShare = ueUl->subbandRequired;
13756 if(isNewTx == TRUE)
/* Cap the grant at the UE's configured 5GTF max PRB */
13758 maxRb = RGSCH_MIN((ueUl->subbandRequired * MAX_5GTF_VRBG_SIZE), ue->ue5gtfCb.maxPrb);
13760 ret = rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole);
/* NOTE(review): success/failure branching on `ret` relies on elided
 * lines — the two call groups below are the ROK / failure arms */
13763 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, schdLst);
13764 rgSCHCmnUlUeFillAllocInfo(cell, ue);
13768 gUl5gtfRbAllocFail++;
13769 #if defined (TENB_STATS) && defined (RG_5GTF)
13770 cell->tenbStats->sch.ul5gtfRbAllocFail++;
13772 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13773 ue->isMsg4PdcchWithCrnti = FALSE;
13774 ue->isSrGrant = FALSE;
13777 if(isNewTx == TRUE)
/* ulAllocInfo is filled back-to-front: index is the remaining count */
13779 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13780 ulAllocInfo[count - 1].rnti = ue->ueId;
13781 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13782 ulAllocInfo[count - 1].numPrb = ue->ul.nPrb;
13785 ueUl->subbandShare = 0; /* This reset will take care of
13786 * all scheduler types */
/* Drain: remaining UEs (no holes left) are marked non-scheduled */
13788 for (; count; lnk = lnk->next, --count)
13790 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13791 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13792 ue->isMsg4PdcchWithCrnti = FALSE;
13798 /***********************************************************
13800 * Func : rgSCHCmnUlMdfyGrntForCqi
13802 * Desc : Modify UL Grant to consider presence of
13803 * CQI along with PUSCH Data.
13808 * - Scale down iTbs based on betaOffset and
13809 * size of Acqi Size.
13810 * - Optionally attempt to increase numSb by 1
13811 * if input payload size does not fit in due
13812 * to reduced tbSz as a result of iTbsNew.
13816 **********************************************************/
/* Iteratively reshapes an UL grant so that, after REs are stolen for
 * aperiodic CQI/RI and HARQ feedback (scaled by their beta offsets),
 * the effective PUSCH data coding efficiency stays below effTgt.
 * Alternates between adding one subband (up to maxRb) and stepping
 * iTbs down by stepDownItbs; numSb/iTbs are in-out parameters updated
 * with the converged values (the write-back lines are elided here). */
13819 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi
13831 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi(cell, ue, maxRb, numSb, iTbs, hqSz, stepDownItbs, effTgt)
13842 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(ue->cell);
13847 U32 remREsForPusch;
13850 U32 betaOffVal = ue->ul.betaOffstVal;
13851 U32 cqiRiRptSz = ue->ul.cqiRiSz;
13852 U32 betaOffHqVal = rgSchCmnBetaHqOffstTbl[ue->ul.betaHqOffst];
13853 U32 resNumSb = *numSb;
13854 U32 puschEff = 1000;
13857 Bool mdfyiTbsFlg = FALSE;
13858 U8 resiTbs = *iTbs;
13860 TRC2(rgSCHCmnUlMdfyGrntForCqi)
/* Derive the modulation order for the candidate iTbs; cap at QPSK for
 * UE categories below 5 (no UL 64QAM support), 64QAM for cat-5 */
13865 iMcs = rgSCHCmnUlGetIMcsFrmITbs(resiTbs, RG_SCH_CMN_GET_UE_CTGY(ue));
13866 RG_SCH_UL_MCS_TO_MODODR(iMcs, modOdr);
13867 if (RG_SCH_CMN_GET_UE_CTGY(ue) != CM_LTE_UE_CAT_5)
13869 modOdr = RGSCH_MIN(RGSCH_QM_QPSK, modOdr);
13873 modOdr = RGSCH_MIN(RGSCH_QM_64QAM, modOdr);
13875 nPrb = resNumSb * cellUl->sbSize;
13876 /* Restricting the minumum iTbs requried to modify to 10 */
/* Give up once grant is at max width and iTbs is already low */
13877 if ((nPrb >= maxRb) && (resiTbs <= 10))
13879 /* Could not accomodate ACQI */
13882 totREs = nPrb * RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
/* Transport block size from the 36.213 TBS table (layer-0 column) */
13883 tbSz = rgTbSzTbl[0][resiTbs][nPrb-1];
13884 /* totalREs/tbSz = num of bits perRE. */
13885 cqiRiREs = (totREs * betaOffVal * cqiRiRptSz)/(1000 * tbSz); /* betaOffVal is represented
13886 as parts per 1000 */
13887 hqREs = (totREs * betaOffHqVal * hqSz)/(1000 * tbSz);
/* Efficiency is computable only if control REs leave room for data */
13888 if ((cqiRiREs + hqREs) < totREs)
13890 remREsForPusch = totREs - cqiRiREs - hqREs;
13891 bitsPerRe = (tbSz * 1000)/remREsForPusch; /* Multiplying by 1000 for Interger Oper */
13892 puschEff = bitsPerRe/modOdr;
13894 if (puschEff < effTgt)
13896 /* ensure resultant efficiency for PUSCH Data is within 0.93*/
13901 /* Alternate between increasing SB or decreasing iTbs until eff is met */
13902 if (mdfyiTbsFlg == FALSE)
/* Widen first (if allowed); next failing pass will lower iTbs */
13906 resNumSb = resNumSb + 1;
13908 mdfyiTbsFlg = TRUE;
13914 resiTbs-= stepDownItbs;
13916 mdfyiTbsFlg = FALSE;
13919 }while (1); /* Loop breaks if efficency is met
13920 or returns RFAILED if not able to meet the efficiency */
13929 /***********************************************************
13931 * Func : rgSCHCmnUlRbAllocForUe
13933 * Desc : Do uplink RB allocation for an UE.
13937 * Notes: Note that as of now, for retx, maxRb
13938 * is not considered. Alternatives, such
13939 * as dropping retx if it crosses maxRb
13940 * could be considered.
13944 **********************************************************/
13946 PRIVATE S16 rgSCHCmnUlRbAllocForUe
/* K&R-style definition; parameter declarations elided in this extract.
 * 5GTF (5G test framework) path: allocates HARQ proc, PDCCH and VRBGs. */
13955 PRIVATE S16 rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole)
13963 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
13964 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
13965 RgSchUlAlloc *alloc = NULLP;
13971 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->schdHqProcIdx];
13973 RgSchUlHqProcCb *proc = NULLP;
13979 TfuDciFormat dciFrmt;
13983 TRC2(rgSCHCmnUlRbAllocForUe);
/* Obtain an available UL HARQ process; failure path (elided) bails out. */
13985 rgSCHUhmGetAvlHqProc(cell, ue, &proc);
13988 //printf("UE [%d] HQ Proc unavailable\n", ue->ueId);
/* DCI format selection by configured rank: A2 for rank-2, else A1. */
13993 if (ue->ue5gtfCb.rank == 2)
13995 dciFrmt = TFU_DCI_FORMAT_A2;
14000 dciFrmt = TFU_DCI_FORMAT_A1;
14003 /* 5gtf TODO : To pass dci frmt to this function */
14004 pdcch = rgSCHCmnPdcchAllocCrntSf(cell, ue);
14007 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
14008 "rgSCHCmnUlRbAllocForUe(): Could not get PDCCH for CRNTI:%d",ue->ueId);
14011 gUl5gtfPdcchSchd++;
14012 #if defined (TENB_STATS) && defined (RG_5GTF)
14013 cell->tenbStats->sch.ul5gtfPdcchSchd++;
14016 //TODO_SID using configured prb as of now
14017 nPrb = ue->ue5gtfCb.maxPrb;
14018 reqVrbg = nPrb/MAX_5GTF_VRBG_SIZE;
14019 iMcs = ue->ue5gtfCb.mcs; //gSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
/* Sanity check on per-beam VRBG bookkeeping before allocation. */
14023 if((sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart > MAX_5GTF_VRBG)
14024 || (sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated > MAX_5GTF_VRBG))
14026 printf("5GTF_ERROR vrbg > 25 valstart = %d valalloc %d\n", sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart
14027 , sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated);
14032 /*TODO_SID: Workaround for alloc. Currently alloc is ulsf based. To handle multiple beams, we need a different
14033 design. Now alloc are formed based on MAX_5GTF_UE_SCH macro. */
14034 numVrbgTemp = MAX_5GTF_VRBG/MAX_5GTF_UE_SCH;
14037 alloc = rgSCHCmnUlSbAlloc(sf, numVrbgTemp,\
/* On allocation failure release the PDCCH grabbed above (avoids leak). */
14040 if (alloc == NULLP)
14042 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
14043 "rgSCHCmnUlRbAllocForUe(): Could not get UlAlloc %d CRNTI:%d",numVrbg,ue->ueId);
14044 rgSCHCmnPdcchRlsCrntSf(cell, pdcch);
14047 gUl5gtfAllocAllocated++;
14048 #if defined (TENB_STATS) && defined (RG_5GTF)
14049 cell->tenbStats->sch.ul5gtfAllocAllocated++;
/* Fill the grant and advance the beam's VRBG cursor/accounting. */
14051 alloc->grnt.vrbgStart = sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart;
14052 alloc->grnt.numVrbg = numVrbg;
14053 alloc->grnt.numLyr = numLyr;
14054 alloc->grnt.dciFrmt = dciFrmt;
14056 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart += numVrbg;
14057 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated += numVrbg;
14059 //rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
14061 sf->totPrb += alloc->grnt.numRb;
14062 ue->ul.nPrb = alloc->grnt.numRb;
/* Track PRB usage for non-CSG members separately from the cell total. */
14064 if (ue->csgMmbrSta != TRUE)
14066 cellUl->ncsgPrbCnt += alloc->grnt.numRb;
14068 cellUl->totPrbCnt += (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14069 alloc->pdcch = pdcch;
14070 alloc->grnt.iMcs = iMcs;
14071 alloc->grnt.iMcsCrnt = iMcsCrnt;
14072 alloc->grnt.hop = 0;
14073 /* Initial Num RBs support for UCI on PUSCH */
14075 ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14077 alloc->forMsg3 = FALSE;
14078 //RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTb5gtfSzTbl[0], (iTbs));
14080 //ueUl->alloc.allocdBytes = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
14081 /* TODO_SID Allocating based on configured MCS as of now.
14082 Currently for format A2. When doing multi grp per tti, need to update this. */
14083 ueUl->alloc.allocdBytes = (rgSch5gtfTbSzTbl[iMcs]/8) * ue->ue5gtfCb.rank;
14085 alloc->grnt.datSz = ueUl->alloc.allocdBytes;
14086 //TODO_SID Need to check mod order.
14087 RG_SCH_CMN_TBS_TO_MODODR(iMcs, alloc->grnt.modOdr);
14088 //alloc->grnt.modOdr = 6;
14089 alloc->grnt.isRtx = FALSE;
14091 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
14092 alloc->grnt.SCID = 0;
14093 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
14094 alloc->grnt.PMI = 0;
14095 alloc->grnt.uciOnxPUSCH = 0;
14096 alloc->grnt.hqProcId = proc->procId;
14098 alloc->hqProc = proc;
14099 alloc->hqProc->ulSfIdx = cellUl->schdIdx;
14101 /*commenting to retain the rnti used for transmission SPS/c-rnti */
14102 alloc->rnti = ue->ueId;
14103 ueUl->alloc.alloc = alloc;
14104 /*rntiwari-Adding the debug for generating the graph.*/
14105 /* No grant attr recorded now */
14109 /***********************************************************
14111 * Func : rgSCHCmnUlRbAllocAddUeToLst
14113 * Desc : Add UE to list (scheduled/non-scheduled list)
14114 * for UL RB allocation information.
14122 **********************************************************/
14124 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst
/* K&R-style definition; parameter declarations elided in this extract.
 * Appends the UE's UL-alloc link node to the caller-supplied list. */
14131 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst(cell, ue, lst)
14137 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
14138 TRC2(rgSCHCmnUlRbAllocAddUeToLst);
14141 gUl5gtfUeRbAllocDone++;
14142 #if defined (TENB_STATS) && defined (RG_5GTF)
14143 cell->tenbStats->sch.ul5gtfUeRbAllocDone++;
/* Node payload is the UE control block so list walkers can recover it. */
14145 cmLListAdd2Tail(lst, &ueUl->alloc.schdLstLnk);
14146 ueUl->alloc.schdLstLnk.node = (PTR)ue;
14151 * @brief This function Processes the Final Allocations
14152 * made by the RB Allocator against the requested.
14156 * Function: rgSCHCmnUlAllocFnlz
14157 * Purpose: This function Processes the Final Allocations
14158 * made by the RB Allocator against the requested.
14160 * Invoked by: Common Scheduler
14162 * @param[in] RgSchCellCb *cell
14163 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
14168 PRIVATE Void rgSCHCmnUlAllocFnlz
14171 RgSchCmnUlRbAllocInfo *allocInfo
/* Thin dispatcher: delegates finalization to the UL scheduler plugin. */
14174 PRIVATE Void rgSCHCmnUlAllocFnlz(cell, allocInfo)
14176 RgSchCmnUlRbAllocInfo *allocInfo;
14179 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14180 TRC2(rgSCHCmnUlAllocFnlz);
14182 /* call scheduler specific Finalization */
14183 cellSch->apisUl->rgSCHUlAllocFnlz(cell, allocInfo);
14189 * @brief This function Processes the Final Allocations
14190 * made by the RB Allocator against the requested.
14194 * Function: rgSCHCmnDlAllocFnlz
14195 * Purpose: This function Processes the Final Allocations
14196 * made by the RB Allocator against the requested.
14198 * Invoked by: Common Scheduler
14200 * @param[in] RgSchCellCb *cell
14205 PUBLIC Void rgSCHCmnDlAllocFnlz
/* Finalizes all DL allocation categories in a fixed priority order:
 * CCCH retx/tx, CCCH-SDU retx/tx, RA responses, then UE-dedicated
 * allocations via the DL scheduler plugin. */
14210 PUBLIC Void rgSCHCmnDlAllocFnlz(cell)
14214 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14215 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
14217 TRC2(rgSCHCmnDlAllocFnlz);
14219 rgSCHCmnDlCcchRetxFnlz(cell, allocInfo);
14220 rgSCHCmnDlCcchTxFnlz(cell, allocInfo);
14222 /* Added below functions for handling CCCH SDU transmission received
14224 * * guard timer expiry*/
14225 rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo);
14226 rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo);
14228 rgSCHCmnDlRaRspFnlz(cell, allocInfo);
14229 /* call scheduler specific Finalization */
14230 cellSch->apisDl->rgSCHDlAllocFnlz(cell, allocInfo);
14232 /* Stack Crash problem for TRACE5 Changes. Added the return below */
14239 * @brief Update an uplink subframe.
14243 * Function : rgSCHCmnUlUpdSf
14245 * For each allocation
14246 * - if no more tx needed
14247 * - Release allocation
14249 * - Perform retransmission
14251 * @param[in] RgSchUlSf *sf
14255 PRIVATE Void rgSCHCmnUlUpdSf
14258 RgSchCmnUlRbAllocInfo *allocInfo,
/* K&R-style definition; parameter declarations elided in this extract. */
14262 PRIVATE Void rgSCHCmnUlUpdSf(cell, allocInfo, sf)
14264 RgSchCmnUlRbAllocInfo *allocInfo;
14269 TRC2(rgSCHCmnUlUpdSf);
/* Drain the subframe's allocation list; each iteration either queues a
 * retransmission or just releases the allocation. */
14271 while ((lnk = sf->allocs.first))
14273 RgSchUlAlloc *alloc = (RgSchUlAlloc *)lnk->node;
/* CRC received or no remaining transmissions => HARQ proc done (the
 * elided branch handles the retx case below). */
14276 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
14281 /* If need to handle all retx together, run another loop separately */
14282 rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc);
14284 rgSCHCmnUlRlsUlAlloc(cell, sf, alloc);
14287 /* By this time, all allocs would have been cleared and
14288 * SF is reset to be made ready for new allocations. */
14289 rgSCHCmnUlSfReset(cell, sf);
14290 /* In case there are timing problems due to msg3
14291 * allocations being done in advance, (which will
14292 * probably happen with the current FDD code that
14293 * handles 8 subframes) one solution
14294 * could be to hold the (recent) msg3 allocs in a separate
14295 * list, and then possibly add that to the actual
14296 * list later. So at this time while allocations are
14297 * traversed, the recent msg3 ones are not seen. Anytime after
14298 * this (a good time is when the usual allocations
14299 * are made), msg3 allocations could be transferred to the
14300 * normal list. Not doing this now as it is assumed
14301 * that incorporation of TDD shall take care of this.
14309 * @brief Handle uplink allocation for retransmission.
14313 * Function : rgSCHCmnUlHndlAllocRetx
14315 * Processing Steps:
14316 * - Add to queue for retx.
14317 * - Do not release here, release happends as part
14318 * of the loop that calls this function.
14320 * @param[in] RgSchCellCb *cell
14321 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
14322 * @param[in] RgSchUlSf *sf
14323 * @param[in] RgSchUlAlloc *alloc
14327 PRIVATE Void rgSCHCmnUlHndlAllocRetx
14330 RgSchCmnUlRbAllocInfo *allocInfo,
14332 RgSchUlAlloc *alloc
/* K&R-style definition; parameter declarations elided in this extract. */
14335 PRIVATE Void rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc)
14337 RgSchCmnUlRbAllocInfo *allocInfo;
14339 RgSchUlAlloc *alloc;
14343 RgSchCmnUlUe *ueUl;
14344 TRC2(rgSCHCmnUlHndlAllocRetx);
/* Bytes of the original grant, recomputed from iMcs/numRb via the TB
 * size table (the assignment target line is elided in this extract). */
14346 rgTbSzTbl[0][rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs)]\
14347 [alloc->grnt.numRb-1]/8;
/* UE-owned alloc: queue the UE for adaptive retx scheduling. */
14348 if (!alloc->forMsg3)
14350 ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue);
14351 ueUl->alloc.reqBytes = bytes;
14352 rgSCHUhmRetx(alloc->hqProc);
14353 rgSCHCmnUlAdd2RetxUeLst(allocInfo, alloc->ue);
14357 /* RACHO msg3 retx handling. Part of RACH procedure changes. */
14358 retxAlloc = rgSCHCmnUlGetUlAlloc(cell, sf, alloc->numSb);
14359 if (retxAlloc == NULLP)
14361 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
14362 "rgSCHCmnUlRbAllocForUe():Could not get UlAlloc for msg3Retx RNTI:%d",
/* Clone the grant into the new msg3 retx allocation; iMcsCrnt is derived
 * from the HARQ redundancy version. */
14366 retxAlloc->grnt.iMcs = alloc->grnt.iMcs;
14367 retxAlloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl\
14368 [alloc->hqProc->rvIdx];
14369 retxAlloc->grnt.nDmrs = 0;
14370 retxAlloc->grnt.hop = 0;
14371 retxAlloc->grnt.delayBit = 0;
14372 retxAlloc->rnti = alloc->rnti;
14373 retxAlloc->ue = NULLP;
14374 retxAlloc->pdcch = FALSE;
14375 retxAlloc->forMsg3 = TRUE;
14376 retxAlloc->raCb = alloc->raCb;
14377 retxAlloc->hqProc = alloc->hqProc;
14378 rgSCHUhmRetx(retxAlloc->hqProc);
14385 * @brief Uplink Scheduling Handler.
14389 * Function: rgSCHCmnUlAlloc
14390 * Purpose: This function Handles Uplink Scheduling.
14392 * Invoked by: Common Scheduler
14394 * @param[in] RgSchCellCb *cell
14397 /* ccpu00132653- The definition of this function made common for TDD and FDD*/
14399 PRIVATE Void rgSCHCmnUlAlloc
/* Per-TTI UL scheduling driver: prepares the UL subframe, runs adaptive
 * retx, invokes the scheduler plugin, allocates RBs, applies group power
 * control and finalizes. */
14404 PRIVATE Void rgSCHCmnUlAlloc(cell)
14408 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14409 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
14410 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
14411 RgSchCmnUlRbAllocInfo allocInfo;
14412 RgSchCmnUlRbAllocInfo *allocInfoRef = &allocInfo;
14418 TRC2(rgSCHCmnUlAlloc);
14420 /* Initializing RgSchCmnUlRbAllocInfo structure */
14421 rgSCHCmnInitUlRbAllocInfo(allocInfoRef);
14423 /* Get Uplink Subframe */
14424 allocInfoRef->sf = &cellUl->ulSfArr[cellUl->schdIdx];
14426 /* initializing the UL PRB count */
14427 allocInfoRef->sf->totPrb = 0;
14431 rgSCHCmnSpsUlTti(cell, allocInfoRef);
/* Only re-initialize subband bookkeeping when the SF has no allocations. */
14434 if(*allocInfoRef->sf->allocCountRef == 0)
14438 if ((hole = rgSCHUtlUlHoleFirst(allocInfoRef->sf)) != NULLP)
14440 /* Sanity check of holeDb */
14441 if (allocInfoRef->sf->holeDb->count == 1 && hole->start == 0)
14443 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
14444 /* Re-Initialize available subbands because of CFI change*/
14445 allocInfoRef->sf->availSubbands = cell->dynCfiCb.\
14446 bwInfo[cellDl->currCfi].numSb;
14447 /*Currently initializing 5gtf ulsf specific initialization here.
14448 need to do at proper place */
14450 allocInfoRef->sf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
14451 allocInfoRef->sf->numUePerGrp = cell->cell5gtfCb.uePerGrpPerTti;
/* Reset per-beam VRBG accounting for the fresh subframe. */
14452 for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
14454 allocInfoRef->sf->sfBeamInfo[idx].totVrbgAllocated = 0;
14455 allocInfoRef->sf->sfBeamInfo[idx].totVrbgRequired = 0;
14456 allocInfoRef->sf->sfBeamInfo[idx].vrbgStart = 0;
14462 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
14463 "Error! holeDb sanity check failed");
14468 /* Fix: Adaptive re-transmissions prioritised over other transmissions */
14469 /* perform adaptive retransmissions */
14470 rgSCHCmnUlSfReTxAllocs(cell, allocInfoRef->sf);
14474 /* Fix: syed Adaptive Msg3 Retx crash. Release all
14475 Harq processes for which adap Retx failed, to avoid
14476 blocking. This step should be done before New TX
14477 scheduling to make hqProc available. Right now we
14478 dont check if proc is in adap Retx list for considering
14479 it to be available. But now with this release that
14480 functionality would be correct. */
14482 rgSCHCmnUlSfRlsRetxProcs(cell, allocInfoRef->sf);
14485 /* Specific UL scheduler to perform UE scheduling */
14486 cellSch->apisUl->rgSCHUlSched(cell, allocInfoRef);
14488 /* Call UL RB allocator module */
14489 rgSCHCmnAllocUlRb(cell, allocInfoRef);
14491 /* Do group power control for PUSCH */
14492 rgSCHCmnGrpPwrCntrlPusch(cell, allocInfoRef->sf);
14494 cell->sc.apis->rgSCHDrxStrtInActvTmrInUl(cell);
14496 rgSCHCmnUlAllocFnlz(cell, allocInfoRef);
/* Reset 5gtf debug counters every 5000 TTIs. */
14497 if(5000 == g5gtfTtiCnt)
14499 ul5gtfsidDlAlreadyMarkUl = 0;
14500 ul5gtfsidDlSchdPass = 0;
14501 ul5gtfsidUlMarkUl = 0;
14502 ul5gtfTotSchdCnt = 0;
14510 * @brief send Subframe Allocations.
14514 * Function: rgSCHCmnSndCnsldtInfo
14515 * Purpose: Send the scheduled
14516 * allocations to MAC for StaInd generation to Higher layers and
14517 * for MUXing. PST's RgInfSfAlloc to MAC instance.
14519 * Invoked by: Common Scheduler
14521 * @param[in] RgSchCellCb *cell
14525 PUBLIC Void rgSCHCmnSndCnsldtInfo
/* Posts the consolidated per-SF allocation (RgInfSfAlloc) to the MAC
 * instance, but only when there is at least one populated field. */
14530 PUBLIC Void rgSCHCmnSndCnsldtInfo(cell)
14534 RgInfSfAlloc *subfrmAlloc;
14536 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14538 TRC2(rgSCHCmnSndCnsldtInfo);
14540 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14542 /* Send the allocations to MAC for MUXing */
14543 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
14544 subfrmAlloc->cellId = cell->cellId;
14545 /* Populate the List of UEs needing PDB-based Flow control */
14546 cellSch->apisDl->rgSCHDlFillFlwCtrlInfo(cell, subfrmAlloc);
/* Two near-identical guards below: the variants belong to different
 * compile-time branches (eMTC / UL-UE-info), selected by elided #ifdefs. */
14548 if((subfrmAlloc->rarInfo.numRaRntis) ||
14550 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14551 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14552 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14554 (subfrmAlloc->ueInfo.numUes) ||
14555 (subfrmAlloc->cmnLcInfo.bitMask) ||
14556 (subfrmAlloc->ulUeInfo.numUes) ||
14557 (subfrmAlloc->flowCntrlInfo.numUes))
14559 if((subfrmAlloc->rarInfo.numRaRntis) ||
14561 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14562 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14563 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14565 (subfrmAlloc->ueInfo.numUes) ||
14566 (subfrmAlloc->cmnLcInfo.bitMask) ||
14567 (subfrmAlloc->flowCntrlInfo.numUes))
14570 RgSchMacSfAlloc(&pst, subfrmAlloc);
/* Ring-buffer advance; the modulus differs per build config (elided
 * #ifdef selects one of the two lines). */
14573 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_NUM_SUB_FRAMES;
14575 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_SF_ALLOC_SIZE;
14581 * @brief Consolidate Subframe Allocations.
14585 * Function: rgSCHCmnCnsldtSfAlloc
14586 * Purpose: Consolidate Subframe Allocations.
14588 * Invoked by: Common Scheduler
14590 * @param[in] RgSchCellCb *cell
14594 PUBLIC Void rgSCHCmnCnsldtSfAlloc
/* Gathers RAR and UE allocation info for the DL subframe at
 * crntTime + RG_SCH_CMN_DL_DELTA into the current RgInfSfAlloc entry,
 * then propagates DRX inactivity results to the schedulers. */
14599 PUBLIC Void rgSCHCmnCnsldtSfAlloc(cell)
14603 RgInfSfAlloc *subfrmAlloc;
14604 CmLteTimingInfo frm;
14606 CmLListCp dlDrxInactvTmrLst;
14607 CmLListCp dlInActvLst;
14608 CmLListCp ulInActvLst;
14609 RgSchCmnCell *cellSch = NULLP;
14611 TRC2(rgSCHCmnCnsldtSfAlloc);
14613 cmLListInit(&dlDrxInactvTmrLst);
14614 cmLListInit(&dlInActvLst);
14615 cmLListInit(&ulInActvLst);
14617 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14619 /* Get Downlink Subframe */
14620 frm = cell->crntTime;
14621 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
14622 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14624 /* Fill the allocation Info */
14625 rgSCHUtlFillRgInfRarInfo(dlSf, subfrmAlloc, cell);
/* Side effect: also collects DRX-inactive UE lists for both directions. */
14628 rgSCHUtlFillRgInfUeInfo(dlSf, cell, &dlDrxInactvTmrLst,
14629 &dlInActvLst, &ulInActvLst);
14630 #ifdef RG_PFS_STATS
14631 cell->totalPrb += dlSf->bwAssigned;
14633 /* Mark the following Ues inactive for UL*/
14634 cellSch = RG_SCH_CMN_GET_CELL(cell);
14636 /* Calling Scheduler specific function with DRX inactive UE list*/
14637 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInActvLst);
14638 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInActvLst);
14641 /*re/start DRX inactivity timer for the UEs*/
14642 (Void)rgSCHDrxStrtInActvTmr(cell,&dlDrxInactvTmrLst,RG_SCH_DRX_DL);
14648 * @brief Initialize the DL Allocation Information Structure.
14652 * Function: rgSCHCmnInitDlRbAllocInfo
14653 * Purpose: Initialize the DL Allocation Information Structure.
14655 * Invoked by: Common Scheduler
14657 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
14661 PRIVATE Void rgSCHCmnInitDlRbAllocInfo
14663 RgSchCmnDlRbAllocInfo *allocInfo
/* Zeroes the common-channel allocation records and re-initializes every
 * per-category linked list so the structure is ready for a new TTI. */
14666 PRIVATE Void rgSCHCmnInitDlRbAllocInfo(allocInfo)
14667 RgSchCmnDlRbAllocInfo *allocInfo;
14670 TRC2(rgSCHCmnInitDlRbAllocInfo);
14671 cmMemset((U8 *)&allocInfo->pcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14672 cmMemset((U8 *)&allocInfo->bcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14673 cmMemset((U8 *)allocInfo->raRspAlloc, (U8)0,
14674 RG_SCH_CMN_MAX_CMN_PDCCH*sizeof(RgSchDlRbAlloc));
/* Msg4 (contention resolution) tx/retx and schedule-result lists. */
14676 allocInfo->msg4Alloc.msg4DlSf = NULLP;
14677 cmLListInit(&allocInfo->msg4Alloc.msg4TxLst);
14678 cmLListInit(&allocInfo->msg4Alloc.msg4RetxLst);
14679 cmLListInit(&allocInfo->msg4Alloc.schdMsg4TxLst);
14680 cmLListInit(&allocInfo->msg4Alloc.schdMsg4RetxLst);
14681 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4TxLst);
14682 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4RetxLst);
/* CCCH SDU lists follow the same schd/non-schd pattern. */
14684 allocInfo->ccchSduAlloc.ccchSduDlSf = NULLP;
14685 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduTxLst);
14686 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduRetxLst);
14687 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduTxLst);
14688 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduRetxLst);
14689 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst);
14690 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst);
/* Dedicated (per-UE) HARQ-process lists, incl. SPS and TENB variants. */
14693 allocInfo->dedAlloc.dedDlSf = NULLP;
14694 cmLListInit(&allocInfo->dedAlloc.txHqPLst);
14695 cmLListInit(&allocInfo->dedAlloc.retxHqPLst);
14696 cmLListInit(&allocInfo->dedAlloc.schdTxHqPLst);
14697 cmLListInit(&allocInfo->dedAlloc.schdRetxHqPLst);
14698 cmLListInit(&allocInfo->dedAlloc.nonSchdTxHqPLst);
14699 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxHqPLst);
14701 cmLListInit(&allocInfo->dedAlloc.txRetxHqPLst);
14702 cmLListInit(&allocInfo->dedAlloc.schdTxRetxHqPLst);
14703 cmLListInit(&allocInfo->dedAlloc.nonSchdTxRetxHqPLst);
14705 cmLListInit(&allocInfo->dedAlloc.txSpsHqPLst);
14706 cmLListInit(&allocInfo->dedAlloc.retxSpsHqPLst);
14707 cmLListInit(&allocInfo->dedAlloc.schdTxSpsHqPLst);
14708 cmLListInit(&allocInfo->dedAlloc.schdRetxSpsHqPLst);
14709 cmLListInit(&allocInfo->dedAlloc.nonSchdTxSpsHqPLst);
14710 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxSpsHqPLst);
14714 rgSCHLaaCmnInitDlRbAllocInfo (allocInfo);
14717 cmLListInit(&allocInfo->dedAlloc.errIndTxHqPLst);
14718 cmLListInit(&allocInfo->dedAlloc.schdErrIndTxHqPLst);
14719 cmLListInit(&allocInfo->dedAlloc.nonSchdErrIndTxHqPLst);
14724 * @brief Initialize the UL Allocation Information Structure.
14728 * Function: rgSCHCmnInitUlRbAllocInfo
14729 * Purpose: Initialize the UL Allocation Information Structure.
14731 * Invoked by: Common Scheduler
14733 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
14737 PUBLIC Void rgSCHCmnInitUlRbAllocInfo
14739 RgSchCmnUlRbAllocInfo *allocInfo
/* Clears the SF pointer and re-initializes the contention-resolution and
 * UE lists (requested / scheduled / non-scheduled) for a new TTI. */
14742 PUBLIC Void rgSCHCmnInitUlRbAllocInfo(allocInfo)
14743 RgSchCmnUlRbAllocInfo *allocInfo;
14746 TRC2(rgSCHCmnInitUlRbAllocInfo);
14747 allocInfo->sf = NULLP;
14748 cmLListInit(&allocInfo->contResLst);
14749 cmLListInit(&allocInfo->schdContResLst);
14750 cmLListInit(&allocInfo->nonSchdContResLst);
14751 cmLListInit(&allocInfo->ueLst);
14752 cmLListInit(&allocInfo->schdUeLst);
14753 cmLListInit(&allocInfo->nonSchdUeLst);
14759 * @brief Scheduling for PUCCH group power control.
14763 * Function: rgSCHCmnGrpPwrCntrlPucch
14764 * Purpose: This function does group power control for PUCCH
14765 * corresponding to the subframe for which DL UE allocations
14768 * Invoked by: Common Scheduler
14770 * @param[in] RgSchCellCb *cell
14774 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch
/* Thin wrapper: delegates PUCCH group power control to the power module. */
14780 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch(cell, dlSf)
14785 TRC2(rgSCHCmnGrpPwrCntrlPucch);
14787 rgSCHPwrGrpCntrlPucch(cell, dlSf);
14793 * @brief Scheduling for PUSCH group power control.
14797 * Function: rgSCHCmnGrpPwrCntrlPusch
14798 * Purpose: This function does group power control, for
14799 * the subframe for which UL allocation has (just) happened.
14801 * Invoked by: Common Scheduler
14803 * @param[in] RgSchCellCb *cell
14804 * @param[in] RgSchUlSf *ulSf
14808 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch
14814 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch(cell, ulSf)
14819 /*removed unused variable *cellSch*/
14820 CmLteTimingInfo frm;
14823 TRC2(rgSCHCmnGrpPwrCntrlPusch);
14825 /* Got to pass DL SF corresponding to UL SF, so get that first.
14826 * There is no easy way of getting dlSf by having the RgSchUlSf*,
14827 * so use the UL delta from current time to get the DL SF. */
14828 frm = cell->crntTime;
/* eMTC cells use the eMTC-specific subframe increment macro. */
14831 if(cell->emtcEnable == TRUE)
14833 RGSCH_INCR_SUB_FRAME_EMTC(frm, TFU_DLCNTRL_DLDELTA);
14838 RGSCH_INCR_SUB_FRAME(frm, TFU_DLCNTRL_DLDELTA);
14840 /* Del filling of dl.time */
14841 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14843 rgSCHPwrGrpCntrlPusch(cell, dlSf, ulSf);
14848 /* Fix: syed align multiple UEs to refresh at same time */
14849 /***********************************************************
14851 * Func : rgSCHCmnApplyUeRefresh
14853 * Desc : Apply UE refresh in CMN and Specific
14854 * schedulers. Data rates and corresponding
14855 * scratchpad variables are updated.
14863 **********************************************************/
14865 PRIVATE S16 rgSCHCmnApplyUeRefresh
/* K&R-style definition; parameter declarations elided in this extract. */
14871 PRIVATE S16 rgSCHCmnApplyUeRefresh(cell, ue)
14876 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14878 U32 effNonGbrBsr = 0;
14881 TRC2(rgSCHCmnApplyUeRefresh);
14883 /* Reset the refresh cycle variableCAP */
14884 ue->ul.effAmbr = ue->ul.cfgdAmbr;
/* Walk LCGs 1..N (LCG 0 is handled separately at the bottom): restore
 * effective GBR/MBR budgets and recompute buffer-status views. */
14886 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
14888 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
14890 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
14892 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
14894 cmnLcg->effGbr = cmnLcg->cfgdGbr;
14895 cmnLcg->effDeltaMbr = cmnLcg->deltaMbr;
14896 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
14897 /* Considering GBR LCG will be prioritised by UE */
14898 effGbrBsr += cmnLcg->bs;
14899 }/* Else no remaing BS so nonLcg0 will be updated when BSR will be received */
14902 effNonGbrBsr += cmnLcg->reportedBs;
14903 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
/* Non-GBR demand is capped by the (just refreshed) effective AMBR. */
14907 effNonGbrBsr = RGSCH_MIN(effNonGbrBsr,ue->ul.effAmbr);
14908 ue->ul.nonGbrLcgBs = effNonGbrBsr;
14910 ue->ul.nonLcg0Bs = effGbrBsr + effNonGbrBsr;
/* Total effective BSR = non-LCG0 demand + LCG0 (SRB) demand. */
14911 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
14912 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
14915 /* call scheduler specific event handlers
14916 * for refresh timer expiry */
14917 cellSch->apisUl->rgSCHUlUeRefresh(cell, ue);
14918 cellSch->apisDl->rgSCHDlUeRefresh(cell, ue);
14923 /***********************************************************
14925 * Func : rgSCHCmnTmrExpiry
14927 * Desc : Adds an UE to refresh queue, so that the UE is
14928 * periodically triggered to refresh it's GBR and
14937 **********************************************************/
14939 PRIVATE S16 rgSCHCmnTmrExpiry
14941 PTR cb, /* Pointer to timer control block */
14942 S16 tmrEvnt /* Timer Event */
/* Timer callback: applies the UE refresh and re-arms the refresh queue. */
14945 PRIVATE S16 rgSCHCmnTmrExpiry(cb, tmrEvnt)
14946 PTR cb; /* Pointer to timer control block */
14947 S16 tmrEvnt; /* Timer Event */
14950 RgSchUeCb *ue = (RgSchUeCb *)cb;
14951 RgSchCellCb *cell = ue->cell;
14952 #if (ERRCLASS & ERRCLS_DEBUG)
14955 TRC2(rgSCHCmnTmrExpiry);
/* Debug-only guard: the only expected event is UE_REFRESH. */
14957 #if (ERRCLASS & ERRCLS_DEBUG)
14958 if (tmrEvnt != RG_SCH_CMN_EVNT_UE_REFRESH)
14960 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnTmrExpiry(): Invalid "
14961 "timer event CRNTI:%d",ue->ueId);
14968 rgSCHCmnApplyUeRefresh(cell, ue);
/* Re-queue the UE so the refresh repeats every RG_SCH_CMN_REFRESH_TIME. */
14970 rgSCHCmnAddUeToRefreshQ(cell, ue, RG_SCH_CMN_REFRESH_TIME);
14975 /***********************************************************
14977 * Func : rgSCHCmnTmrProc
14979 * Desc : Timer entry point per cell. Timer
14980 * processing is triggered at every frame boundary
14989 **********************************************************/
14991 PRIVATE S16 rgSCHCmnTmrProc
/* Per-cell periodic work: CSG PRB counter reset, overload/throughput
 * smoothing, and the UE-refresh timer queue service. */
14996 PRIVATE S16 rgSCHCmnTmrProc(cell)
15000 RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);
15001 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
15002 /* Moving the assignment of scheduler pointer
15003 to available scope for optimization */
15004 TRC2(rgSCHCmnTmrProc);
/* Run once per frame (slot 0 of the 5G numerology frame). */
15006 if ((cell->crntTime.slot % RGSCH_NUM_SUB_FRAMES_5G) == 0)
15008 /* Reset the counters periodically */
15009 if ((cell->crntTime.sfn % RG_SCH_CMN_CSG_REFRESH_TIME) == 0)
15011 RG_SCH_RESET_HCSG_DL_PRB_CNTR(cmnDlCell);
15012 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cmnUlCell);
15014 if ((cell->crntTime.sfn % RG_SCH_CMN_OVRLDCTRL_REFRESH_TIME) == 0)
/* Exponentially-weighted throughput average: 95% history, 5% new. */
15017 cell->measurements.ulTpt = ((cell->measurements.ulTpt * 95) + ( cell->measurements.ulBytesCnt * 5))/100;
15018 cell->measurements.dlTpt = ((cell->measurements.dlTpt * 95) + ( cell->measurements.dlBytesCnt * 5))/100;
15020 rgSCHUtlCpuOvrLdAdjItbsCap(cell);
15021 /* reset cell level tpt measurements for next cycle */
15022 cell->measurements.ulBytesCnt = 0;
15023 cell->measurements.dlBytesCnt = 0;
15025 /* Comparing with Zero instead of % is being done for efficiency.
15026 * If Timer resolution changes then accordingly update the
15027 * macro RG_SCH_CMN_REFRESH_TIMERES */
15028 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
15029 cmPrcTmr(&sched->tmrTqCp, sched->tmrTq, (PFV)rgSCHCmnTmrExpiry);
15036 /***********************************************************
15038 * Func : rgSchCmnUpdCfiVal
15040 * Desc : Update the CFI value if CFI switch was done
15048 **********************************************************/
15050 PRIVATE Void rgSchCmnUpdCfiVal
/* K&R-style definition; parameter declarations elided in this extract.
 * delta is the subframe offset from crntTime to the PDSCH subframe. */
15056 PRIVATE Void rgSchCmnUpdCfiVal(cell, delta)
15062 CmLteTimingInfo pdsch;
15063 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15072 TRC2(rgSchCmnUpdCfiVal);
15074 pdsch = cell->crntTime;
15075 RGSCH_INCR_SUB_FRAME(pdsch, delta);
15076 dlSf = rgSCHUtlSubFrmGet(cell, pdsch);
15077 /* Fix for DCFI FLE issue: when DL delta is 1 and UL delta is 0 and CFI
15078 *change happens in that SF then UL PDCCH allocation happens with old CFI
15079 *but CFI in control Req goes updated one since it was stored in the CELL
15081 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
/* 0xFF sentinel means no CFI switch is pending. */
15082 if(cell->dynCfiCb.pdcchSfIdx != 0xFF)
/* Two index computations below: the TDD and FDD variants, selected by
 * elided #ifdef branches. */
15085 dlIdx = rgSCHUtlGetDlSfIdx(cell, &pdsch);
15087 dlIdx = (((pdsch.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (pdsch.slot % RGSCH_NUM_SUB_FRAMES));
15088 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
15090 /* If current downlink subframe index is same as pdcch SF index,
15091 * perform the switching of CFI in this subframe */
15092 if(cell->dynCfiCb.pdcchSfIdx == dlIdx)
15094 cellCmnDl->currCfi = cellCmnDl->newCfi;
15095 cell->dynCfiCb.pdcchSfIdx = 0xFF;
15097 /* Updating the nCce value based on the new CFI */
15099 splSfCfi = cellCmnDl->newCfi;
/* TDD path (elided #ifdef): recompute nCce for every DL subframe,
 * special subframes use the special-SF CFI. */
15100 for(idx = 0; idx < cell->numDlSubfrms; idx++)
15102 tddSf = cell->subFrms[idx];
15104 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][tddSf->sfNum];
15106 if(tddSf->sfType == RG_SCH_SPL_SF_DATA)
15108 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
15110 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
15114 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][cellCmnDl->currCfi];
15117 /* Setting the switch over window length based on config index.
15118 * During switch over period all the UL trnsmissions are Acked
15120 cell->dynCfiCb.switchOvrWinLen =
15121 rgSchCfiSwitchOvrWinLen[cell->ulDlCfgIdx];
/* FDD path (elided #else): single nCce update, fixed window index 7. */
15123 cell->nCce = cell->dynCfiCb.cfi2NCceTbl[0][cellCmnDl->currCfi];
15124 /* Fix for DCFI FLE issue: when DL delta is 1 and UL delta is 0 and CFI
15125 *change happens in that SF then UL PDCCH allocation happens with old CFI
15126 *but CFI in control Req goes updated one since it was stored in the CELL
15128 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
15129 cell->dynCfiCb.switchOvrWinLen = rgSchCfiSwitchOvrWinLen[7];
15137 /***********************************************************
15139 * Func : rgSchCmnUpdtPdcchSfIdx
15141 * Desc : Update the switch over window length
15149 **********************************************************/
15152 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
/* Two definitions appear below: the TDD variant takes (cell, dlIdx,
 * sfNum), the FDD variant (cell, dlIdx); the selecting #ifdef lines are
 * elided in this extract. */
15159 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, sfNum)
15166 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
15172 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx)
15180 TRC2(rgSchCmnUpdtPdcchSfIdx);
15182 /* Resetting the parameters on CFI switching */
15183 cell->dynCfiCb.cceUsed = 0;
15184 cell->dynCfiCb.lowCceCnt = 0;
15186 cell->dynCfiCb.cceFailSum = 0;
15187 cell->dynCfiCb.cceFailCnt = 0;
15188 cell->dynCfiCb.prevCceFailIdx = 0;
15190 cell->dynCfiCb.switchOvrInProgress = TRUE;
15192 for(idx = 0; idx < cell->dynCfiCb.numFailSamples; idx++)
15194 cell->dynCfiCb.cceFailSamples[idx] = 0;
15197 cell->dynCfiCb.ttiCnt = 0;
15199 cell->dynCfiCb.cfiSwitches++;
15200 cfiSwitchCnt = cell->dynCfiCb.cfiSwitches;
/* Schedule the subframe at which the new CFI takes effect: TDD uses a
 * per-config increment table, FDD a fixed apply delta. */
15203 cell->dynCfiCb.pdcchSfIdx = (dlIdx +
15204 rgSchTddPdcchSfIncTbl[cell->ulDlCfgIdx][sfNum]) % cell->numDlSubfrms;
15206 cell->dynCfiCb.pdcchSfIdx = (dlIdx + RG_SCH_CFI_APPLY_DELTA) % \
15207 RGSCH_NUM_DL_slotS;
15211 /***********************************************************
15213 * Func : rgSchCmnUpdCfiDb
15215 * Desc : Update the counters related to dynamic
15216 * CFI feature in cellCb.
* Called per TTI (from rgSCHCmnUlSch) with a delta that
* advances crntTime to the DL subframe being evaluated.
* Drives the dynamic-CFI step-up/step-down state machine:
* CCE allocation failures push CFI up, sustained low CCE
* usage pulls it down. On a CFI change it computes the
* subframe index from which the new CFI applies.
* NOTE(review): this view of the file is elided (missing
* #ifdef branches/braces); comments below describe only the
* visible lines.
15224 **********************************************************/
15226 PUBLIC Void rgSchCmnUpdCfiDb
15232 PUBLIC Void rgSchCmnUpdCfiDb(cell, delta)
15237 CmLteTimingInfo frm;
15243 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15244 U8 nCceLowerCfi = 0;
15251 TRC2(rgSchCmnUpdCfiDb);
15253 /* Get Downlink Subframe */
15254 frm = cell->crntTime;
15255 RGSCH_INCR_SUB_FRAME(frm, delta);
/* TDD path: map timing to DL subframe and check whether this DL
 * subframe also carries DCI0/HI for UL (isHiDci0 != 0). */
15258 dlIdx = rgSCHUtlGetDlSfIdx(cell, &frm);
15259 dlSf = cell->subFrms[dlIdx];
15260 isHiDci0 = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15262 /* Changing the idexing
15263 so that proper subframe is selected */
/* FDD path: index by (sfn parity, slot) into the two-frame
 * subFrms ring — presumably RGSCH_NUM_SUB_FRAMES entries per
 * frame; verify against subFrms sizing. */
15264 dlIdx = (((frm.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (frm.slot % RGSCH_NUM_SUB_FRAMES));
15265 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
15266 dlSf = cell->subFrms[dlIdx];
15269 currCfi = cellSch->dl.currCfi;
/* Skip all accounting while a previous CFI switch-over window
 * is still in progress. */
15271 if(!cell->dynCfiCb.switchOvrInProgress)
/* Dynamic CFI disabled: force CFI back to the statically
 * configured value, one step per invocation. */
15274 if(!cell->dynCfiCb.isDynCfiEnb)
15276 if(currCfi != cellSch->cfiCfg.cfi)
15278 if(currCfi < cellSch->cfiCfg.cfi)
15280 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15281 cfiIncr = cell->dynCfiCb.cfiIncr;
15285 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15286 cfiDecr = cell->dynCfiCb.cfiDecr;
15293 /* Setting ttiMod to 0 for ttiCnt > 1000 in case if this
15294 * function was not called in UL subframe*/
15295 if(cell->dynCfiCb.ttiCnt > RGSCH_CFI_TTI_MON_INTRVL)
15302 ttiMod = cell->dynCfiCb.ttiCnt % RGSCH_CFI_TTI_MON_INTRVL;
15305 dlSf->dlUlBothCmplt++;
/* Evaluate only once both DL and UL processing for this
 * subframe are complete (or no HI/DCI0 is expected in it). */
15307 if((dlSf->dlUlBothCmplt == 2) || (!isHiDci0))
15309 if(dlSf->dlUlBothCmplt == 2)
15312 /********************STEP UP CRITERIA********************/
15313 /* Updating the CCE failure count parameter */
15314 cell->dynCfiCb.cceFailCnt += dlSf->isCceFailure;
15315 cell->dynCfiCb.cceFailSum += dlSf->isCceFailure;
15317 /* Check if cfi step up can be performed */
15318 if(currCfi < cell->dynCfiCb.maxCfi)
/* Step up once accumulated CCE failures reach the configured
 * TTI-count threshold. */
15320 if(cell->dynCfiCb.cceFailSum >= cell->dynCfiCb.cfiStepUpTtiCnt)
15322 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15323 cfiIncr = cell->dynCfiCb.cfiIncr;
15328 /********************STEP DOWN CRITERIA********************/
15330 /* Updating the no. of CCE used in this dl subframe */
15331 cell->dynCfiCb.cceUsed += dlSf->cceCnt;
15333 if(currCfi > RGSCH_MIN_CFI_VAL)
15335 /* calculating the number of CCE for next lower CFI */
/* TDD uses the PHICH mVal for this subframe to pick the CCE
 * table row; FDD (row 0) otherwise. */
15337 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15338 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[mPhich][currCfi-1];
15340 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[0][currCfi-1];
15342 if(dlSf->cceCnt < nCceLowerCfi)
15344 /* Updating the count of TTIs in which no. of CCEs
15345 * used were less than the CCEs of next lower CFI */
15346 cell->dynCfiCb.lowCceCnt++;
/* Step down only if: no CCE failures in the window, enough
 * low-usage TTIs seen, and total CCE usage is below
 * RGSCH_CFI_CCE_PERCNTG percent of the lower-CFI capacity. */
15351 totalCce = (nCceLowerCfi * cell->dynCfiCb.cfiStepDownTtiCnt *
15352 RGSCH_CFI_CCE_PERCNTG)/100;
15354 if((!cell->dynCfiCb.cceFailSum) &&
15355 (cell->dynCfiCb.lowCceCnt >=
15356 cell->dynCfiCb.cfiStepDownTtiCnt) &&
15357 (cell->dynCfiCb.cceUsed < totalCce))
15359 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15360 cfiDecr = cell->dynCfiCb.cfiDecr;
/* Sliding-window bookkeeping: cceFailSum holds the failures of
 * the last numFailSamples sample periods; retire the oldest
 * sample when the period index advances. */
15366 cceFailIdx = ttiMod/cell->dynCfiCb.failSamplePrd;
15368 if(cceFailIdx != cell->dynCfiCb.prevCceFailIdx)
15370 /* New sample period has started. Subtract the old count
15371 * from the new sample period */
15372 cell->dynCfiCb.cceFailSum -= cell->dynCfiCb.cceFailSamples[cceFailIdx];
15374 /* Store the previous sample period data */
15375 cell->dynCfiCb.cceFailSamples[cell->dynCfiCb.prevCceFailIdx]
15376 = cell->dynCfiCb.cceFailCnt;
15378 cell->dynCfiCb.prevCceFailIdx = cceFailIdx;
15380 /* Resetting the CCE failure count as zero for next sample period */
15381 cell->dynCfiCb.cceFailCnt = 0;
15386 /* Restting the parametrs after Monitoring Interval expired */
15387 cell->dynCfiCb.cceUsed = 0;
15388 cell->dynCfiCb.lowCceCnt = 0;
15389 cell->dynCfiCb.ttiCnt = 0;
15392 cell->dynCfiCb.ttiCnt++;
/* CFI changed this TTI: record the subframe from which the new
 * CFI takes effect (extra sfNum argument on the TDD variant). */
15396 if(cellSch->dl.newCfi != cellSch->dl.currCfi)
15399 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, dlSf->sfNum);
15401 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx);
15408 * @brief Dl Scheduler for Broadcast and Common channel scheduling.
15412 * Function: rgSCHCmnDlCommonChSch
15413 * Purpose: This function schedules DL Common channels for LTE.
15414 * Invoked by TTI processing in TOM. Scheduling is done for
15415 * BCCH, PCCH, Msg4, CCCH SDU, RAR in that order
15417 * Invoked by: TOM (TTI processing)
15419 * @param[in] RgSchCellCb *cell
* @return Void
* NOTE(review): elided view — ANSI and K&R prototypes appear
* fused below; missing braces belong to stripped lines.
15423 PUBLIC Void rgSCHCmnDlCommonChSch
15428 PUBLIC Void rgSCHCmnDlCommonChSch(cell)
15432 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15434 TRC2(rgSCHCmnDlCommonChSch);
/* Per-TTI PDB (packet delay budget) tracking tick, then refresh
 * the CFI for the DL delta before any allocation. */
15436 cellSch->apisDl->rgSCHDlTickForPdbTrkng(cell);
15437 rgSchCmnUpdCfiVal(cell, RG_SCH_CMN_DL_DELTA);
15439 /* handle Inactive UEs for DL */
15440 rgSCHCmnHdlDlInactUes(cell);
15442 /* Send a Tick to Refresh Timer */
15443 rgSCHCmnTmrProc(cell);
/* BCCH/PCCH scheduling is gated on DL data being allowed and SI
 * scheduling not being stopped. */
15445 if (cell->isDlDataAllwd && (cell->stopSiSch == FALSE))
15447 rgSCHCmnInitRbAlloc(cell);
15448 /* Perform DL scheduling of BCCH, PCCH */
15449 rgSCHCmnDlBcchPcchAlloc(cell);
/* Count down the SI window even when SI scheduling is skipped,
 * so the window still expires on time. */
15453 if(cell->siCb.inWindow != 0)
15455 cell->siCb.inWindow--;
/* CCCH/RAR (Msg4, CCCH SDU, RAR) scheduling gated separately on
 * stopDlSch. */
15458 if (cell->isDlDataAllwd && (cell->stopDlSch == FALSE))
15460 rgSCHCmnDlCcchRarAlloc(cell);
15466 * @brief Scheduler invocation per TTI.
15470 * Function: rgSCHCmnUlSch
15471 * Purpose: This function implements UL scheduler alone. This is to
15472 * be able to perform scheduling with more flexibility.
15474 * Invoked by: TOM (TTI processing)
15476 * @param[in] RgSchCellCb *cell
* @return Void
* Also drives the dynamic-CFI bookkeeping (rgSchCmnUpdCfiDb) and
* manages the CFI switch-over window countdown.
15480 PUBLIC Void rgSCHCmnUlSch
15485 PUBLIC Void rgSCHCmnUlSch(cell)
15489 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15491 TRC2(rgSCHCmnUlSch);
/* LAA SCells carry no UL — presumably an early return here
 * (elided); confirm against full source. */
15495 if(TRUE == rgSCHLaaSCellEnabled(cell))
/* Only schedule UL when a valid UL subframe index exists for
 * this TTI. */
15501 if(cellSch->ul.schdIdx != RGSCH_INVALID_INFO)
15503 rgSchCmnUpdCfiVal(cell, TFU_ULCNTRL_DLDELTA);
15505 /* Handle Inactive UEs for UL */
15506 rgSCHCmnHdlUlInactUes(cell);
15507 /* Perform UL Scheduling EVERY TTI */
15508 rgSCHCmnUlAlloc(cell);
15510 /* Calling function to update CFI parameters*/
15511 rgSchCmnUpdCfiDb(cell, TFU_ULCNTRL_DLDELTA);
15513 if(cell->dynCfiCb.switchOvrWinLen > 0)
15515 /* Decrementing the switchover window length */
15516 cell->dynCfiCb.switchOvrWinLen--;
/* Switch-over window just expired: apply any pending dynamic-CFI
 * reconfiguration (toggle enable flag) and clear the
 * in-progress flag. */
15518 if(!cell->dynCfiCb.switchOvrWinLen)
15520 if(cell->dynCfiCb.dynCfiRecfgPend)
15522 /* Toggling the Dynamic CFI enabling */
15523 cell->dynCfiCb.isDynCfiEnb ^= 1;
15524 rgSCHDynCfiReCfg(cell, cell->dynCfiCb.isDynCfiEnb);
15525 cell->dynCfiCb.dynCfiRecfgPend = FALSE;
15527 cell->dynCfiCb.switchOvrInProgress = FALSE;
/* SPS UL per-TTI processing (LTEMAC_SPS build). */
15535 rgSCHCmnSpsUlTti(cell, NULLP);
15545 * @brief This function updates the scheduler with service for an UE.
15549 * Function: rgSCHCmnDlDedBoUpd
15550 * Purpose: This function should be called whenever there is a
15551 * change BO for a service.
15553 * Invoked by: BO and Scheduler
15555 * @param[in] RgSchCellCb* cell
15556 * @param[in] RgSchUeCb* ue
15557 * @param[in] RgSchDlLcCb* svc
* @return Void
* Dispatches the buffer-occupancy update to the SPS module, the
* EMTC DL scheduler, or the common DL scheduler as applicable,
* and propagates it to SCells.
15562 PUBLIC Void rgSCHCmnDlDedBoUpd
15569 PUBLIC Void rgSCHCmnDlDedBoUpd(cell, ue, svc)
15575 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15576 TRC2(rgSCHCmnDlDedBoUpd);
15578 /* RACHO : if UEs idle time exceeded and a BO update
15579 * is received, then add UE to the pdcch Order Q */
15580 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
15582 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue, cell);
15583 /* If PDCCH order is already triggered and we are waiting for
15584 * RACH from UE then do not add to PdcchOdrQ. */
15585 if (ueDl->rachInfo.rapIdLnk.node == NULLP)
15587 rgSCHCmnDlAdd2PdcchOdrQ(cell, ue);
15593 /* If SPS service, invoke SPS module */
15594 if (svc->dlLcSpsCfg.isSpsEnabled)
15596 rgSCHCmnSpsDlDedBoUpd(cell, ue, svc);
15597 /* Note: Retrun from here, no update needed in other schedulers */
/* EMTC UEs are handled by the EMTC DL scheduler API table;
 * otherwise fall through to the common DL scheduler. */
15602 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
15604 cellSch->apisEmtcDl->rgSCHDlDedBoUpd(cell, ue, svc);
15605 //printf("rgSCHEMTCDlDedBoUpd\n");
15610 cellSch->apisDl->rgSCHDlDedBoUpd(cell, ue, svc);
/* Mirror the BO update on secondary cells (carrier aggregation). */
15615 rgSCHSCellDlDedBoUpd(cell, ue, svc);
15623 * @brief Removes an UE from Cell's TA List.
15627 * Function: rgSCHCmnRmvFrmTaLst
15628 * Purpose: Removes an UE from Cell's TA List.
15630 * Invoked by: Specific Scheduler
15632 * @param[in] RgSchCellCb* cell
15633 * @param[in] RgSchUeCb* ue
* @return Void
* Unlinks ue->dlTaLnk from the cell's timing-advance pending
* list; EMTC UEs use a separate EMTC TA list.
15638 PUBLIC Void rgSCHCmnRmvFrmTaLst
15644 PUBLIC Void rgSCHCmnRmvFrmTaLst(cell, ue)
15649 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15650 TRC2(rgSCHCmnRmvFrmTaLst);
15653 if(cell->emtcEnable && ue->isEmtcUe)
15655 rgSCHEmtcRmvFrmTaLst(cellCmnDl,ue);
15660 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
/* Null the node pointer so a later "is linked" check sees the UE
 * as removed. */
15661 ue->dlTaLnk.node = (PTR)NULLP;
15666 /* Fix: syed Remove the msg4Proc from cell
15667 * msg4Retx Queue. I have used CMN scheduler function
15668 * directly. Please define a new API and call this
15669 * function through that. */
15672 * @brief This function removes MSG4 HARQ process from cell RETX Queues.
15676 * Function: rgSCHCmnDlMsg4ProcRmvFrmRetx
15677 * Purpose: This function removes MSG4 HARQ process from cell RETX Queues.
15679 * Invoked by: UE/RACB deletion.
15681 * @param[in] RgSchCellCb* cell
15682 * @param[in] RgSchDlHqProc* hqP
* @return Void
* If the HARQ process is still linked on a retransmission queue
* (retxLnk.node != NULL), unlink it from the matching cell-level
* list: msg4RetxLst for the Msg4 process, ccchSduRetxLst for the
* CCCH-SDU process.
15687 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx
15690 RgSchDlHqProcCb *hqP
15693 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx(cell, hqP)
15695 RgSchDlHqProcCb *hqP;
15698 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15699 TRC2(rgSCHCmnDlMsg4ProcRmvFrmRetx);
15701 if (hqP->tbInfo[0].ccchSchdInfo.retxLnk.node)
15703 if (hqP->hqE->msg4Proc == hqP)
15705 cmLListDelFrm(&cellSch->dl.msg4RetxLst, \
15706 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15707 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15710 else if(hqP->hqE->ccchSduProc == hqP)
15712 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
15713 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15714 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15723 * @brief This function adds a HARQ process for retx.
15727 * Function: rgSCHCmnDlProcAddToRetx
15728 * Purpose: This function adds a HARQ process to retransmission
15729 * queue. This may be performed when a HARQ ack is
* unsuccessful (NACK/DTX).
15732 * Invoked by: HARQ feedback processing
15734 * @param[in] RgSchCellCb* cell
15735 * @param[in] RgSchDlHqProc* hqP
* @return Void
* Routes the process to the right retx queue: Msg4 -> cell
* msg4RetxLst, CCCH SDU -> cell ccchSduRetxLst, SPS -> SPS
* module, EMTC UE -> EMTC scheduler, otherwise common DL
* scheduler.
15740 PUBLIC Void rgSCHCmnDlProcAddToRetx
15743 RgSchDlHqProcCb *hqP
15746 PUBLIC Void rgSCHCmnDlProcAddToRetx(cell, hqP)
15748 RgSchDlHqProcCb *hqP;
15751 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15752 TRC2(rgSCHCmnDlProcAddToRetx);
15754 if (hqP->hqE->msg4Proc == hqP) /* indicating msg4 transmission */
15756 cmLListAdd2Tail(&cellSch->dl.msg4RetxLst, \
15757 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
/* Link node points back at the HARQ process for traversal. */
15758 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15761 else if(hqP->hqE->ccchSduProc == hqP)
15763 /*If CCCH SDU being transmitted without cont res CE*/
15764 cmLListAdd2Tail(&cellSch->dl.ccchSduRetxLst,
15765 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15766 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15772 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
15774 /* Invoke SPS module for SPS HARQ proc re-transmission handling */
15775 rgSCHCmnSpsDlProcAddToRetx(cell, hqP);
15778 #endif /* LTEMAC_SPS */
15780 if((TRUE == cell->emtcEnable)
15781 && (TRUE == hqP->hqE->ue->isEmtcUe))
15783 cellSch->apisEmtcDl->rgSCHDlProcAddToRetx(cell, hqP);
15788 cellSch->apisDl->rgSCHDlProcAddToRetx(cell, hqP);
15796 * @brief This function performs RI validation and
15797 * updates it to the ueCb.
15801 * Function: rgSCHCmnDlSetUeRi
15802 * Purpose: This function performs RI validation and
15803 * updates it to the ueCb.
15805 * Invoked by: rgSCHCmnDlCqiInd
15807 * @param[in] RgSchCellCb *cell
15808 * @param[in] RgSchUeCb *ue
* @param[in] U8 ri reported rank indicator
15810 * @param[in] Bool isPeriodic
* @return Void
* Clamps the reported RI to the cell antenna-port count and the
* UE-category max layers, handles the RI 1->2 PMI sanitization
* for 2-port TM4, and maintains the aperiodic-vs-periodic RI
* consistency flag (invalidateCqi).
15815 PRIVATE Void rgSCHCmnDlSetUeRi
15823 PRIVATE Void rgSCHCmnDlSetUeRi(cell, ue, ri, isPer)
15830 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15831 RgSchCmnUeInfo *ueSchCmn = RG_SCH_CMN_GET_CMN_UE(ue);
15832 TRC2(rgSCHCmnDlSetUeRi);
15835 RgSchUePCqiCb *cqiCb = RG_SCH_GET_UE_CELL_CQI_CB(ue,cell);
15840 /* FIX for RRC Reconfiguration issue */
15841 /* ccpu00140894- During Tx Mode transition RI report will not entertained for
15842 * specific during which SCH expecting UE can complete TX mode transition*/
/* Ignore RI reports until the UE completes its TM transition
 * (presumably an early return here — elided). */
15843 if (ue->txModeTransCmplt == FALSE)
15848 /* Restrict the Number of TX layers to cell->numTxAntPorts.
15849 * Protection from invalid RI values. */
15850 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
15852 /* Special case of converting PMI to sane value when
15853 * there is a switch in RI from 1 to 2 and PMI reported
15854 * for RI=1 is invalid for RI=2 */
15855 if ((cell->numTxAntPorts == 2) && (ue->mimoInfo.txMode == RGR_UE_TM_4))
15857 if ((ri == 2) && ( ueDl->mimoInfo.ri == 1))
/* Map the RI=1 PMI (0..3) onto a PMI valid for RI=2 (1 or 2). */
15859 ueDl->mimoInfo.pmi = (ueDl->mimoInfo.pmi < 2)? 1:2;
15863 /* Restrict the Number of TX layers according to the UE Category */
15864 ueDl->mimoInfo.ri = RGSCH_MIN(ri, rgUeCatTbl[ueSchCmn->ueCat].maxTxLyrs);
/* TENB stats: per-cell RI histogram (both branches of an elided
 * #ifdef; index differs by build). */
15866 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].riCnt[ueDl->mimoInfo.ri-1]++;
15867 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15871 ue->tenbStats->stats.nonPersistent.sch[0].riCnt[ueDl->mimoInfo.ri-1]++;
15872 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15878 /* If RI is from Periodic CQI report */
15879 cqiCb->perRiVal = ueDl->mimoInfo.ri;
15880 /* Reset at every Periodic RI Reception */
15881 cqiCb->invalidateCqi = FALSE;
15885 /* If RI is from Aperiodic CQI report */
15886 if (cqiCb->perRiVal != ueDl->mimoInfo.ri)
15888 /* if this aperRI is different from last reported
15889 * perRI then invalidate all CQI reports till next
* periodic RI report arrives. */
15891 cqiCb->invalidateCqi = TRUE;
15895 cqiCb->invalidateCqi = FALSE;
/* Force/unforce transmit diversity based on effective rank:
 * rank > 1 clears the RI-1 forced-TD reason; rank 1 in TM3 sets
 * it. */
15900 if (ueDl->mimoInfo.ri > 1)
15902 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
15904 else if (ue->mimoInfo.txMode == RGR_UE_TM_3) /* ri == 1 */
15906 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
15914 * @brief This function performs PMI validation and
15915 * updates it to the ueCb.
15919 * Function: rgSCHCmnDlSetUePmi
15920 * Purpose: This function performs PMI validation and
15921 * updates it to the ueCb.
15923 * Invoked by: rgSCHCmnDlCqiInd
15925 * @param[in] RgSchCellCb *cell
15926 * @param[in] RgSchUeCb *ue
15927 * @param[in] U8 pmi
* @return S16 ROK on success; presumably RFAILED for an invalid
* PMI or incomplete TM transition (returns elided in this view).
15932 PRIVATE S16 rgSCHCmnDlSetUePmi
15939 PRIVATE S16 rgSCHCmnDlSetUePmi(cell, ue, pmi)
15945 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15946 TRC2(rgSCHCmnDlSetUePmi);
/* Reject PMI reports while the UE's TM transition is pending. */
15948 if (ue->txModeTransCmplt == FALSE)
15953 if (cell->numTxAntPorts == 2)
15959 if (ueDl->mimoInfo.ri == 2)
15961 /*ccpu00118150 - MOD - changed pmi value validation from 0 to 2*/
15962 /* PMI 2 and 3 are invalid incase of 2 TxAnt and 2 Layered SM */
15963 if (pmi == 2 || pmi == 3)
/* For rank 2, stored PMI is offset by one (valid stored values
 * presumably 1..2 — confirm against users of mimoInfo.pmi). */
15967 ueDl->mimoInfo.pmi = pmi+1;
15971 ueDl->mimoInfo.pmi = pmi;
15974 else if (cell->numTxAntPorts == 4)
15980 ueDl->mimoInfo.pmi = pmi;
15982 /* Reset the No PMI Flag in forceTD */
15983 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
15988 * @brief This function Updates the DL CQI on PUCCH for the UE.
15992 * Function: rgSCHCmnDlProcCqiMode10
15994 * This function updates the DL CQI on PUCCH for the UE.
15996 * Invoked by: rgSCHCmnDlCqiOnPucchInd
15998 * Processing Steps:
* Mode 1-0 (wideband CQI, no PMI): accept a CQI in 1..15 and
* copy it to both codewords, or validate and apply an RI report.
16000 * @param[in] RgSchCellCb *cell
16001 * @param[in] RgSchUeCb *ue
16002 * @param[in] TfuDlCqiRpt *dlCqiRpt
* @param[out] Bool *isCqiAvail set TRUE when a valid CQI was
* stored (RGR_CQI_REPT builds only)
* @return Void
16007 #ifdef RGR_CQI_REPT
16009 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
16013 TfuDlCqiPucch *pucchCqi,
16017 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail)
16020 TfuDlCqiPucch *pucchCqi;
16025 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
16029 TfuDlCqiPucch *pucchCqi
16032 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi)
16035 TfuDlCqiPucch *pucchCqi;
16039 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16040 TRC2(rgSCHCmnDlProcCqiMode10);
16042 if (pucchCqi->u.mode10Info.type == TFU_RPT_CQI)
16044 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16045 /* Checking whether the decoded CQI is a value between 1 and 15*/
16046 if((pucchCqi->u.mode10Info.u.cqi) && (pucchCqi->u.mode10Info.u.cqi
16047 < RG_SCH_CMN_MAX_CQI))
16049 ueDl->cqiFlag = TRUE;
/* No per-codeword CQI in mode 1-0: mirror CW0's CQI onto CW1. */
16050 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode10Info.u.cqi;
16051 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16052 /* ccpu00117452 - MOD - Changed macro name from
16053 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16054 #ifdef RGR_CQI_REPT
16055 *isCqiAvail = TRUE;
16063 else if (pucchCqi->u.mode10Info.type == TFU_RPT_RI)
16065 if ( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode10Info.u.ri) )
/* Periodic report: last arg (TRUE, elided) marks RI as periodic. */
16067 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode10Info.u.ri,
16072 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16073 pucchCqi->u.mode10Info.u.ri,ue->ueId);
16080 * @brief This function Updates the DL CQI on PUCCH for the UE.
16084 * Function: rgSCHCmnDlProcCqiMode11
16086 * This function updates the DL CQI on PUCCH for the UE.
16088 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16090 * Processing Steps:
16091 * Process CQI MODE 11
* Mode 1-1 (wideband CQI + PMI): store CW0 CQI, derive CW1 CQI
* from the wideband differential when present, update PMI, or
* validate and apply an RI report.
16092 * @param[in] RgSchCellCb *cell
16093 * @param[in] RgSchUeCb *ue
16094 * @param[in] TfuDlCqiRpt *dlCqiRpt
* @param[out] Bool *isCqiAvail / Bool *is2ndCwCqiAvail set TRUE
* when CW0 / CW1 CQI were stored (RGR_CQI_REPT builds only)
* @return Void
16099 #ifdef RGR_CQI_REPT
16101 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16105 TfuDlCqiPucch *pucchCqi,
16107 Bool *is2ndCwCqiAvail
16110 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16113 TfuDlCqiPucch *pucchCqi;
16115 Bool *is2ndCwCqiAvail;
16119 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16123 TfuDlCqiPucch *pucchCqi
16126 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi)
16129 TfuDlCqiPucch *pucchCqi;
16133 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16134 TRC2(rgSCHCmnDlProcCqiMode11);
16136 if (pucchCqi->u.mode11Info.type == TFU_RPT_CQI)
/* A PUCCH CQI supersedes any earlier PUSCH feedback. */
16138 ue->mimoInfo.puschFdbkVld = FALSE;
16139 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16140 if((pucchCqi->u.mode11Info.u.cqi.cqi) &&
16141 (pucchCqi->u.mode11Info.u.cqi.cqi < RG_SCH_CMN_MAX_CQI))
16143 ueDl->cqiFlag = TRUE;
16144 /* ccpu00117452 - MOD - Changed macro name from
16145 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16146 #ifdef RGR_CQI_REPT
16147 *isCqiAvail = TRUE;
16149 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode11Info.u.cqi.cqi;
16150 if (pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.pres)
/* CW1 CQI = CW0 CQI adjusted by the wideband differential. */
16152 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16153 ueDl->mimoInfo.cwInfo[1].cqi, \
16154 pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.val);
16155 #ifdef RGR_CQI_REPT
16156 /* ccpu00117259 - ADD - Considering second codeword CQI info
16157 incase of MIMO for CQI Reporting */
16158 *is2ndCwCqiAvail = TRUE;
16166 rgSCHCmnDlSetUePmi(cell, ue, \
16167 pucchCqi->u.mode11Info.u.cqi.pmi);
16169 else if (pucchCqi->u.mode11Info.type == TFU_RPT_RI)
16171 if( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode11Info.u.ri))
16173 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode11Info.u.ri,
16178 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16179 pucchCqi->u.mode11Info.u.ri,ue->ueId);
16186 * @brief This function Updates the DL CQI on PUCCH for the UE.
16190 * Function: rgSCHCmnDlProcCqiMode20
16192 * This function updates the DL CQI on PUCCH for the UE.
16194 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16196 * Processing Steps:
16197 * Process CQI MODE 20
* Mode 2-0 (UE-selected subband, no PMI): only the wideband part
* of the report is consumed here; subband part presumably handled
* in elided lines. RI reports are validated and applied.
16198 * @param[in] RgSchCellCb *cell
16199 * @param[in] RgSchUeCb *ue
16200 * @param[in] TfuDlCqiRpt *dlCqiRpt
* @param[out] Bool *isCqiAvail set TRUE when a valid wideband CQI
* was stored (RGR_CQI_REPT builds only)
* @return Void
16205 #ifdef RGR_CQI_REPT
16207 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16211 TfuDlCqiPucch *pucchCqi,
16215 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail )
16218 TfuDlCqiPucch *pucchCqi;
16223 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16227 TfuDlCqiPucch *pucchCqi
16230 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi)
16233 TfuDlCqiPucch *pucchCqi;
16237 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16238 TRC2(rgSCHCmnDlProcCqiMode20);
16240 if (pucchCqi->u.mode20Info.type == TFU_RPT_CQI)
16242 if (pucchCqi->u.mode20Info.u.cqi.isWideband)
16244 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16245 if((pucchCqi->u.mode20Info.u.cqi.u.wideCqi) &&
16246 (pucchCqi->u.mode20Info.u.cqi.u.wideCqi < RG_SCH_CMN_MAX_CQI))
16248 ueDl->cqiFlag = TRUE;
/* No PMI/differential in mode 2-0: mirror CW0 CQI onto CW1. */
16249 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode20Info.u.cqi.\
16251 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16252 /* ccpu00117452 - MOD - Changed macro name from
16253 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16254 #ifdef RGR_CQI_REPT
16255 *isCqiAvail = TRUE;
16264 else if (pucchCqi->u.mode20Info.type == TFU_RPT_RI)
16266 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode20Info.u.ri))
16268 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode20Info.u.ri,
16273 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16274 pucchCqi->u.mode20Info.u.ri,ue->ueId);
16282 * @brief This function Updates the DL CQI on PUCCH for the UE.
16286 * Function: rgSCHCmnDlProcCqiMode21
16288 * This function updates the DL CQI on PUCCH for the UE.
16290 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16292 * Processing Steps:
16293 * Process CQI MODE 21
* Mode 2-1 (UE-selected subband with PMI): wideband part updates
* CW0 CQI, derives CW1 CQI from the differential when present,
* and updates PMI; RI reports are validated and applied.
16294 * @param[in] RgSchCellCb *cell
16295 * @param[in] RgSchUeCb *ue
16296 * @param[in] TfuDlCqiRpt *dlCqiRpt
* @param[out] Bool *isCqiAvail / Bool *is2ndCwCqiAvail set TRUE
* when CW0 / CW1 CQI were stored (RGR_CQI_REPT builds only)
* @return Void
16301 #ifdef RGR_CQI_REPT
16303 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16307 TfuDlCqiPucch *pucchCqi,
16309 Bool *is2ndCwCqiAvail
16312 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16315 TfuDlCqiPucch *pucchCqi;
16316 TfuDlCqiRpt *dlCqiRpt;
16318 Bool *is2ndCwCqiAvail;
16322 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16326 TfuDlCqiPucch *pucchCqi
16329 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi)
16332 TfuDlCqiPucch *pucchCqi;
16336 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16337 TRC2(rgSCHCmnDlProcCqiMode21);
16339 if (pucchCqi->u.mode21Info.type == TFU_RPT_CQI)
/* A PUCCH CQI supersedes any earlier PUSCH feedback. */
16341 ue->mimoInfo.puschFdbkVld = FALSE;
16342 if (pucchCqi->u.mode21Info.u.cqi.isWideband)
16344 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16345 if((pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi) &&
16346 (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi < RG_SCH_CMN_MAX_CQI))
16348 ueDl->cqiFlag = TRUE;
16349 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode21Info.u.cqi.\
16351 if (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.pres)
/* CW1 CQI = CW0 CQI adjusted by the wideband differential. */
16353 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16354 ueDl->mimoInfo.cwInfo[1].cqi, \
16355 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.val);
16356 #ifdef RGR_CQI_REPT
16357 /* ccpu00117259 - ADD - Considering second codeword CQI info
16358 incase of MIMO for CQI Reporting */
16359 *is2ndCwCqiAvail = TRUE;
16362 /* ccpu00117452 - MOD - Changed macro name from
16363 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16364 #ifdef RGR_CQI_REPT
16365 *isCqiAvail = TRUE;
16372 rgSCHCmnDlSetUePmi(cell, ue, \
16373 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.pmi);
16376 else if (pucchCqi->u.mode21Info.type == TFU_RPT_RI)
16378 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode21Info.u.ri))
16380 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode21Info.u.ri,
16385 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16386 pucchCqi->u.mode21Info.u.ri,ue->ueId);
16394 * @brief This function Updates the DL CQI on PUCCH for the UE.
16398 * Function: rgSCHCmnDlCqiOnPucchInd
16400 * This function updates the DL CQI on PUCCH for the UE.
16402 * Invoked by: rgSCHCmnDlCqiInd
16404 * Processing Steps:
16405 * - Depending on the reporting mode of the PUCCH, the CQI/PMI/RI values
16406 * are updated and stored for each UE
* - Pure dispatcher: routes to the mode 1-0/1-1/2-0/2-1 handlers
* above and records the mode in the RRM report (RGR_CQI_REPT).
16408 * @param[in] RgSchCellCb *cell
16409 * @param[in] RgSchUeCb *ue
16410 * @param[in] TfuDlCqiRpt *dlCqiRpt
* @param[out] RgrUeCqiRept *ueCqiRept, Bool *isCqiAvail,
* Bool *is2ndCwCqiAvail (RGR_CQI_REPT builds only)
* @return Void
16415 #ifdef RGR_CQI_REPT
16417 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16421 TfuDlCqiPucch *pucchCqi,
16422 RgrUeCqiRept *ueCqiRept,
16424 Bool *is2ndCwCqiAvail
16427 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16430 TfuDlCqiPucch *pucchCqi;
16431 RgrUeCqiRept *ueCqiRept;
16433 Bool *is2ndCwCqiAvail;
16437 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16441 TfuDlCqiPucch *pucchCqi
16444 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi)
16447 TfuDlCqiPucch *pucchCqi;
16451 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16452 TRC2(rgSCHCmnDlCqiOnPucchInd);
16454 /* ccpu00117452 - MOD - Changed
16455 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16456 #ifdef RGR_CQI_REPT
16457 /* Save CQI mode information in the report */
16458 ueCqiRept->cqiMode = pucchCqi->mode;
16461 switch(pucchCqi->mode)
16463 case TFU_PUCCH_CQI_MODE10:
16464 #ifdef RGR_CQI_REPT
16465 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail);
16467 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi);
16469 ueDl->cqiFlag = TRUE;
16471 case TFU_PUCCH_CQI_MODE11:
16472 #ifdef RGR_CQI_REPT
16473 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail,
16476 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi);
16478 ueDl->cqiFlag = TRUE;
16480 case TFU_PUCCH_CQI_MODE20:
16481 #ifdef RGR_CQI_REPT
16482 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail);
16484 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi);
16486 ueDl->cqiFlag = TRUE;
16488 case TFU_PUCCH_CQI_MODE21:
16489 #ifdef RGR_CQI_REPT
16490 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail,
16493 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi);
16495 ueDl->cqiFlag = TRUE;
/* default: unknown mode — log and mark no CQI available. */
16499 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unknown CQI Mode %d",
16500 pucchCqi->mode,ue->ueId);
16501 /* ccpu00117452 - MOD - Changed macro name from
16502 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16503 #ifdef RGR_CQI_REPT
16504 *isCqiAvail = FALSE;
16511 } /* rgSCHCmnDlCqiOnPucchInd */
16515 * @brief This function Updates the DL CQI on PUSCH for the UE.
16519 * Function: rgSCHCmnDlCqiOnPuschInd
16521 * This function updates the DL CQI on PUSCH for the UE.
16523 * Invoked by: rgSCHCmnDlCqiInd
16525 * Processing Steps:
16526 * - Depending on the reporting mode of the PUSCH, the CQI/PMI/RI values
16527 * are updated and stored for each UE
* - Handles aperiodic modes 2-0, 3-0, 1-2, 2-2, 3-1. RI (if
* present) is applied first; on PMI/decoding failure the RI is
* reverted to its previous value for TM4.
16529 * @param[in] RgSchCellCb *cell
16530 * @param[in] RgSchUeCb *ue
16531 * @param[in] TfuDlCqiRpt *dlCqiRpt
* @param[out] RgrUeCqiRept *ueCqiRept, Bool *isCqiAvail,
* Bool *is2ndCwCqiAvail (RGR_CQI_REPT builds only)
* @return Void
* NOTE(review): heavily elided — closing braces, `break`s and
* some #ifdef branches are not visible in this view.
16536 #ifdef RGR_CQI_REPT
16538 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16542 TfuDlCqiPusch *puschCqi,
16543 RgrUeCqiRept *ueCqiRept,
16545 Bool *is2ndCwCqiAvail
16548 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16551 TfuDlCqiPusch *puschCqi;
16552 RgrUeCqiRept *ueCqiRept;
16554 Bool *is2ndCwCqiAvail;
16558 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16562 TfuDlCqiPusch *puschCqi
16565 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi)
16568 TfuDlCqiPusch *puschCqi;
16572 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16574 TRC2(rgSCHCmnDlCqiOnPuschInd);
16575 if (puschCqi->ri.pres == PRSNT_NODEF)
16577 if (RG_SCH_CMN_IS_RI_VALID(puschCqi->ri.val))
16579 /* Saving the previous ri value to revert back
16580 in case PMI update failed */
16581 if (RGR_UE_TM_4 == ue->mimoInfo.txMode ) /* Cheking for TM4. TM8 check later */
16583 prevRiVal = ueDl->mimoInfo.ri;
/* Aperiodic report: isPer = FALSE. */
16585 rgSCHCmnDlSetUeRi(cell, ue, puschCqi->ri.val, FALSE);
16589 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16590 puschCqi->ri.val,ue->ueId);
/* Any PUSCH report invalidates previously stored PUSCH PMI
 * feedback until re-established per mode below. */
16594 ue->mimoInfo.puschFdbkVld = FALSE;
16595 /* ccpu00117452 - MOD - Changed macro name from
16596 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16597 #ifdef RGR_CQI_REPT
16598 /* Save CQI mode information in the report */
16599 ueCqiRept->cqiMode = puschCqi->mode;
16600 /* ccpu00117259 - DEL - removed default setting of isCqiAvail to TRUE */
16603 switch(puschCqi->mode)
16605 case TFU_PUSCH_CQI_MODE_20:
16606 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16607 /* Checking whether the decoded CQI is a value between 1 and 15*/
16608 if((puschCqi->u.mode20Info.wideBandCqi) &&
16609 (puschCqi->u.mode20Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
/* Mode 2-0: single wideband CQI mirrored onto both codewords. */
16611 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode20Info.wideBandCqi;
16612 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16613 /* ccpu00117452 - MOD - Changed macro name from
16614 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16615 #ifdef RGR_CQI_REPT
16616 *isCqiAvail = TRUE;
16624 case TFU_PUSCH_CQI_MODE_30:
16625 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16626 if((puschCqi->u.mode30Info.wideBandCqi) &&
16627 (puschCqi->u.mode30Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
/* Mode 3-0: wideband CQI mirrored onto both codewords. */
16629 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode30Info.wideBandCqi;
16630 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16631 /* ccpu00117452 - MOD - Changed macro name from
16632 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16633 #ifdef RGR_CQI_REPT
16634 *isCqiAvail = TRUE;
/* Debug counter of received aperiodic CQI reports (elided use). */
16638 extern U32 gACqiRcvdCount;
16649 case TFU_PUSCH_CQI_MODE_12:
16650 /*ccpu00109787 - ADD - Check for non-zero CQI*/
/* Mode 1-2: independent per-codeword wideband CQIs. */
16651 if((puschCqi->u.mode12Info.cqiIdx[0]) &&
16652 (puschCqi->u.mode12Info.cqiIdx[0] < RG_SCH_CMN_MAX_CQI))
16654 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode12Info.cqiIdx[0];
16655 /* ccpu00117452 - MOD - Changed macro name from
16656 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16657 #ifdef RGR_CQI_REPT
16658 *isCqiAvail = TRUE;
16665 if((puschCqi->u.mode12Info.cqiIdx[1]) &&
16666 (puschCqi->u.mode12Info.cqiIdx[1] < RG_SCH_CMN_MAX_CQI))
16668 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode12Info.cqiIdx[1];
16669 /* ccpu00117452 - MOD - Changed macro name from
16670 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16671 #ifdef RGR_CQI_REPT
16672 /* ccpu00117259 - ADD - Considering second codeword CQI info
16673 incase of MIMO for CQI Reporting */
16674 *is2ndCwCqiAvail = TRUE;
/* Mode 1-2 carries subband PMIs: keep the whole structure for
 * later PMI lookup by the allocator. */
16681 ue->mimoInfo.puschFdbkVld = TRUE;
16682 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_12;
16683 ue->mimoInfo.puschPmiInfo.u.mode12Info = puschCqi->u.mode12Info;
16684 /* : resetting this is time based. Make use of CQI reporting
16685 * periodicity, DELTA's in determining the exact time at which this
16686 * need to be reset. */
16688 case TFU_PUSCH_CQI_MODE_22:
16689 /*ccpu00109787 - ADD - Check for non-zero CQI*/
/* Mode 2-2: per-codeword wideband CQIs plus wideband PMI. */
16690 if((puschCqi->u.mode22Info.wideBandCqi[0]) &&
16691 (puschCqi->u.mode22Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16693 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode22Info.wideBandCqi[0];
16694 /* ccpu00117452 - MOD - Changed macro name from
16695 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16696 #ifdef RGR_CQI_REPT
16697 *isCqiAvail = TRUE;
16704 if((puschCqi->u.mode22Info.wideBandCqi[1]) &&
16705 (puschCqi->u.mode22Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16707 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode22Info.wideBandCqi[1];
16708 /* ccpu00117452 - MOD - Changed macro name from
16709 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16710 #ifdef RGR_CQI_REPT
16711 /* ccpu00117259 - ADD - Considering second codeword CQI info
16712 incase of MIMO for CQI Reporting */
16713 *is2ndCwCqiAvail = TRUE;
16720 rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode22Info.wideBandPmi);
16721 ue->mimoInfo.puschFdbkVld = TRUE;
16722 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_22;
16723 ue->mimoInfo.puschPmiInfo.u.mode22Info = puschCqi->u.mode22Info;
16725 case TFU_PUSCH_CQI_MODE_31:
16726 /*ccpu00109787 - ADD - Check for non-zero CQI*/
/* Mode 3-1: per-codeword wideband CQIs, single wideband PMI;
 * CW1 CQI consumed only when rank > 1. */
16727 if((puschCqi->u.mode31Info.wideBandCqi[0]) &&
16728 (puschCqi->u.mode31Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16730 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode31Info.wideBandCqi[0];
16731 /* ccpu00117452 - MOD - Changed macro name from
16732 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16733 #ifdef RGR_CQI_REPT
16734 *isCqiAvail = TRUE;
16737 if (ueDl->mimoInfo.ri > 1)
16739 if((puschCqi->u.mode31Info.wideBandCqi[1]) &&
16740 (puschCqi->u.mode31Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16742 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode31Info.wideBandCqi[1];
16743 /* ccpu00117452 - MOD - Changed macro name from
16744 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16745 #ifdef RGR_CQI_REPT
16746 /* ccpu00117259 - ADD - Considering second codeword CQI info
16747 incase of MIMO for CQI Reporting */
16748 *is2ndCwCqiAvail = TRUE;
/* Rank/PMI consistency: if the PMI is rejected, roll the RI
 * back to the value saved before this report (TM4 only). */
16752 if (rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode31Info.pmi) != ROK)
16754 /* To avoid Rank and PMI inconsistency */
16755 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16756 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16758 ueDl->mimoInfo.ri = prevRiVal;
16761 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_31;
16762 ue->mimoInfo.puschPmiInfo.u.mode31Info = puschCqi->u.mode31Info;
/* default: unknown/undecodable mode — log, revert RI (TM4), and
 * report no CQI available. */
16766 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Unknown CQI Mode %d CRNTI:%d",
16767 puschCqi->mode,ue->ueId);
16768 /* CQI decoding failed revert the RI to previous value */
16769 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16770 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16772 ueDl->mimoInfo.ri = prevRiVal;
16774 /* ccpu00117452 - MOD - Changed macro name from
16775 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16776 #ifdef RGR_CQI_REPT
16777 *isCqiAvail = FALSE;
16778 /* ccpu00117259 - ADD - Considering second codeword CQI info
16779 incase of MIMO for CQI Reporting */
16780 *is2ndCwCqiAvail = FALSE;
16787 } /* rgSCHCmnDlCqiOnPuschInd */
16791 * @brief This function Updates the DL CQI for the UE.
16795 * Function: rgSCHCmnDlCqiInd
16796 * Purpose: Updates the DL CQI for the UE
16800 * @param[in] RgSchCellCb *cell
16801 * @param[in] RgSchUeCb *ue
16802 * @param[in] TfuDlCqiRpt *dlCqi
/* NOTE(review): this extract is lossy (original line numbers embedded, braces
 * and some lines missing between numbering gaps). Code left byte-identical;
 * comments only. rgSCHCmnDlCqiInd: entry point for a DL CQI report from
 * PUCCH or PUSCH. Dispatches to the PUCCH/PUSCH-specific decoders, optionally
 * collates a CQI report for RRM (RGR_CQI_REPT), refreshes the UE's DL
 * allocation limits, and notifies DLFS/SPS/specific-scheduler modules. */
16807 PUBLIC Void rgSCHCmnDlCqiInd
16813 CmLteTimingInfo timingInfo
16816 PUBLIC Void rgSCHCmnDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo)
16821 CmLteTimingInfo timingInfo;
16824 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
16825 /* ccpu00117452 - MOD - Changed macro name from
16826 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16827 #ifdef RGR_CQI_REPT
16828 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16829 RgrUeCqiRept ueCqiRept = {{0}};
16830 Bool isCqiAvail = FALSE;
16831 /* ccpu00117259 - ADD - Considering second codeword CQI info
16832 incase of MIMO for CQI Reporting */
16833 Bool is2ndCwCqiAvail = FALSE;
16836 TRC2(rgSCHCmnDlCqiInd);
/* Decode the report: PUCCH vs PUSCH variants; the RGR_CQI_REPT build also
 * collects availability flags for the RRM CQI report below. */
16838 #ifdef RGR_CQI_REPT
16841 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16845 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16850 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi);
16854 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi);
/* Optional CQI smoothing/clamping: an out-of-range CQI (>15) is replaced by
 * the previous value; downward jumps are limited to a delta of 3 and, per the
 * condition on prevCqi below, apparently floored around CQI 6.
 * NOTE(review): exact branch structure truncated by extraction — confirm
 * against full source before relying on this description. */
16858 #ifdef CQI_CONFBITMASK_DROP
16859 if(!ue->cqiConfBitMask)
16861 if (ueDl->mimoInfo.cwInfo[0].cqi >15)
16863 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16864 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
16866 else if ( ueDl->mimoInfo.cwInfo[0].cqi >= ue->prevCqi)
16868 ue->prevCqi = ueDl->mimoInfo.cwInfo[0].cqi;
16872 U8 dlCqiDeltaPrev = 0;
16873 dlCqiDeltaPrev = ue->prevCqi - ueDl->mimoInfo.cwInfo[0].cqi;
16874 if (dlCqiDeltaPrev > 3)
16875 dlCqiDeltaPrev = 3;
16876 if ((ue->prevCqi - dlCqiDeltaPrev) < 6)
16882 ue->prevCqi = ue->prevCqi - dlCqiDeltaPrev;
16884 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16885 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
16891 /* ccpu00117452 - MOD - Changed macro name from
16892 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16893 #ifdef RGR_CQI_REPT
16894 /* ccpu00117259 - ADD - Considering second codeword CQI info
16895 incase of MIMO for CQI Reporting - added is2ndCwCqiAvail\
16896 in 'if' condition*/
/* If push-n-CQI reporting is enabled and at least one codeword CQI was
 * decoded, hand a collated report to the power-control/RRM collector. */
16897 if (RG_SCH_CQIR_IS_PUSHNCQI_ENBLE(ue) && (isCqiAvail || is2ndCwCqiAvail))
16899 ueCqiRept.cqi[0] = ueDl->mimoInfo.cwInfo[0].cqi;
16901 /* ccpu00117259 - ADD - Considering second codeword CQI info
16902 incase of MIMO for CQI Reporting - added is2ndCwCqiAvail
16903 in 'if' condition*/
16904 ueCqiRept.cqi[1] = 0;
16905 if(is2ndCwCqiAvail)
16907 ueCqiRept.cqi[1] = ueDl->mimoInfo.cwInfo[1].cqi;
16909 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, &ueCqiRept);
/* Recompute the UE's DL allocation limits from the (possibly updated) CQI;
 * the eMTC/LA and plain variants appear to be build-conditional. */
16914 rgSCHCmnDlSetUeAllocLmtLa(cell, ue);
16915 rgSCHCheckAndSetTxScheme(cell, ue);
16918 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), ue->isEmtcUe);
16920 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), FALSE);
/* Propagate the CQI event to dependent modules. */
16924 if (cellSch->dl.isDlFreqSel)
16926 cellSch->apisDlfs->rgSCHDlfsDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo);
16929 /* Call SPS module to update CQI indication */
16930 rgSCHCmnSpsDlCqiIndHndlr(cell, ue, timingInfo);
16932 /* Call Specific scheduler to process on dlCqiInd */
16934 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
16936 cellSch->apisEmtcDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
16941 cellSch->apisDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
/* Statistics accumulation (PFS / SCH_STATS / TENB_STATS builds). */
16944 #ifdef RG_PFS_STATS
16945 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].avgCqi +=
16946 ueDl->mimoInfo.cwInfo[0].cqi;
16947 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].totalCqiOcc++;
16951 ueDl->avgCqi += ueDl->mimoInfo.cwInfo[0].cqi;
16952 ueDl->numCqiOccns++;
16953 if (ueDl->mimoInfo.ri == 1)
16964 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16965 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16966 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw0Cqi ++;
16967 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw1Cqi ++;
16968 cell->tenbStats->sch.dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16969 cell->tenbStats->sch.dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16970 cell->tenbStats->sch.dlNumCw0Cqi ++;
16971 cell->tenbStats->sch.dlNumCw1Cqi ++;
16978 * @brief This function calculates the wideband CQI from SNR
16979 * reported for each RB.
16983 * Function: rgSCHCmnCalcWcqiFrmSnr
16984 * Purpose: Wideband CQI calculation from SNR
16986 * Invoked by: RG SCH
16988 * @param[in] RgSchCellCb *cell
16989 * @param[in] TfuSrsRpt *srsRpt,
16990 * @return Wideband CQI
/* rgSCHCmnCalcWcqiFrmSnr: maps the first SRS SNR sample (srsRpt->snr[0]) to
 * a wideband CQI via fixed 50-unit SNR bands (<=50, 51-100, ... 201-250).
 * The assigned CQI values per band are truncated from this extract; wideCqi
 * defaults to 1. Code byte-identical; comments only. */
16994 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr
17000 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr(cell,srsRpt)
17005 U8 wideCqi=1; /*Calculated value from SNR*/
17006 TRC2(rgSCHCmnCalcWcqiFrmSnr);
17007 /*Need to map a certain SNR with a WideCQI value.
17008 * The CQI calculation is still primitive. Further, need to
17009 * use a improvized method for calculating WideCQI from SNR*/
17010 if (srsRpt->snr[0] <=50)
17014 else if (srsRpt->snr[0]>=51 && srsRpt->snr[0] <=100)
17018 else if (srsRpt->snr[0]>=101 && srsRpt->snr[0] <=150)
17022 else if (srsRpt->snr[0]>=151 && srsRpt->snr[0] <=200)
17026 else if (srsRpt->snr[0]>=201 && srsRpt->snr[0] <=250)
17035 }/*rgSCHCmnCalcWcqiFrmSnr*/
17039 * @brief This function Updates the SRS for the UE.
17043 * Function: rgSCHCmnSrsInd
17044 * Purpose: Updates the UL SRS for the UE
17048 * @param[in] RgSchCellCb *cell
17049 * @param[in] RgSchUeCb *ue
17050 * @param[in] TfuSrsRpt *srsRpt,
/* rgSCHCmnSrsInd: handles an SRS report for a UE. Derives the selected UL TX
 * antenna from the report arrival time (TTI count modulo 2 over the SRS
 * period), takes the wideband CQI directly from the report when present or
 * derives it from SNR otherwise, then feeds it to the antenna-selection
 * helper. Code byte-identical; comments only. */
17055 PUBLIC Void rgSCHCmnSrsInd
17060 CmLteTimingInfo timingInfo
17063 PUBLIC Void rgSCHCmnSrsInd(cell, ue, srsRpt, timingInfo)
17067 CmLteTimingInfo timingInfo;
17070 U8 wideCqi; /*Calculated value from SNR*/
17071 U32 recReqTime; /*Received Time in TTI*/
17072 TRC2(rgSCHCmnSrsInd);
/* Absolute TTI = sfn * subframes-per-frame + slot. */
17074 recReqTime = (timingInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) + timingInfo.slot;
/* Alternate the selected antenna (0/1) every SRS period. */
17075 ue->srsCb.selectedAnt = (recReqTime/ue->srsCb.peri)%2;
17076 if(srsRpt->wideCqiPres)
17078 wideCqi = srsRpt->wideCqi;
17082 wideCqi = rgSCHCmnCalcWcqiFrmSnr(cell, srsRpt);
17084 rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi);
17086 }/*rgSCHCmnSrsInd*/
17091 * @brief This function is a handler for TA report for an UE.
17095 * Function: rgSCHCmnDlTARpt
17096 * Purpose: Determine based on UE_IDLE_TIME threshold,
17097 * whether UE needs to be Linked to the scheduler's TA list OR
17098 * if it needs a PDCCH Order.
17103 * @param[in] RgSchCellCb *cell
17104 * @param[in] RgSchUeCb *ue
/* rgSCHCmnDlTARpt: handles a timing-advance report for a UE. If the UE has a
 * configured TA timer and has been idle beyond the threshold, it is marked
 * PDCCH-order-inactive in both directions and the specific schedulers are
 * informed (once — only if the mask was previously clear). Otherwise the UE
 * is appended to the cell's TA list, guarding against duplicate insertion
 * (ccpu00124009). Code byte-identical; comments only. */
17109 PUBLIC Void rgSCHCmnDlTARpt
17115 PUBLIC Void rgSCHCmnDlTARpt(cell, ue)
17120 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17121 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
17122 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
17123 CmLListCp poInactvLst;
17125 TRC2(rgSCHCmnDlTARpt);
17127 /* RACHO: If UE idle time is more than threshold, then
17128 * set its poInactv pdcch order inactivity */
17129 /* Fix : syed Ignore if TaTmr is not configured */
17130 if ((ue->dl.taCb.cfgTaTmr) && (rgSCHCmnUeIdleExdThrsld(cell, ue) == ROK))
17132 U32 prevDlMsk = ue->dl.dlInactvMask;
17133 U32 prevUlMsk = ue->ul.ulInactvMask;
17134 ue->dl.dlInactvMask |= RG_PDCCHODR_INACTIVE;
17135 ue->ul.ulInactvMask |= RG_PDCCHODR_INACTIVE;
17136 /* Indicate Specific scheduler for this UEs inactivity */
17137 cmLListInit(&poInactvLst);
17138 cmLListAdd2Tail(&poInactvLst, &ueDl->rachInfo.inActUeLnk);
17139 ueDl->rachInfo.inActUeLnk.node = (PTR)ue;
17140 /* Send inactivate ind only if not already sent */
17141 if (prevDlMsk == 0)
17143 cellSch->apisDl->rgSCHDlInactvtUes(cell, &poInactvLst);
17145 if (prevUlMsk == 0)
17147 cellSch->apisUl->rgSCHUlInactvtUes(cell, &poInactvLst);
17152 /* Fix: ccpu00124009 Fix for loop in the linked list "cellDl->taLst" */
/* dlTaLnk.node == NULL means the UE is not already on the TA list. */
17153 if (!ue->dlTaLnk.node)
17156 if(cell->emtcEnable)
17160 rgSCHEmtcAddToTaLst(cellDl,ue);
17167 cmLListAdd2Tail(&cellDl->taLst, &ue->dlTaLnk);
17168 ue->dlTaLnk.node = (PTR)ue;
17173 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
17174 "<TA>TA duplicate entry attempt failed: UEID:%u",
17183 * @brief Indication of UL CQI.
17187 * Function : rgSCHCmnFindUlCqiUlTxAnt
17189 * - Finds the Best Tx Antenna amongst the CQIs received
17190 * from Two Tx Antennas.
17192 * @param[in] RgSchCellCb *cell
17193 * @param[in] RgSchUeCb *ue
17194 * @param[in] U8 wideCqi
/* rgSCHCmnFindUlCqiUlTxAnt: intended to select the better UL TX antenna from
 * per-antenna CQIs; the visible body only marks antenna 1 as valid.
 * NOTE(review): selection logic (use of wideCqi) is truncated in this
 * extract — confirm against the full source. Code byte-identical. */
17198 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt
17205 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi)
17211 ue->validTxAnt = 1;
17213 } /* rgSCHCmnFindUlCqiUlTxAnt */
17217 * @brief Indication of UL CQI.
17221 * Function : rgSCHCmnUlCqiInd
17223 * - Updates uplink CQI information for the UE. Computes and
17224 * stores the lowest CQI of CQIs reported in all subbands.
17226 * @param[in] RgSchCellCb *cell
17227 * @param[in] RgSchUeCb *ue
17228 * @param[in] TfuUlCqiRpt *ulCqiInfo
/* rgSCHCmnUlCqiInd: processes a UL (SRS/PUSCH-derived) wideband CQI report.
 * Stores the CQI, runs the UL link-adaptation (LA) filter on the CQI-derived
 * iTbs — ignoring reports that differ from the in-use iTbs by more than 5 to
 * avoid chasing outliers, and resetting the filter after such an ignore —
 * then notifies power control, SPS, and the specific UL scheduler, and
 * accumulates stats. Code byte-identical; comments only. */
17232 PUBLIC Void rgSCHCmnUlCqiInd
17236 TfuUlCqiRpt *ulCqiInfo
17239 PUBLIC Void rgSCHCmnUlCqiInd(cell, ue, ulCqiInfo)
17242 TfuUlCqiRpt *ulCqiInfo;
17245 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17246 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17251 #if (defined(SCH_STATS) || defined(TENB_STATS))
17252 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
17255 TRC2(rgSCHCmnUlCqiInd);
17256 /* consider inputs from SRS handlers about SRS occassions
17257 * in determining the UL TX Antenna selection */
17258 ueUl->crntUlCqi[0] = ulCqiInfo->wideCqi;
17260 ueUl->validUlCqi = ueUl->crntUlCqi[0];
17261 ue->validTxAnt = 0;
/* Translate CQI -> iTbs (table indexed by extended-CP flag), and compute the
 * currently used iTbs from the LA accumulator (scaled by 100). */
17263 iTbsNew = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][ueUl->validUlCqi];
17264 previTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
17266 if (RG_ITBS_DIFF(iTbsNew, previTbs) > 5)
17268 /* Ignore this iTBS report and mark that last iTBS report was */
17269 /* ignored so that subsequently we reset the LA algorithm */
17270 ueUl->ulLaCb.lastiTbsIgnored = TRUE;
17274 if (ueUl->ulLaCb.lastiTbsIgnored != TRUE)
/* IIR filter: 20% new report, 80% history (values kept scaled by 100). */
17276 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17277 (80 * ueUl->ulLaCb.cqiBasediTbs))/100;
17281 /* Reset the LA as iTbs in use caught up with the value */
17282 /* reported by UE. */
17283 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17284 (80 * previTbs * 100))/100;
17285 ueUl->ulLaCb.deltaiTbs = 0;
17286 ueUl->ulLaCb.lastiTbsIgnored = FALSE;
/* Fan out the CQI event to power control, UL SPS and the UL scheduler. */
17291 rgSCHPwrUlCqiInd(cell, ue);
17293 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17295 rgSCHCmnSpsUlCqiInd(cell, ue);
17298 /* Applicable to only some schedulers */
17300 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
17302 cellSch->apisEmtcUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
17307 cellSch->apisUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
/* Statistics (SCH_STATS / TENB_STATS builds). */
17311 ueUl->numCqiOccns++;
17312 ueUl->avgCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17317 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17318 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulNumCqi ++;
17319 cell->tenbStats->sch.ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17320 cell->tenbStats->sch.ulNumCqi ++;
17325 } /* rgSCHCmnUlCqiInd */
17328 * @brief Returns HARQ proc for which data expected now.
17332 * Function: rgSCHCmnUlHqProcForUe
17333 * Purpose: This function returns the harq process for
17334 * which data is expected in the current subframe.
17335 * It does not validate that the HARQ process
17336 * has an allocation.
17340 * @param[in] RgSchCellCb *cell
17341 * @param[in] CmLteTimingInfo frm
17342 * @param[in] RgSchUeCb *ue
17343 * @param[out] RgSchUlHqProcCb **procRef
/* rgSCHCmnUlHqProcForUe: returns via *procRef the UL HARQ process whose data
 * is expected in subframe 'frm' (no check that it has an allocation). Two
 * lookup paths are visible — by computed process id and by time — presumably
 * selected by a build flag that this extract drops (TODO confirm). */
17347 PUBLIC Void rgSCHCmnUlHqProcForUe
17350 CmLteTimingInfo frm,
17352 RgSchUlHqProcCb **procRef
17355 PUBLIC Void rgSCHCmnUlHqProcForUe(cell, frm, ue, procRef)
17357 CmLteTimingInfo frm;
17359 RgSchUlHqProcCb **procRef;
17363 U8 procId = rgSCHCmnGetUlHqProcIdx(&frm, cell);
17365 TRC2(rgSCHCmnUlHqProcForUe);
17367 *procRef = rgSCHUhmGetUlHqProc(cell, ue, procId);
17369 *procRef = rgSCHUhmGetUlProcByTime(cell, ue, frm);
17376 * @brief Update harq process for allocation.
17380 * Function : rgSCHCmnUpdUlHqProc
17382 * This function is invoked when harq process
17383 * control block is now in a new memory location
17384 * thus requiring a pointer/reference update.
17386 * @param[in] RgSchCellCb *cell
17387 * @param[in] RgSchUlHqProcCb *curProc
17388 * @param[in] RgSchUlHqProcCb *oldProc
/* rgSCHCmnUpdUlHqProc: re-links an allocation to its UL HARQ process control
 * block after the block has moved in memory; debug builds sanity-check that
 * the new process actually carries an allocation. oldProc is visibly unused
 * here (kept for interface compatibility). Code byte-identical. */
17394 PUBLIC S16 rgSCHCmnUpdUlHqProc
17397 RgSchUlHqProcCb *curProc,
17398 RgSchUlHqProcCb *oldProc
17401 PUBLIC S16 rgSCHCmnUpdUlHqProc(cell, curProc, oldProc)
17403 RgSchUlHqProcCb *curProc;
17404 RgSchUlHqProcCb *oldProc;
17407 TRC2(rgSCHCmnUpdUlHqProc);
17411 #if (ERRCLASS & ERRCLS_DEBUG)
17412 if (curProc->alloc == NULLP)
/* Point the allocation back at the relocated HARQ process. */
17417 curProc->alloc->hqProc = curProc;
17419 } /* rgSCHCmnUpdUlHqProc */
17422 /*MS_WORKAROUND for CR FIXME */
17424 * @brief Hsndles BSR timer expiry
17428 * Function : rgSCHCmnBsrTmrExpry
17430 * This function is invoked when periodic BSR timer expires for a UE.
17432 * @param[in] RgSchUeCb *ue
/* rgSCHCmnBsrTmrExpry: periodic-BSR timer expiry handler. Treats the expiry
 * like a scheduling request (isSrGrant = TRUE) and forwards it to the
 * eMTC-specific or normal UL scheduler's SR handler. */
17438 PUBLIC S16 rgSCHCmnBsrTmrExpry
17443 PUBLIC S16 rgSCHCmnBsrTmrExpry(ueCb)
17447 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ueCb->cell);
17449 TRC2(rgSCHCmnBsrTmrExpry)
17451 ueCb->isSrGrant = TRUE;
17454 emtcStatsUlBsrTmrTxp++;
17458 if(ueCb->cell->emtcEnable)
17462 cellSch->apisEmtcUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17469 cellSch->apisUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17476 * @brief Short BSR update.
17480 * Function : rgSCHCmnUpdBsrShort
17482 * This functions does requisite updates to handle short BSR reporting.
17484 * @param[in] RgSchCellCb *cell
17485 * @param[in] RgSchUeCb *ue
17486 * @param[in] RgSchLcgCb *ulLcg
17487 * @param[in] U8 bsr
17488 * @param[out] RgSchErrInfo *err
/* rgSCHCmnUpdBsrShort: handles a MAC Short BSR for one LCG. Zeroes the
 * buffer status (bs) of every other LCG (maintaining per-QCI active-UE
 * counts as LCGs go inactive/active), converts the 6-bit BSR index to bytes
 * via the normal or extended BSR table, caps it by GBR/AMBR as appropriate,
 * updates totalBsr, stops the BSR timer on a zero report, and notifies
 * SPS / the specific UL scheduler(s), including active SCells under UL CA.
 * Returns RGSCHERR_SCH_LCG_NOT_CFGD via err for an unconfigured LCG.
 * Code byte-identical; comments only. */
17494 PUBLIC S16 rgSCHCmnUpdBsrShort
17503 PUBLIC S16 rgSCHCmnUpdBsrShort(cell, ue, ulLcg, bsr, err)
17513 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17515 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17516 RgSchCmnLcg *cmnLcg = NULLP;
17521 TRC2(rgSCHCmnUpdBsrShort);
17523 if (!RGSCH_LCG_ISCFGD(ulLcg))
17525 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
17528 for (lcgCnt=0; lcgCnt<4; lcgCnt++)
17531 /* Set BS of all other LCGs to Zero.
17532 If Zero BSR is reported in Short BSR include this LCG too */
17533 if ((lcgCnt != ulLcg->lcgId) ||
17534 (!bsr && !ueUl->hqEnt.numBusyHqProcs))
17536 /* If old BO is zero do nothing */
17537 if(((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs != 0)
17539 for(idx = 0; idx < ue->ul.lcgArr[lcgCnt].numLch; idx++)
/* Drop this LC from the per-QCI active count if it was counted. */
17541 if((ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount) &&
17542 (ue->ulActiveLCs & (1 <<
17543 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1))))
17546 ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount--;
17547 ue->ulActiveLCs &= ~(1 <<
17548 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1));
17554 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgCnt]))
17556 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs = 0;
17557 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->reportedBs = 0;
/* Reported LCG transitions from empty to non-empty: count its LCs active. */
17562 if(ulLcg->lcgId && bsr && (((RgSchCmnLcg *)(ulLcg->sch))->bs == 0))
17564 for(idx = 0; idx < ulLcg->numLch; idx++)
17567 if (!(ue->ulActiveLCs & (1 << (ulLcg->lcArray[idx]->qciCb->qci -1))))
17569 ulLcg->lcArray[idx]->qciCb->ulUeCount++;
17570 ue->ulActiveLCs |= (1 << (ulLcg->lcArray[idx]->qciCb->qci -1));
17575 /* Resetting the nonGbrLcgBs info here */
17576 ue->ul.nonGbrLcgBs = 0;
17577 ue->ul.nonLcg0Bs = 0;
/* BSR index -> bytes; extended table per ue->ul.useExtBSRSizes. */
17579 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17581 if (TRUE == ue->ul.useExtBSRSizes)
17583 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17587 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
/* Cap: GBR bearers by effGbr+effDeltaMbr, LCG0 uncapped, others by AMBR. */
17589 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17591 /* TBD check for effGbr != 0 */
17592 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17594 else if (0 == ulLcg->lcgId)
17596 /* This is added for handling LCG0 */
17597 cmnLcg->bs = cmnLcg->reportedBs;
17601 /* Update non GBR LCG's BS*/
17602 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17603 cmnLcg->bs = ue->ul.nonGbrLcgBs;
17605 ue->ul.totalBsr = cmnLcg->bs;
17608 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (bsr == 0))
17610 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17614 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17616 rgSCHCmnSpsBsrRpt(cell, ue, ulLcg);
17619 rgSCHCmnUpdUlCompEffBsr(ue);
/* Notify eMTC or normal specific UL scheduler. */
17622 if(cell->emtcEnable)
17626 cellSch->apisEmtcUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
17633 cellSch->apisUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
/* UL CA: propagate the BSR to each (active) secondary cell. */
17637 if (ue->ul.isUlCaEnabled && ue->numSCells)
17639 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17641 #ifndef PAL_ENABLE_UL_CA
17642 if((ue->cellInfo[sCellIdx] != NULLP) &&
17643 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17645 if(ue->cellInfo[sCellIdx] != NULLP)
17648 cellSch->apisUl->rgSCHUpdBsrShort(ue->cellInfo[sCellIdx]->cell,
17659 * @brief Truncated BSR update.
17663 * Function : rgSCHCmnUpdBsrTrunc
17665 * This functions does required updates to handle truncated BSR report.
17668 * @param[in] RgSchCellCb *cell
17669 * @param[in] RgSchUeCb *ue
17670 * @param[in] RgSchLcgCb *ulLcg
17671 * @param[in] U8 bsr
17672 * @param[out] RgSchErrInfo *err
/* rgSCHCmnUpdBsrTrunc: handles a MAC Truncated BSR. Clears the buffer status
 * of all higher-priority (lower-id) LCGs, refreshes active-LC accounting for
 * LCGs at/after the reported one, converts the BSR index to bytes with the
 * GBR/LCG0/AMBR caps, then recomputes totalBsr as this LCG's bs plus the
 * (possibly stale — see embedded TODO) bs of the remaining LCGs, and fans the
 * report out to SPS-independent consumers, the specific UL scheduler(s), and
 * active SCells under UL CA. Code byte-identical; comments only. */
17678 PUBLIC S16 rgSCHCmnUpdBsrTrunc
17687 PUBLIC S16 rgSCHCmnUpdBsrTrunc(cell, ue, ulLcg, bsr, err)
17695 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17696 RgSchCmnLcg *cmnLcg = NULLP;
17702 TRC2(rgSCHCmnUpdBsrTrunc);
17704 if (!RGSCH_LCG_ISCFGD(ulLcg))
17706 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
17709 /* set all higher prio lcgs bs to 0 and update this lcgs bs and
17710 total bsr= sumofall lcgs bs */
17713 for (cnt = ulLcg->lcgId-1; cnt >= 0; cnt--)
17716 /* If Existing BO is zero the don't do anything */
17717 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs != 0)
17719 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17722 if((ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount) &&
17723 (ue->ulActiveLCs & (1 <<
17724 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17726 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount--;
17727 ue->ulActiveLCs &= ~(1 <<
17728 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
17733 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs = 0;
17734 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->reportedBs = 0;
/* Mark LCs of the reported and lower-priority LCGs active (per-QCI counts). */
17739 for (cnt = ulLcg->lcgId; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17741 if (ulLcg->lcgId == 0)
17745 /* If Existing BO is zero the don't do anything */
17746 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs == 0)
17748 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17751 if (!(ue->ulActiveLCs & (1 <<
17752 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17754 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount++;
17755 ue->ulActiveLCs |= (1 <<
17756 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
17762 ue->ul.nonGbrLcgBs = 0;
17763 ue->ul.nonLcg0Bs = 0;
/* BSR index -> bytes, with the same caps as the Short BSR path. */
17764 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17765 if (TRUE == ue->ul.useExtBSRSizes)
17767 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17771 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
17773 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17775 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17777 else if(ulLcg->lcgId == 0)
17779 /* This is for handeling LCG0 */
17780 cmnLcg->bs = cmnLcg->reportedBs;
17784 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
17785 cmnLcg->bs = ue->ul.nonGbrLcgBs;
17787 ue->ul.totalBsr = cmnLcg->bs;
17789 for (cnt = ulLcg->lcgId+1; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17791 /* TODO: The bs for the other LCGs may be stale because some or all of
17792 * the part of bs may have been already scheduled/data received. Please
17793 * consider this when truncated BSR is tested/implemented */
17794 ue->ul.totalBsr += ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs;
17797 rgSCHCmnUpdUlCompEffBsr(ue);
17800 if(cell->emtcEnable)
17804 cellSch->apisEmtcUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
17811 cellSch->apisUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
/* UL CA: propagate to active SCells. */
17815 if (ue->ul.isUlCaEnabled && ue->numSCells)
17817 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17819 #ifndef PAL_ENABLE_UL_CA
17820 if((ue->cellInfo[sCellIdx] != NULLP) &&
17821 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17823 if(ue->cellInfo[sCellIdx] != NULLP)
17826 cellSch->apisUl->rgSCHUpdBsrTrunc(ue->cellInfo[sCellIdx]->cell, ue, ulLcg, bsr);
17836 * @brief Long BSR update.
17840 * Function : rgSCHCmnUpdBsrLong
17842 * - Update BSRs for all configured LCGs.
17843 * - Update priority of LCGs if needed.
17844 * - Update UE's position within/across uplink scheduling queues.
17847 * @param[in] RgSchCellCb *cell
17848 * @param[in] RgSchUeCb *ue
17849 * @param[in] U8 bsArr[]
17850 * @param[out] RgSchErrInfo *err
/* rgSCHCmnUpdBsrLong: handles a MAC Long BSR carrying one BSR index per LCG
 * (bsArr[0..3]). Refreshes per-QCI active-LC counts for LCGs going non-empty,
 * converts every configured LCG's index to bytes (normal or extended table),
 * caps GBR LCGs by effGbr+effDeltaMbr and the pooled non-GBR total by AMBR,
 * recomputes totalBsr, stops the BSR timer when everything is empty, reports
 * to UL SPS when only LCG1 has data, and fans out to the specific UL
 * scheduler(s) and active SCells under UL CA. Code byte-identical. */
17856 PUBLIC S16 rgSCHCmnUpdBsrLong
17864 PUBLIC S16 rgSCHCmnUpdBsrLong(cell, ue, bsArr, err)
17871 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17872 U32 tmpBsArr[4] = {0, 0, 0, 0};
17880 TRC2(rgSCHCmnUpdBsrLong);
/* LCGs 1..3 transitioning empty -> non-empty: mark their LCs active. */
17883 for(idx1 = 1; idx1 < RGSCH_MAX_LCG_PER_UE; idx1++)
17885 /* If Old BO is non zero then do nothing */
17886 if ((((RgSchCmnLcg *)(ue->ul.lcgArr[idx1].sch))->bs == 0)
17889 for(idx2 = 0; idx2 < ue->ul.lcgArr[idx1].numLch; idx2++)
17892 if (!(ue->ulActiveLCs & (1 <<
17893 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1))))
17895 ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->ulUeCount++;
17896 ue->ulActiveLCs |= (1 <<
17897 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1));
17903 ue->ul.nonGbrLcgBs = 0;
17904 ue->ul.nonLcg0Bs = 0;
/* LCG0 is taken at face value (no AMBR/GBR cap). */
17906 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[0]))
17908 if (TRUE == ue->ul.useExtBSRSizes)
17910 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnExtBsrTbl[bsArr[0]];
17911 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnExtBsrTbl[bsArr[0]];
17912 tmpBsArr[0] = rgSchCmnExtBsrTbl[bsArr[0]];
17916 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnBsrTbl[bsArr[0]];
17917 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnBsrTbl[bsArr[0]];
17918 tmpBsArr[0] = rgSchCmnBsrTbl[bsArr[0]];
17921 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
17923 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
17925 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
17927 if (TRUE == ue->ul.useExtBSRSizes)
17929 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsArr[lcgId]];
17933 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsArr[lcgId]];
17935 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17937 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17938 tmpBsArr[lcgId] = cmnLcg->bs;
/* Non-GBR LCGs: pool the reported bytes and cap the pool by effective AMBR. */
17942 nonGbrBs += cmnLcg->reportedBs;
17943 tmpBsArr[lcgId] = cmnLcg->reportedBs;
17944 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17948 ue->ul.nonGbrLcgBs = RGSCH_MIN(nonGbrBs,ue->ul.effAmbr);
17950 ue->ul.totalBsr = tmpBsArr[0] + tmpBsArr[1] + tmpBsArr[2] + tmpBsArr[3];
17952 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (ue->ul.totalBsr == 0))
17954 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17959 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE) /* SPS_FIX */
17961 if(ue->ul.totalBsr - tmpBsArr[1] == 0)
17962 {/* Updaing the BSR to SPS only if LCG1 BS is present in sps active state */
17963 rgSCHCmnSpsBsrRpt(cell, ue, &ue->ul.lcgArr[1]);
17967 rgSCHCmnUpdUlCompEffBsr(ue);
17970 if(cell->emtcEnable)
17974 cellSch->apisEmtcUl->rgSCHUpdBsrLong(cell, ue, bsArr);
17981 cellSch->apisUl->rgSCHUpdBsrLong(cell, ue, bsArr);
/* UL CA: propagate to active SCells. */
17985 if (ue->ul.isUlCaEnabled && ue->numSCells)
17987 for(U8 idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
17989 #ifndef PAL_ENABLE_UL_CA
17990 if((ue->cellInfo[idx] != NULLP) &&
17991 (ue->cellInfo[idx]->sCellState == RG_SCH_SCELL_ACTIVE))
17993 if(ue->cellInfo[idx] != NULLP)
17996 cellSch->apisUl->rgSCHUpdBsrLong(ue->cellInfo[idx]->cell, ue, bsArr);
18006 * @brief PHR update.
18010 * Function : rgSCHCmnUpdExtPhr
18012 * Updates extended power headroom information for an UE.
18014 * @param[in] RgSchCellCb *cell
18015 * @param[in] RgSchUeCb *ue
18016 * @param[in] U8 phr
18017 * @param[out] RgSchErrInfo *err
/* rgSCHCmnUpdExtPhr: handles an Extended Power Headroom Report MAC CE.
 * Walks the UE's UL allocation records (visible: starts from the most
 * recent) to find the allocation whose time matches the CE report time, and
 * lets the power module apply the PHR against that allocation; finally nudges
 * UL SPS if active. Loop structure is truncated in this extract. */
18023 PUBLIC S16 rgSCHCmnUpdExtPhr
18027 RgInfExtPhrCEInfo *extPhr,
18031 PUBLIC S16 rgSCHCmnUpdExtPhr(cell, ue, extPhr, err)
18034 RgInfExtPhrCEInfo *extPhr;
18038 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18039 RgSchCmnAllocRecord *allRcd;
18040 CmLList *node = ueUl->ulAllocLst.last;
18043 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18045 TRC2(rgSCHCmnUpdExtPhr);
18051 allRcd = (RgSchCmnAllocRecord *)node->node;
/* Apply the PHR only against the allocation it was measured on. */
18053 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18055 rgSCHPwrUpdExtPhr(cell, ue, extPhr, allRcd);
18060 if(ulSpsUe->isUlSpsActv)
18062 rgSCHCmnSpsPhrInd(cell,ue);
18067 } /* rgSCHCmnUpdExtPhr */
18073 * @brief PHR update.
18077 * Function : rgSCHCmnUpdPhr
18079 * Updates power headroom information for an UE.
18081 * @param[in] RgSchCellCb *cell
18082 * @param[in] RgSchUeCb *ue
18083 * @param[in] U8 phr
18084 * @param[out] RgSchErrInfo *err
/* rgSCHCmnUpdPhr: handles a (legacy, single-value) Power Headroom Report.
 * Same pattern as rgSCHCmnUpdExtPhr: match the allocation record whose time
 * equals the MAC CE report time, apply the PHR via the power module (using
 * the configured max-power policy flag), then inform UL SPS if active. */
18090 PUBLIC S16 rgSCHCmnUpdPhr
18098 PUBLIC S16 rgSCHCmnUpdPhr(cell, ue, phr, err)
18105 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18106 RgSchCmnAllocRecord *allRcd;
18107 CmLList *node = ueUl->ulAllocLst.last;
18110 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18112 TRC2(rgSCHCmnUpdPhr);
18118 allRcd = (RgSchCmnAllocRecord *)node->node;
18120 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18122 rgSCHPwrUpdPhr(cell, ue, phr, allRcd, RG_SCH_CMN_PWR_USE_CFG_MAX_PWR);
18127 if(ulSpsUe->isUlSpsActv)
18129 rgSCHCmnSpsPhrInd(cell,ue);
18134 } /* rgSCHCmnUpdPhr */
18137 * @brief UL grant for contention resolution.
18141 * Function : rgSCHCmnContResUlGrant
18143 * Add UE to another queue specifically for CRNTI based contention
18147 * @param[in] RgSchUeCb *ue
18148 * @param[out] RgSchErrInfo *err
/* rgSCHCmnContResUlGrant: requests a UL grant for CRNTI-based contention
 * resolution by delegating to the eMTC or normal specific UL scheduler. */
18154 PUBLIC S16 rgSCHCmnContResUlGrant
18161 PUBLIC S16 rgSCHCmnContResUlGrant(cell, ue, err)
18167 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18168 TRC2(rgSCHCmnContResUlGrant);
18171 if(cell->emtcEnable)
18175 cellSch->apisEmtcUl->rgSCHContResUlGrant(cell, ue);
18182 cellSch->apisUl->rgSCHContResUlGrant(cell, ue);
18188 * @brief SR reception handling.
18192 * Function : rgSCHCmnSrRcvd
18194 * - Update UE's position within/across uplink scheduling queues
18195 * - Update priority of LCGs if needed.
18197 * @param[in] RgSchCellCb *cell
18198 * @param[in] RgSchUeCb *ue
18199 * @param[in] CmLteTimingInfo frm
18200 * @param[out] RgSchErrInfo *err
/* rgSCHCmnSrRcvd: handles a Scheduling Request. Advances 'frm' by one TTI
 * (SR decode latency), and — per the visible comparison against the latest
 * allocation record's time — appears to suppress duplicate handling when an
 * allocation already covers that TTI (branch bodies truncated here; confirm).
 * Marks the UE as SR-granted and forwards to the eMTC/normal UL scheduler. */
18206 PUBLIC S16 rgSCHCmnSrRcvd
18210 CmLteTimingInfo frm,
18214 PUBLIC S16 rgSCHCmnSrRcvd(cell, ue, frm, err)
18217 CmLteTimingInfo frm;
18221 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18222 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18223 CmLList *node = ueUl->ulAllocLst.last;
18225 TRC2(rgSCHCmnSrRcvd);
18228 emtcStatsUlTomSrInd++;
18231 RGSCH_INCR_SUB_FRAME(frm, 1); /* 1 TTI after the time SR was sent */
18234 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)node->node;
18235 if (RGSCH_TIMEINFO_SAME(frm, allRcd->allocTime))
18241 //TODO_SID Need to check when it is getting triggered
18242 ue->isSrGrant = TRUE;
18244 if(cell->emtcEnable)
18248 cellSch->apisEmtcUl->rgSCHSrRcvd(cell, ue);
18255 cellSch->apisUl->rgSCHSrRcvd(cell, ue);
18261 * @brief Returns first uplink allocation to send reception
18266 * Function: rgSCHCmnFirstRcptnReq(cell)
18267 * Purpose: This function returns the first uplink allocation
18268 * (or NULLP if there is none) in the subframe
18269 * in which is expected to prepare and send reception
18274 * @param[in] RgSchCellCb *cell
18275 * @return RgSchUlAlloc*
/* rgSCHCmnFirstRcptnReq: returns the first UL allocation in the subframe for
 * which reception requests must be prepared, skipping allocation(s) without a
 * HARQ process; returns NULLP when rcpReqIdx is invalid or the subframe has
 * no usable allocation. */
18278 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq
18283 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq(cell)
18287 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18289 RgSchUlAlloc* alloc = NULLP;
18291 TRC2(rgSCHCmnFirstRcptnReq);
18293 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18295 RgSchUlSf* sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18296 alloc = rgSCHUtlUlAllocFirst(sf);
/* Skip entries with no HARQ process attached. */
18298 if (alloc && alloc->hqProc == NULLP)
18300 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18308 * @brief Returns first uplink allocation to send reception
18313 * Function: rgSCHCmnNextRcptnReq(cell)
18314 * Purpose: This function returns the next uplink allocation
18315 * (or NULLP if there is none) in the subframe
18316 * in which is expected to prepare and send reception
18321 * @param[in] RgSchCellCb *cell
18322 * @return RgSchUlAlloc*
/* rgSCHCmnNextRcptnReq: iterator companion to rgSCHCmnFirstRcptnReq —
 * returns the next UL allocation (after 'alloc') needing a reception
 * request in the current rcpReqIdx subframe, skipping HARQ-less entries. */
18325 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq
18328 RgSchUlAlloc *alloc
18331 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq(cell, alloc)
18333 RgSchUlAlloc *alloc;
18336 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18338 //RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18340 TRC2(rgSCHCmnNextRcptnReq);
18342 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18344 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18346 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18347 if (alloc && alloc->hqProc == NULLP)
18349 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18360 * @brief Collates DRX enabled UE's scheduled in this SF
18364 * Function: rgSCHCmnDrxStrtInActvTmrInUl(cell)
18365 * Purpose: This function collates the link
18366 * of UE's scheduled in this SF who
18367 * have drx enabled. It then calls
18368 * DRX specific function to start/restart
18369 * inactivity timer in Ul
18373 * @param[in] RgSchCellCb *cell
/* rgSCHCmnDrxStrtInActvTmrInUl: builds a list of DRX-enabled UEs that
 * received a fresh UL grant in the currently scheduled subframe — excluding
 * retransmissions, SR-triggered grants and UL SPS occasions (ccpu00139513) —
 * and asks the DRX module to (re)start their UL inactivity timers.
 * NOTE(review): ueCb assignment inside the allocation loop is truncated in
 * this extract. Code byte-identical; comments only. */
18377 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl
18382 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl(cell)
18386 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18387 RgSchUlSf *sf = &(cellUl->ulSfArr[cellUl->schdIdx]);
18388 RgSchUlAlloc *alloc = rgSCHUtlUlAllocFirst(sf);
18393 TRC2(rgSCHCmnDrxStrtInActvTmrInUl);
18395 cmLListInit(&ulUeLst);
18403 if (!(alloc->grnt.isRtx) && ueCb->isDrxEnabled && !(ueCb->isSrGrant)
18405 /* ccpu00139513- DRX inactivity timer should not be started for
18406 * UL SPS occasions */
18407 && (alloc->hqProc->isSpsOccnHqP == FALSE)
18411 cmLListAdd2Tail(&ulUeLst,&(ueCb->ulDrxInactvTmrLnk));
18412 ueCb->ulDrxInactvTmrLnk.node = (PTR)ueCb;
18416 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18419 (Void)rgSCHDrxStrtInActvTmr(cell,&ulUeLst,RG_SCH_DRX_UL);
18426 * @brief Returns first uplink allocation to send HARQ feedback
18431 * Function: rgSCHCmnFirstHqFdbkAlloc
18432 * Purpose: This function returns the first uplink allocation
18433 * (or NULLP if there is none) in the subframe
18434 * for which it is expected to prepare and send HARQ
18439 * @param[in] RgSchCellCb *cell
18440 * @param[in] U8 idx
18441 * @return RgSchUlAlloc*
/* rgSCHCmnFirstHqFdbkAlloc: returns the first UL allocation (or NULLP) in
 * the subframe indexed by hqFdbkIdx[idx] for which HARQ feedback must be
 * prepared; allocations without a HARQ process are skipped in a loop. */
18444 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc
18450 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc(cell, idx)
18455 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18457 RgSchUlAlloc *alloc = NULLP;
18459 TRC2(rgSCHCmnFirstHqFdbkAlloc);
18461 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18463 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18464 alloc = rgSCHUtlUlAllocFirst(sf);
18466 while (alloc && (alloc->hqProc == NULLP))
18468 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18476 * @brief Returns next allocation to send HARQ feedback for.
18480 * Function: rgSCHCmnNextHqFdbkAlloc(cell)
18481 * Purpose: This function returns the next uplink allocation
18482 * (or NULLP if there is none) in the subframe
18483 * for which HARQ feedback needs to be sent.
18487 * @param[in] RgSchCellCb *cell
18488 * @return RgSchUlAlloc*
/* rgSCHCmnNextHqFdbkAlloc: iterator companion to rgSCHCmnFirstHqFdbkAlloc —
 * advances past 'alloc' to the next allocation needing HARQ feedback in the
 * hqFdbkIdx[idx] subframe, skipping HARQ-less entries. */
18491 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc
18494 RgSchUlAlloc *alloc,
18498 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc(cell, alloc, idx)
18500 RgSchUlAlloc *alloc;
18504 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18505 TRC2(rgSCHCmnNextHqFdbkAlloc);
18507 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18509 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18511 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18512 while (alloc && (alloc->hqProc == NULLP))
18514 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18524 /***********************************************************
18526 * Func : rgSCHCmnUlGetITbsFrmIMcs
18528 * Desc : Returns the Itbs that is mapped to an Imcs
18529 * for the case of uplink.
18537 **********************************************************/
/* rgSCHCmnUlGetITbsFrmIMcs: UL iMcs -> iTbs via the rgUlIMcsTbl lookup table
 * (cf. 36.213 Table 8.6.1-1). No bounds check on iMcs — caller must pass a
 * valid MCS index. */
18539 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs
18544 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs(iMcs)
18548 TRC2(rgSCHCmnUlGetITbsFrmIMcs);
18550 RETVALUE(rgUlIMcsTbl[iMcs].iTbs);
18553 /***********************************************************
18555 * Func : rgSCHCmnUlGetIMcsFrmITbs
18557 * Desc : Returns the Imcs that is mapped to an Itbs
18558 * for the case of uplink.
18562 * Notes: For iTbs 19, iMcs is dependant on modulation order.
18563 * Refer to 36.213, Table 8.6.1-1 and 36.306 Table 4.1-2
18564 * for UE capability information
18568 **********************************************************/
/* Map an uplink TBS index (plus UE category) to an MCS index.
 * iTbs 19 is the boundary case: its iMcs depends on whether the UE
 * supports 64QAM (category 5) — see 36.213 Table 8.6.1-1.
 * NOTE(review): the branch bodies/returns are elided in this excerpt;
 * code left byte-identical. */
18570 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs
18573 CmLteUeCategory ueCtg
18576 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg)
18578 CmLteUeCategory ueCtg;
18582 TRC2(rgSCHCmnUlGetIMcsFrmITbs);
18588 /*a higher layer can force a 64QAM UE to transmit at 16QAM.
18589 * We currently do not support this. Once the support for such
18590 * is added, ueCtg should be replaced by current transmit
18591 * modulation configuration.Refer to 36.213 -8.6.1
/* below 19, the iTbs<->iMcs relation is unambiguous */
18593 else if ( iTbs < 19 )
/* at iTbs 19 a non-CAT5 (16QAM-max) UE takes a different iMcs */
18597 else if ((iTbs == 19) && (ueCtg != CM_LTE_UE_CAT_5))
18607 /* This is a Temp fix, done for TENBPLUS-3898, ULSCH SDU corruption
18608 was seen when IMCS exceeds 20 on T2k TDD*/
18618 /***********************************************************
18620 * Func : rgSCHCmnUlMinTbBitsForITbs
18622 * Desc : Returns the minimum number of bits that can
18623 * be given as grant for a specific CQI.
18631 **********************************************************/
/* Return the smallest transport-block size (bits) grantable for a given
 * iTbs: the TBS table entry for one subband's worth of RBs
 * (rgTbSzTbl row 0, column cellUl->sbSize-1). */
18633 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs
18635 RgSchCmnUlCell *cellUl,
18639 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs)
18640 RgSchCmnUlCell *cellUl;
18644 TRC2(rgSCHCmnUlMinTbBitsForITbs);
/* guard iTbs against the table dimension before indexing */
18646 RGSCH_ARRAY_BOUND_CHECK(0, rgTbSzTbl[0], iTbs);
18648 RETVALUE(rgTbSzTbl[0][iTbs][cellUl->sbSize-1]);
18651 /***********************************************************
18653 * Func : rgSCHCmnUlSbAlloc
18655 * Desc : Given a required 'number of subbands' and a hole,
18656 * returns a suitable alloc such that the subband
18657 * allocation size is valid
18661 * Notes: Does not assume either passed numSb or hole size
18662 * to be valid for allocation, and hence arrives at
18663 * an acceptable value.
18666 **********************************************************/
/* Carve an UL allocation of ~numSb subbands out of 'hole', clamping both
 * the request and the hole to valid PUSCH allocation sizes (products of
 * 2/3/5, per rgSchCmnMult235Tbl). Takes the whole hole when the rounded
 * request covers it, else a partial hole.
 * NOTE(review): else-branches/returns are elided in this excerpt;
 * code left byte-identical. */
18668 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc
18675 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc(sf, numSb, hole)
18681 U8 holeSz; /* valid hole size */
18682 RgSchUlAlloc *alloc;
18683 TRC2(rgSCHCmnUlSbAlloc);
/* hole size itself is already a valid 2/3/5 multiple */
18685 if ((holeSz = rgSchCmnMult235Tbl[hole->num].prvMatch) == hole->num)
/* round the request UP to the next valid size */
18687 numSb = rgSchCmnMult235Tbl[numSb].match;
18688 if (numSb >= holeSz)
18690 alloc = rgSCHUtlUlAllocGetCompHole(sf, hole);
18694 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
/* hole is not a valid size: fit the request inside the usable prefix */
18699 if (numSb < holeSz)
18701 numSb = rgSchCmnMult235Tbl[numSb].match;
/* request exceeds usable hole: fall back to the largest valid size */
18705 numSb = rgSchCmnMult235Tbl[numSb].prvMatch;
18708 if ( numSb >= holeSz )
18712 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
18718 * @brief To fill the RgSchCmnUeUlAlloc structure of UeCb.
18722 * Function: rgSCHCmnUlUeFillAllocInfo
18723 * Purpose: Specific scheduler to call this API to fill the alloc
18726 * Invoked by: Scheduler
18728 * @param[in] RgSchCellCb *cell
18729 * @param[out] RgSchUeCb *ue
/* Finalize a UE's UL allocation (ueUl->alloc): fill TPC and n-DMRS, link
 * the HARQ process, populate the PDCCH, record the allocation for BSR
 * tracking, and — for new transmissions only — debit the UE's
 * outstanding-allocation accounting.
 * Called by specific schedulers once an allocation has been made. */
18733 PUBLIC Void rgSCHCmnUlUeFillAllocInfo
18739 PUBLIC Void rgSCHCmnUlUeFillAllocInfo(cell, ue)
18744 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18745 RgSchCmnUeUlAlloc *ulAllocInfo;
18746 RgSchCmnUlUe *ueUl;
18748 TRC2(rgSCHCmnUlUeFillAllocInfo);
18750 ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18751 ulAllocInfo = &ueUl->alloc;
18753 /* Fill alloc structure */
18754 rgSCHCmnUlAllocFillTpc(cell, ue, ulAllocInfo->alloc);
18755 rgSCHCmnUlAllocFillNdmrs(cellUl, ulAllocInfo->alloc);
18756 rgSCHCmnUlAllocLnkHqProc(ue, ulAllocInfo->alloc, ulAllocInfo->alloc->hqProc,
18757 ulAllocInfo->alloc->hqProc->isRetx);
18759 rgSCHCmnUlFillPdcchWithAlloc(ulAllocInfo->alloc->pdcch,
18760 ulAllocInfo->alloc, ue);
18761 /* Recording information about this allocation */
18762 rgSCHCmnUlRecordUeAlloc(cell, ue);
18764 /* Update the UE's outstanding allocation */
/* retransmissions re-send old data, so they do not consume new BSR bytes */
18765 if (!ulAllocInfo->alloc->hqProc->isRetx)
18767 rgSCHCmnUlUpdOutStndAlloc(cell, ue, ulAllocInfo->allocdBytes);
18774 * @brief Update the UEs outstanding alloc based on the BSR report's timing.
18779 * Function: rgSCHCmnUpdUlCompEffBsr
18780 * Purpose: Clear off all the allocations from outstanding allocation that
18781 * are later than or equal to BSR timing information (stored in UEs datIndTime).
18783 * Invoked by: Scheduler
18785 * @param[in] RgSchUeCb *ue
/* Recompute ue->ul.effBsr from the latest BSR: walk the UE's allocation
 * records backwards to the entry matching the BSR's timing
 * (ue->macCeRptTime), sum the allocations made at/after that point, and
 * subtract them from the reported buffer status — LCG0 first, the
 * remainder from the non-LCG0 (GBR + non-GBR) total.
 * NOTE(review): loop braces and the backward-walk step are elided in this
 * excerpt; code left byte-identical. */
18789 PRIVATE Void rgSCHCmnUpdUlCompEffBsr
18794 PRIVATE Void rgSCHCmnUpdUlCompEffBsr(ue)
18798 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,ue->cell);
18799 CmLList *node = ueUl->ulAllocLst.last;
18800 RgSchCmnAllocRecord *allRcd;
18801 U32 outStndAlloc=0;
18802 U32 nonLcg0OutStndAllocBs=0;
18805 RgSchCmnLcg *cmnLcg = NULLP;
18806 TRC2(rgSCHCmnUpdUlCompEffBsr);
/* locate the allocation record whose time matches the BSR report time */
18810 allRcd = (RgSchCmnAllocRecord *)node->node;
18811 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
/* sum allocations not yet reflected in the BSR */
18820 allRcd = (RgSchCmnAllocRecord *)node->node;
18822 outStndAlloc += allRcd->alloc;
18825 cmnLcg = (RgSchCmnLcg *)(ue->ul.lcgArr[0].sch);
18826 /* Update UEs LCG0's bs according to the total outstanding BSR allocation.*/
18827 if (cmnLcg->bs > outStndAlloc)
18829 cmnLcg->bs -= outStndAlloc;
18830 ue->ul.minReqBytes = cmnLcg->bs;
/* LCG0 fully consumed; the excess is debited from the other LCGs */
18835 nonLcg0OutStndAllocBs = outStndAlloc - cmnLcg->bs;
/* LCG0 is handled above; accumulate GBR LCGs 1..3 */
18839 for(lcgId = 1;lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
18841 if(RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
18843 cmnLcg = ((RgSchCmnLcg *) (ue->ul.lcgArr[lcgId].sch));
18844 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
18846 nonLcg0Bsr += cmnLcg->bs;
/* non-GBR LCGs are tracked as a single aggregate on the UE */
18850 nonLcg0Bsr += ue->ul.nonGbrLcgBs;
18851 if (nonLcg0OutStndAllocBs > nonLcg0Bsr)
18857 nonLcg0Bsr -= nonLcg0OutStndAllocBs;
18859 ue->ul.nonLcg0Bs = nonLcg0Bsr;
18860 /* Cap effBsr with nonLcg0Bsr and append lcg0 bs.
18861 * nonLcg0Bsr limit applies only to lcg1,2,3 */
18862 /* better be handled in individual scheduler */
18863 ue->ul.effBsr = nonLcg0Bsr +\
18864 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
18869 * @brief Records information about the current allocation.
18873 * Function: rgSCHCmnUlRecordUeAlloc
18874 * Purpose: Records information about the current allocation.
18875 * This includes the allocated bytes, as well
18876 * as some power information.
18878 * Invoked by: Scheduler
18880 * @param[in] RgSchCellCb *cell
18881 * @param[in] RgSchUeCb *ue
/* Record the current UL allocation in the UE's bounded history list
 * (ulAllocLst): recycle the head record, stamp it with the time the UE is
 * expected to report BSR for it (crntTime + DLDELTA + PDCCH->PUSCH delay),
 * and store bytes/RBs/CQI/TPC. Also feeds the power module and cell
 * byte counters. Used later by rgSCHCmnUpdUlCompEffBsr. */
18885 PUBLIC Void rgSCHCmnUlRecordUeAlloc
18891 PUBLIC Void rgSCHCmnUlRecordUeAlloc(cell, ue)
18897 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18899 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18900 CmLListCp *lst = &ueUl->ulAllocLst;
18901 CmLList *node = ueUl->ulAllocLst.first;
/* oldest record is reused — the list acts as a fixed-size ring */
18902 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18903 RgSchCmnUeUlAlloc *ulAllocInfo = &ueUl->alloc;
18904 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
18905 TRC2(rgSCHCmnUlRecordUeAlloc);
18907 cmLListDelFrm(lst, &allRcd->lnk);
18909 /* To the crntTime, add the MIN time at which UE will
18910 * actually send the BSR i.e DELTA+4 */
18911 allRcd->allocTime = cell->crntTime;
18912 /*ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
18914 if(ue->isEmtcUe == TRUE)
18916 RGSCH_INCR_SUB_FRAME_EMTC(allRcd->allocTime,
18917 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18922 RGSCH_INCR_SUB_FRAME(allRcd->allocTime,
18923 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18926 allRcd->allocTime = cellUl->schdTime;
18928 cmLListAdd2Tail(lst, &allRcd->lnk);
18930 /* Filling in the parameters to be recorded */
18931 allRcd->alloc = ulAllocInfo->allocdBytes;
18932 //allRcd->numRb = ulAllocInfo->alloc->grnt.numRb;
/* RB count derived from VRB groups (5GTF grant shape) */
18933 allRcd->numRb = (ulAllocInfo->alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
18934 /*Recording the UL CQI derived from the maxUlCqi */
18935 allRcd->cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
18936 allRcd->tpc = ulAllocInfo->alloc->grnt.tpc;
18938 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18940 cell->measurements.ulBytesCnt += ulAllocInfo->allocdBytes;
18945 /** PHR handling for MSG3
18946 * @brief Records allocation information of msg3 in the the UE.
18950 * Function: rgSCHCmnUlRecMsg3Alloc
18951 * Purpose: Records information about msg3 allocation.
18952 * This includes the allocated bytes, as well
18953 * as some power information.
18955 * Invoked by: Scheduler
18957 * @param[in] RgSchCellCb *cell
18958 * @param[in] RgSchUeCb *ue
18959 * @param[in] RgSchRaCb *raCb
/* Record the msg3 grant in the UE's allocation history: recycle the head
 * record, stamp it with the msg3 allocation time from the RA control
 * block, and store the grant's bytes/RBs/CCCH CQI/TPC. Also feeds the
 * power module (PHR handling for MSG3). */
18963 PUBLIC Void rgSCHCmnUlRecMsg3Alloc
18970 PUBLIC Void rgSCHCmnUlRecMsg3Alloc(cell, ue, raCb)
18976 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18977 CmLListCp *lst = &ueUl->ulAllocLst;
18978 CmLList *node = ueUl->ulAllocLst.first;
/* reuse the oldest record — history list is a fixed-size ring */
18979 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18981 /* Stack Crash problem for TRACE5 changes */
18982 TRC2(rgSCHCmnUlRecMsg3Alloc);
18984 cmLListDelFrm(lst, node);
18985 allRcd->allocTime = raCb->msg3AllocTime;
18986 cmLListAdd2Tail(lst, node);
18988 /* Filling in the parameters to be recorded */
18989 allRcd->alloc = raCb->msg3Grnt.datSz;
18990 allRcd->numRb = raCb->msg3Grnt.numRb;
18991 allRcd->cqi = raCb->ccchCqi;
18992 allRcd->tpc = raCb->msg3Grnt.tpc;
18994 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18999 * @brief Keeps track of the most recent RG_SCH_CMN_MAX_ALLOC_TRACK
19000 * allocations to track. Adds this allocation to the ueUl's ulAllocLst.
19005 * Function: rgSCHCmnUlUpdOutStndAlloc
19006 * Purpose: Recent Allocation shall be at First Pos'n.
19007 * Remove the last node, update the fields
19008 * with the new allocation and add at front.
19010 * Invoked by: Scheduler
19012 * @param[in] RgSchCellCb *cell
19013 * @param[in] RgSchUeCb *ue
19014 * @param[in] U32 alloc
/* Debit 'alloc' bytes from the UE's pending buffer status: LCG0 first,
 * any remainder from nonLcg0Bs, then recompute effBsr. When effBsr drops
 * to zero the BSR timer is stopped and (for non-SR grants) the periodic
 * BSR timer restarted if configured. */
19018 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc
19025 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc(cell, ue, alloc)
19031 U32 nonLcg0Alloc=0;
19032 TRC2(rgSCHCmnUlUpdOutStndAlloc);
19034 /* Update UEs LCG0's bs according to the total outstanding BSR allocation.*/
19035 if (((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs > alloc)
19037 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs -= alloc;
/* LCG0 exhausted; the excess is debited from the non-LCG0 budget */
19041 nonLcg0Alloc = alloc - ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
19042 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = 0;
19045 if (nonLcg0Alloc >= ue->ul.nonLcg0Bs)
19047 ue->ul.nonLcg0Bs = 0;
19051 ue->ul.nonLcg0Bs -= nonLcg0Alloc;
19053 /* Cap effBsr with effAmbr and append lcg0 bs.
19054 * effAmbr limit applies only to lcg1,2,3 non GBR LCG's*/
19055 /* better be handled in individual scheduler */
19056 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
19057 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
/* nothing left to schedule — manage BSR timers */
19059 if (ue->ul.effBsr == 0)
19061 if (ue->bsrTmr.tmrEvnt != TMR_NONE)
19063 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
/* SR grants do not re-arm the periodic BSR timer */
19066 if (FALSE == ue->isSrGrant)
19068 if (ue->ul.bsrTmrCfg.isPrdBsrTmrPres)
19071 rgSCHTmrStartTmr(cell, ue, RG_SCH_TMR_BSR,
19072 ue->ul.bsrTmrCfg.prdBsrTmr);
19078 /* Resetting UEs lower Cap */
19079 ue->ul.minReqBytes = 0;
19086 * @brief Returns the "Itbs" for a given UE.
19090 * Function: rgSCHCmnUlGetITbs
19091 * Purpose: This function returns the "Itbs" for a given UE.
19093 * Invoked by: Scheduler
19095 * @param[in] RgSchUeCb *ue
/* Return the uplink iTbs for a UE: derive it from link-adaptation state
 * (cqiBasediTbs + deltaiTbs, scaled by 100), clamp to the maximum iTbs
 * allowed by the UE's configured maxUlCqi and the cell threshold. The
 * CQI used is capped at maxUlCqi for non-CAT5 (16QAM-limited) UEs.
 * NOTE(review): #ifdef arms are elided in this excerpt — both the LA-based
 * path and the direct CQI-table path (19154-19163) are visible; which one
 * compiles depends on build flags. Code left byte-identical. */
19099 PUBLIC U8 rgSCHCmnUlGetITbs
19106 PUBLIC U8 rgSCHCmnUlGetITbs(cell, ue, isEcp)
19112 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
19113 /* CQI will be capped to maxUlCqi for 16qam UEs */
19114 CmLteUeCategory ueCtgy = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
19118 U8 maxiTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ueUl->maxUlCqi];
19121 TRC2(rgSCHCmnUlGetITbs);
19123 /* #ifdef RG_SCH_CMN_EXT_CP_SUP For ECP pick index 1 */
19125 if ( (ueCtgy != CM_LTE_UE_CAT_5) &&
19126 (ueUl->validUlCqi > ueUl->maxUlCqi)
19129 cqi = ueUl->maxUlCqi;
19133 cqi = ueUl->validUlCqi;
/* link-adaptation terms are maintained scaled by 100 */
19137 iTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
19139 RG_SCH_CHK_ITBS_RANGE(iTbs, maxiTbs);
19141 iTbs = RGSCH_MIN(iTbs, ue->cell->thresholds.maxUlItbs);
19144 /* This is a Temp fix, done for TENBPLUS-3898, ULSCH SDU corruption
19145 was seen when IMCS exceeds 20 on T2k TDD */
19154 if ( (ueCtgy != CM_LTE_UE_CAT_5) && (ueUl->crntUlCqi[0] > ueUl->maxUlCqi ))
19156 cqi = ueUl->maxUlCqi;
19160 cqi = ueUl->crntUlCqi[0];
19163 RETVALUE(rgSchCmnUlCqiToTbsTbl[(U8)isEcp][cqi]);
19167 * @brief This function adds the UE to DLRbAllocInfo TX lst.
19171 * Function: rgSCHCmnDlRbInfoAddUeTx
19172 * Purpose: This function adds the UE to DLRbAllocInfo TX lst.
19174 * Invoked by: Common Scheduler
19176 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19177 * @param[in] RgSchUeCb *ue
19178 * @param[in] RgSchDlHqProcCb *hqP
/* Queue a HARQ process on the DL allocation-info TX list. When the cell
 * is frequency-selective the DLFS module orders the insertion; otherwise
 * it is appended to the tail. reqLnk.node doubles as an "already queued"
 * flag, preventing double insertion. */
19183 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx
19186 RgSchCmnDlRbAllocInfo *allocInfo,
19188 RgSchDlHqProcCb *hqP
19191 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx(cell, allocInfo, ue, hqP)
19193 RgSchCmnDlRbAllocInfo *allocInfo;
19195 RgSchDlHqProcCb *hqP;
19198 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
19200 TRC2(rgSCHCmnDlRbInfoAddUeTx);
/* NULLP node means not yet on any list — safe to insert */
19202 if (hqP->reqLnk.node == NULLP)
19204 if (cellSch->dl.isDlFreqSel)
19206 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19207 &allocInfo->dedAlloc.txHqPLst, hqP);
19212 cmLListAdd2Tail(&allocInfo->dedAlloc.txHqPLst, &hqP->reqLnk);
19214 hqP->reqLnk.node = (PTR)hqP;
19221 * @brief This function adds the UE to DLRbAllocInfo RETX lst.
19225 * Function: rgSCHCmnDlRbInfoAddUeRetx
19226 * Purpose: This function adds the UE to DLRbAllocInfo RETX lst.
19228 * Invoked by: Common Scheduler
19230 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19231 * @param[in] RgSchUeCb *ue
19232 * @param[in] RgSchDlHqProcCb *hqP
/* Queue a HARQ process on the DL allocation-info RETX list — DLFS-ordered
 * insert for frequency-selective cells, tail append otherwise. Unlike the
 * TX variant, no duplicate check is done (see comment at 19263). */
19237 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx
19240 RgSchCmnDlRbAllocInfo *allocInfo,
19242 RgSchDlHqProcCb *hqP
19245 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx(cell, allocInfo, ue, hqP)
19247 RgSchCmnDlRbAllocInfo *allocInfo;
19249 RgSchDlHqProcCb *hqP;
19252 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19254 TRC2(rgSCHCmnDlRbInfoAddUeRetx);
19256 if (cellSch->dl.isDlFreqSel)
19258 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19259 &allocInfo->dedAlloc.retxHqPLst, hqP);
19263 /* checking UE's presence in this lst is unnecessary */
19264 cmLListAdd2Tail(&allocInfo->dedAlloc.retxHqPLst, &hqP->reqLnk);
19265 hqP->reqLnk.node = (PTR)hqP;
19271 * @brief This function adds the UE to DLRbAllocInfo TX-RETX lst.
19275 * Function: rgSCHCmnDlRbInfoAddUeRetxTx
19276 * Purpose: This adds the UE to DLRbAllocInfo TX-RETX lst.
19278 * Invoked by: Common Scheduler
19280 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19281 * @param[in] RgSchUeCb *ue
19282 * @param[in] RgSchDlHqProcCb *hqP
/* Queue a HARQ process on the DL allocation-info TX-RETX list — DLFS
 * insert for frequency-selective cells, tail append otherwise.
 * NOTE(review): the body references 'cell' (19308) but the visible K&R
 * parameter list (19295) shows only (allocInfo, ue, hqP) — the cell
 * parameter line appears to be elided from this excerpt rather than
 * genuinely missing; verify against the full file. */
19287 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx
19290 RgSchCmnDlRbAllocInfo *allocInfo,
19292 RgSchDlHqProcCb *hqP
19295 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx(allocInfo, ue, hqP)
19297 RgSchCmnDlRbAllocInfo *allocInfo;
19299 RgSchDlHqProcCb *hqP;
19302 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19304 TRC2(rgSCHCmnDlRbInfoAddUeRetxTx);
19306 if (cellSch->dl.isDlFreqSel)
19308 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19309 &allocInfo->dedAlloc.txRetxHqPLst, hqP);
19313 cmLListAdd2Tail(&allocInfo->dedAlloc.txRetxHqPLst, &hqP->reqLnk);
19314 hqP->reqLnk.node = (PTR)hqP;
19320 * @brief This function adds the UE to DLRbAllocInfo NonSchdRetxLst.
19324 * Function: rgSCHCmnDlAdd2NonSchdRetxLst
19325 * Purpose: During RB estimation for RETX, if allocation fails
19326 * then appending it to NonSchdRetxLst, the further
19327 * action is taken as part of Finalization in
19328 * respective schedulers.
19330 * Invoked by: Common Scheduler
19332 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19333 * @param[in] RgSchUeCb *ue
19334 * @param[in] RgSchDlHqProcCb *hqP
/* Append a HARQ process whose RETX RB estimation failed to the
 * non-scheduled RETX list; the per-scheduler finalization step handles it
 * later. SPS HARQ processes are excluded (the early guard at 19357). */
19339 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst
19341 RgSchCmnDlRbAllocInfo *allocInfo,
19343 RgSchDlHqProcCb *hqP
19346 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst(allocInfo, ue, hqP)
19347 RgSchCmnDlRbAllocInfo *allocInfo;
19349 RgSchDlHqProcCb *hqP;
19352 CmLList *schdLnkNode;
19354 TRC2(rgSCHCmnDlAdd2NonSchdRetxLst);
/* SPS-owned HARQ processes are not tracked on this list */
19357 if ( (hqP->sch != (RgSchCmnDlHqProc *)NULLP) &&
19358 (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP)))
19364 schdLnkNode = &hqP->schdLstLnk;
19365 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
19366 cmLListAdd2Tail(&allocInfo->dedAlloc.nonSchdRetxHqPLst, schdLnkNode);
19374 * @brief This function adds the UE to DLRbAllocInfo NonSchdTxRetxLst.
19378 * Function: rgSCHCmnDlAdd2NonSchdTxRetxLst
19379 * Purpose: During RB estimation for TXRETX, if allocation fails
19380 * then appending it to NonSchdTxRetxLst, the further
19381 * action is taken as part of Finalization in
19382 * respective schedulers.
19384 * Invoked by: Common Scheduler
19386 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19387 * @param[in] RgSchUeCb *ue
19388 * @param[in] RgSchDlHqProcCb *hqP
19394 * @brief This function handles the initialisation of DL HARQ/ACK feedback
19395 * timing information for each DL subframe.
19399 * Function: rgSCHCmnDlANFdbkInit
19400 * Purpose: Each DL subframe stores the sfn and subframe
19401 * information of UL subframe in which it expects
19402 * HARQ ACK/NACK feedback for this subframe.It
19403 * generates the information based on Downlink
19404 * Association Set Index table.
19406 * Invoked by: Scheduler
19408 * @param[in] RgSchCellCb* cell
/* TDD init: for each DL subframe in a radio frame, compute and store the
 * UL subframe (subframe number, SFN offset, index m) in which HARQ
 * ACK/NACK feedback for that DL subframe is expected, using the Downlink
 * Association Set Index K table. Subsequent radio frames copy the first
 * frame's entries. */
19413 PRIVATE S16 rgSCHCmnDlANFdbkInit
19418 PRIVATE S16 rgSCHCmnDlANFdbkInit(cell)
19423 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19424 U8 maxDlSubfrms = cell->numDlSubfrms;
19431 RgSchTddSubfrmInfo ulSubfrmInfo;
19434 TRC2(rgSCHCmnDlANFdbkInit);
19436 ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19437 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19439 /* Generate HARQ ACK/NACK feedback information for each DL sf in a radio frame
19440 * Calculate this information based on DL Association set Index table */
19441 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
/* advance sfNum to the next UL subframe in the UL/DL pattern */
19443 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19444 RG_SCH_TDD_UL_SUBFRAME)
19446 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
/* each UL subframe acknowledges numFdbkSubfrms DL subframes (set K) */
19450 for(idx=0; idx < rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19451 numFdbkSubfrms; idx++)
19453 calcSfNum = sfNum - rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19457 calcSfnOffset = RGSCH_CEIL(-calcSfNum, RGSCH_NUM_SUB_FRAMES);
19464 calcSfNum = ((RGSCH_NUM_SUB_FRAMES * calcSfnOffset) + calcSfNum)\
19465 % RGSCH_NUM_SUB_FRAMES;
/* map the absolute subframe number to the cell's DL-subframe index,
 * accounting for the UL subframes that precede it in the pattern */
19467 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19471 else if((ulSubfrmInfo.switchPoints == 2) && (calcSfNum <= \
19472 RG_SCH_CMN_SPL_SUBFRM_6))
19474 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19478 dlIdx = calcSfNum - maxUlSubfrms;
19481 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = sfNum;
19482 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = calcSfnOffset;
19483 cell->subFrms[dlIdx]->dlFdbkInfo.m = idx;
19485 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19488 /* DL subframes in the subsequent radio frames are initialized
19489 * with the previous radio frames */
19490 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;\
19493 sfNum = dlIdx - rgSchTddNumDlSubfrmTbl[ulDlCfgIdx]\
19494 [RGSCH_NUM_SUB_FRAMES-1];
19495 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = \
19496 cell->subFrms[sfNum]->dlFdbkInfo.subframe;
19497 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = \
19498 cell->subFrms[sfNum]->dlFdbkInfo.sfnOffset;
19499 cell->subFrms[dlIdx]->dlFdbkInfo.m = cell->subFrms[sfNum]->dlFdbkInfo.m;
19505 * @brief This function handles the initialization of uplink association
19506 * set information for each DL subframe.
19511 * Function: rgSCHCmnDlKdashUlAscInit
19512 * Purpose: Each DL sf stores the sfn and sf information of UL sf
19513 * in which it expects HQ ACK/NACK trans. It generates the information
19514 * based on k` in UL association set index table.
19516 * Invoked by: Scheduler
19518 * @param[in] RgSchCellCb* cell
/* TDD init: for each DL subframe, compute and store the UL subframe
 * (subframe number + SFN offset) in which the UE transmits HARQ ACK/NACK,
 * using k' from the UL Association Set Index table. Entries never written
 * (tracked via the dlPres bitmask) are set to RGSCH_INVALID_INFO;
 * subsequent radio frames copy the first frame's entries.
 * NOTE(review): table is indexed [ulDlCfgIdx-1] — presumably cfg 0 never
 * reaches here; confirm against callers. */
19523 PRIVATE S16 rgSCHCmnDlKdashUlAscInit
19528 PRIVATE S16 rgSCHCmnDlKdashUlAscInit(cell)
19533 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19534 U8 maxDlSubfrms = cell->numDlSubfrms;
19540 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19541 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19542 [RGSCH_NUM_SUB_FRAMES-1];
19545 TRC2(rgSCHCmnDlKdashUlAscInit);
19547 /* Generate ACK/NACK offset information for each DL subframe in a radio frame
19548 * Calculate this information based on K` in UL Association Set table */
19549 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
/* advance to the next UL subframe in the UL/DL pattern */
19551 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19552 RG_SCH_TDD_UL_SUBFRAME)
19554 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
/* DL subframe that this UL subframe's k' points back to */
19558 calcSfNum = (sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum] + \
19559 RGSCH_NUM_SUB_FRAMES) % RGSCH_NUM_SUB_FRAMES;
19560 calcSfnOffset = sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum];
19561 if(calcSfnOffset < 0)
19563 calcSfnOffset = RGSCH_CEIL(-calcSfnOffset, RGSCH_NUM_SUB_FRAMES);
/* map absolute subframe number to the cell's DL-subframe index */
19570 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19574 else if((ulSubfrmInfo.switchPoints == 2) &&
19575 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19577 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19581 dlIdx = calcSfNum - maxUlSubfrms;
19584 cell->subFrms[dlIdx]->ulAscInfo.subframe = sfNum;
19585 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset = calcSfnOffset;
19587 /* set dlIdx for which ulAscInfo is updated */
19588 dlPres = dlPres | (1 << dlIdx);
19589 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19592 /* Set Invalid information for which ulAscInfo is not present */
19594 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19597 /* If dlPres is 0, ulAscInfo is not present in that DL index */
19598 if(! ((dlPres >> sfCount)&0x01))
19600 cell->subFrms[sfCount]->ulAscInfo.sfnOffset =
19601 RGSCH_INVALID_INFO;
19602 cell->subFrms[sfCount]->ulAscInfo.subframe =
19603 RGSCH_INVALID_INFO;
19607 /* DL subframes in the subsequent radio frames are initialized
19608 * with the previous radio frames */
19609 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;
19613 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19614 cell->subFrms[dlIdx]->ulAscInfo.subframe =
19615 cell->subFrms[sfNum]->ulAscInfo.subframe;
19616 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset =
19617 cell->subFrms[sfNum]->ulAscInfo.sfnOffset;
19624 * @brief This function initialises the 'Np' value for 'p'
19628 * Function: rgSCHCmnDlNpValInit
19629 * Purpose: To initialise the 'Np' value for each 'p'. It is used
19630 * to find the mapping between nCCE and 'p' and used in
19631 * HARQ ACK/NACK reception.
19633 * Invoked by: Scheduler
19635 * @param[in] RgSchCellCb* cell
/* TDD init: precompute the per-cell Np table used to map nCCE to 'p' for
 * HARQ ACK/NACK reception: Np = floor(BW * (p*12 - 4) / 36) for p >= 1,
 * Np(0) = 0 (36.213 PUCCH resource mapping — confirm exact clause). */
19640 PRIVATE S16 rgSCHCmnDlNpValInit
19645 PRIVATE S16 rgSCHCmnDlNpValInit(cell)
19651 TRC2(rgSCHCmnDlNpValInit);
19653 /* Always Np is 0 for p=0 */
19654 cell->rgSchTddNpValTbl[0] = 0;
19656 for(idx=1; idx < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; idx++)
/* RG_SCH_CMN_NUM_SUBCAR = subcarriers per RB (12) */
19658 np = cell->bwCfg.dlTotalBw * (idx * RG_SCH_CMN_NUM_SUBCAR - 4);
19659 cell->rgSchTddNpValTbl[idx] = (U8) (np/36);
19666 * @brief This function handles the creation of RACH preamble
19667 * list to queue the preambles and process at the scheduled
19672 * Function: rgSCHCmnDlCreateRachPrmLst
19673 * Purpose: To create RACH preamble list based on RA window size.
19674 * It is used to queue the preambles and process it at the
19677 * Invoked by: Scheduler
19679 * @param[in] RgSchCellCb* cell
/* TDD init: allocate the RACH preamble queue — one CmLListCp per
 * (RA-window slot, RA-RNTI, subframe) combination — and record its size
 * in cell->raInfo. Error path (ret check) is elided in this excerpt. */
19684 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst
19689 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst(cell)
19697 TRC2(rgSCHCmnDlCreateRachPrmLst);
/* raArrSz = number of radio frames spanned by the RA response window */
19699 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19701 lstSize = raArrSz * RGSCH_MAX_RA_RNTI_PER_SUBFRM * RGSCH_NUM_SUB_FRAMES;
19703 cell->raInfo.maxRaSize = raArrSz;
19704 ret = rgSCHUtlAllocSBuf(cell->instIdx,
19705 (Data **)(&cell->raInfo.raReqLst), (Size)(lstSize * sizeof(CmLListCp)));
19711 cell->raInfo.lstSize = lstSize;
19718 * @brief This function handles the initialization of RACH Response
19719 * information at each DL subframe.
19723 * Function: rgSCHCmnDlRachInfoInit
19724 * Purpose: Each DL subframe stores the sfn and subframe information of
19725 * possible RACH response allowed for UL subframes. It generates
19726 * the information based on PRACH configuration.
19728 * Invoked by: Scheduler
19730 * @param[in] RgSchCellCb* cell
/* TDD init: for every UL (and special) subframe, compute the RA response
 * window [startWin, endWin] — offset by the RAR wait period — and record,
 * per DL subframe inside the window, the (sfnOffset, subframe) pairs at
 * which a RACH response may be transmitted, plus per-window deletion info
 * (delInfo) marking when queued preambles expire. The temporary
 * rachRspLst[3][10] matrix is then copied into the cell via
 * rgSCHCmnDlCpyRachInfo. */
19735 PRIVATE S16 rgSCHCmnDlRachInfoInit
19740 PRIVATE S16 rgSCHCmnDlRachInfoInit(cell)
19745 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19748 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19749 [RGSCH_NUM_SUB_FRAMES-1];
19751 RgSchTddRachRspLst rachRspLst[3][RGSCH_NUM_SUB_FRAMES];
19759 RgSchTddRachDelInfo *delInfo;
19763 TRC2(rgSCHCmnDlRachInfoInit);
19765 cmMemset((U8 *)rachRspLst, 0, sizeof(rachRspLst));
19767 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19769 /* Include Special subframes */
19770 maxUlSubfrms = maxUlSubfrms + \
19771 rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx].switchPoints;
19772 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
/* advance to the next non-DL (UL or special) subframe */
19774 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] ==
19775 RG_SCH_TDD_DL_SUBFRAME)
19777 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
/* RA response window measured in absolute subframes from sfNum */
19781 startWin = (sfNum + RG_SCH_CMN_RARSP_WAIT_PRD + \
19782 ((RgSchCmnCell *)cell->sc.sch)->dl.numRaSubFrms);
19783 endWin = (startWin + cell->rachCfg.raWinSize - 1);
19785 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][startWin%RGSCH_NUM_SUB_FRAMES];
19786 /* Find the next DL subframe starting from Subframe 0 */
19787 if((startSubfrmIdx % RGSCH_NUM_SUB_FRAMES) == 0)
19789 startWin = RGSCH_CEIL(startWin, RGSCH_NUM_SUB_FRAMES);
19790 startWin = startWin * RGSCH_NUM_SUB_FRAMES;
19794 rgSchTddLowDlSubfrmIdxTbl[ulDlCfgIdx][endWin%RGSCH_NUM_SUB_FRAMES];
19795 endWin = (endWin/RGSCH_NUM_SUB_FRAMES) * RGSCH_NUM_SUB_FRAMES \
19797 if(startWin > endWin)
19801 /* Find all the possible RACH Response transmission
19802 * time within the RA window size */
19803 startSubfrmIdx = startWin%RGSCH_NUM_SUB_FRAMES;
19804 for(sfnIdx = startWin/RGSCH_NUM_SUB_FRAMES;
19805 sfnIdx <= endWin/RGSCH_NUM_SUB_FRAMES; sfnIdx++)
19807 if(sfnIdx == endWin/RGSCH_NUM_SUB_FRAMES)
19809 endSubfrmIdx = endWin%RGSCH_NUM_SUB_FRAMES;
19813 endSubfrmIdx = RGSCH_NUM_SUB_FRAMES-1;
19816 /* Find all the possible RACH Response transmission
19817 * time within radio frame */
19818 for(subfrmIdx = startSubfrmIdx;
19819 subfrmIdx <= endSubfrmIdx; subfrmIdx++)
/* only DL subframes can carry a RACH response — skip UL ones */
19821 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][subfrmIdx] ==
19822 RG_SCH_TDD_UL_SUBFRAME)
19826 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
19827 /* Find the next DL subframe starting from Subframe 0 */
19828 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
19832 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx], subfrmIdx);
19834 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
19835 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset = sfnIdx;
19836 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].subframe[numSubfrms]
19838 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms++;
19840 startSubfrmIdx = RG_SCH_CMN_SUBFRM_0;
19842 /* Update the subframes to be deleted at this subframe */
19843 /* Get the subframe after the end of RA window size */
19846 sfnOffset = endWin/RGSCH_NUM_SUB_FRAMES;
19849 sfnOffset += raArrSz;
19851 sfnIdx = (endWin/RGSCH_NUM_SUB_FRAMES) % raArrSz;
19853 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx],endSubfrmIdx-1);
19854 if((endSubfrmIdx == RGSCH_NUM_SUB_FRAMES) ||
19855 (rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx] ==
19856 RGSCH_NUM_SUB_FRAMES))
/* window ends past the frame edge — wrap to the first DL subframe */
19859 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][RG_SCH_CMN_SUBFRM_0];
19863 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx];
19866 delInfo = &rachRspLst[sfnIdx][subfrmIdx].delInfo;
19867 delInfo->sfnOffset = sfnOffset;
19868 delInfo->subframe[delInfo->numSubfrms] = sfNum;
19869 delInfo->numSubfrms++;
19871 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
/* flatten the temporary matrix into the cell's RACH info */
19874 ret = rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz);
19884 * @brief This function handles the initialization of PHICH information
19885 * for each DL subframe based on PHICH table.
19889 * Function: rgSCHCmnDlPhichOffsetInit
19890 * Purpose: Each DL subf stores the sfn and subf information of UL subframe
19891 * for which it trnsmts PHICH in this subframe. It generates the information
19892 * based on PHICH table.
19894 * Invoked by: Scheduler
19896 * @param[in] RgSchCellCb* cell
/* TDD init: for each DL subframe, compute and store which UL subframe's
 * PUSCH it carries PHICH for (subframe number + SFN offset), using the K
 * values from the PHICH table. Entries never written (tracked via the
 * dlPres bitmask) are invalidated; subsequent radio frames copy the first
 * frame's entries. */
19901 PRIVATE S16 rgSCHCmnDlPhichOffsetInit
19906 PRIVATE S16 rgSCHCmnDlPhichOffsetInit(cell)
19911 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19912 U8 maxDlSubfrms = cell->numDlSubfrms;
19919 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19920 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19921 [RGSCH_NUM_SUB_FRAMES-1];
19923 TRC2(rgSCHCmnDlPhichOffsetInit);
19925 /* Generate PHICH offset information for each DL subframe in a radio frame
19926 * Calculate this information based on K in PHICH table */
19927 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
/* advance to the next UL subframe in the UL/DL pattern */
19929 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19930 RG_SCH_TDD_UL_SUBFRAME)
19932 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
/* DL subframe (and frame offset) carrying PHICH for UL subframe sfNum */
19936 calcSfNum = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) % \
19937 RGSCH_NUM_SUB_FRAMES;
19938 calcSfnOffset = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) / \
19939 RGSCH_NUM_SUB_FRAMES;
/* map the absolute subframe number to the cell's DL-subframe index */
19941 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19945 else if((ulSubfrmInfo.switchPoints == 2) &&
19946 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19948 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19952 dlIdx = calcSfNum - maxUlSubfrms;
19955 cell->subFrms[dlIdx]->phichOffInfo.subframe = sfNum;
19956 cell->subFrms[dlIdx]->phichOffInfo.numSubfrms = 1;
19958 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset = calcSfnOffset;
19960 /* set dlIdx for which phich offset is updated */
19961 dlPres = dlPres | (1 << dlIdx);
19962 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19965 /* Set Invalid information for which phich offset is not present */
19967 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19970 /* If dlPres is 0, phich offset is not present in that DL index */
19971 if(! ((dlPres >> sfCount)&0x01))
19973 cell->subFrms[sfCount]->phichOffInfo.sfnOffset =
19974 RGSCH_INVALID_INFO;
19975 cell->subFrms[sfCount]->phichOffInfo.subframe =
19976 RGSCH_INVALID_INFO;
19977 cell->subFrms[sfCount]->phichOffInfo.numSubfrms = 0;
19981 /* DL subframes in the subsequent radio frames are
19982 * initialized with the previous radio frames */
19983 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms;
19984 dlIdx < maxDlSubfrms; dlIdx++)
19987 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19989 cell->subFrms[dlIdx]->phichOffInfo.subframe =
19990 cell->subFrms[sfNum]->phichOffInfo.subframe;
19992 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset =
19993 cell->subFrms[sfNum]->phichOffInfo.sfnOffset;
20000 * @brief Updation of Sch vars per TTI.
20004 * Function: rgSCHCmnUpdVars
20005 * Purpose: Updation of Sch vars per TTI.
 *          Recomputes, for the current TDD cell time, the UL subframe
 *          indices used this TTI: PUSCH scheduling index, UL HARQ proc
 *          index, PHICH feedback indices, retx indices, reception-request
 *          index, Msg3 scheduling index and (SPS) reservation index.
20007 * @param[in] RgSchCellCb *cell
 * @return Void
20012 PUBLIC Void rgSCHCmnUpdVars
20017 PUBLIC Void rgSCHCmnUpdVars(cell)
20021 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
20022 CmLteTimingInfo timeInfo;
20025 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
20028 TRC2(rgSCHCmnUpdVars);
20030 /* ccpu00132654-ADD- Initializing all the indices in every subframe*/
20031 rgSCHCmnInitVars(cell);
 /* Index of the subframe at which UL-grant DCI-0 would be sent
  * (current time advanced by the DL control delta) */
20033 idx = (cell->crntTime.slot + TFU_ULCNTRL_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20034 /* Calculate the UL scheduling subframe idx based on the
 * PUSCH k table; a zero entry means no PUSCH tx is tied to this DL sf */
20036 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][idx] != 0)
20038 /* PUSCH transmission is based on offset from DL
20039 * PDCCH scheduling */
20040 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20041 ulSubframe = rgSchTddPuschTxKTbl[ulDlCfgIdx][timeInfo.subframe];
20042 /* Add the DCI-0 to PUSCH time to get the time of UL subframe */
20043 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, ulSubframe);
 /* Cache the absolute TTI (sfn*10 + subframe) of the scheduled UL sf */
20045 cellUl->schdTti = timeInfo.sfn * 10 + timeInfo.subframe;
20047 /* Fetch the corresponding UL subframe Idx in UL sf array */
20048 cellUl->schdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20049 /* Fetch the corresponding UL Harq Proc ID */
20050 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
20051 cellUl->schdTime = timeInfo;
 /* PHICH M value for this subframe (number of UL sfs whose feedback
  * lands here) — drives how many hqFdbkIdx entries are valid */
20053 Mval = rgSchTddPhichMValTbl[ulDlCfgIdx][idx];
20056 /* Fetch the tx time for DL HIDCI-0 */
20057 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20058 /* Fetch the corresponding n-k tx time of PUSCH */
20059 cellUl->hqFdbkIdx[0] = rgSCHCmnGetPhichUlSfIdx(&timeInfo, cell);
20060 /* Retx will happen according to the Pusch k table */
20061 cellUl->reTxIdx[0] = cellUl->schdIdx;
 /* UL/DL cfg 0 has special retx timing: derive it from the fdbk index */
20063 if(ulDlCfgIdx == 0)
20065 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[0] */
20066 cellUl->reTxIdx[0] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20067 cellUl->hqFdbkIdx[0]);
20070 /* At Idx 1 store the UL SF adjacent(left) to the UL SF
 * stored at Idx 0 (wrap-around over numUlSubfrms) */
20072 cellUl->hqFdbkIdx[1] = (cellUl->hqFdbkIdx[0]-1 +
20073 cellUl->numUlSubfrms) % cellUl->numUlSubfrms;
20074 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[1] */
20075 cellUl->reTxIdx[1] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20076 cellUl->hqFdbkIdx[1]);
 /* Reception-request index: the UL sf for which PHY must be told to
  * receive, current time advanced by the reception-request delta */
20081 idx = (cell->crntTime.slot + TFU_RECPREQ_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20082 if (rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] == RG_SCH_TDD_UL_SUBFRAME)
20084 RGSCHCMNADDTOCRNTTIME(cell->crntTime, timeInfo, TFU_RECPREQ_DLDELTA)
20085 cellUl->rcpReqIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
 /* Msg3 scheduling: applicable when the DL-delta subframe is DL/special */
20087 idx = (cell->crntTime.slot+RG_SCH_CMN_DL_DELTA) % RGSCH_NUM_SUB_FRAMES;
20089 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
20090 special subframe */
20091 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] != RG_SCH_TDD_UL_SUBFRAME)
20093 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
20094 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20095 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, msg3Subfrm);
20096 cellUl->msg3SchdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20097 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
 /* SPS UL reservation index: zero table entry means none this TTI */
20100 if(!rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][idx])
20102 cellUl->spsUlRsrvIdx = RGSCH_INVALID_INFO;
20106 /* introduce some reuse with above code? */
20108 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
20109 //offst = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20110 offst = rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][timeInfo.subframe];
20111 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, offst);
20112 cellUl->spsUlRsrvIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20113 /* The harq proc continues to be accessed and used the same delta before
20114 * actual data occurance, and hence use the same idx */
20115 cellUl->spsUlRsrvHqProcIdx = cellUl->schdHqProcIdx;
20119 /* RACHO: update cmn sched specific RACH variables,
20120 * mainly the prachMskIndx */
20121 rgSCHCmnUpdRachParam(cell);
20127 * @brief To get 'p' value from nCCE.
20131 * Function: rgSCHCmnGetPValFrmCCE
20132 * Purpose: Gets 'p' value for HARQ ACK/NACK reception from CCE.
 *          Walks the per-cell Np threshold table and returns the 'p'
 *          bucket into which the given CCE index falls (TDD ACK/NACK
 *          resource mapping).
20134 * @param[in] RgSchCellCb *cell
20135 * @param[in] U8 cce  CCE index of the scheduling PDCCH
 * @return U8  'p' value derived from the Np table
20140 PUBLIC U8 rgSCHCmnGetPValFrmCCE
20146 PUBLIC U8 rgSCHCmnGetPValFrmCCE(cell, cce)
20152 TRC2(rgSCHCmnGetPValFrmCCE);
 /* Find the first Np threshold strictly greater than cce; i starts at 1
  * because rgSchTddNpValTbl[0] is the lower bound of the first bucket */
20154 for(i=1; i < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; i++)
20156 if(cce < cell->rgSchTddNpValTbl[i])
20165 /***********************************************************
20167 * Func : rgSCHCmnUlAdapRetx
20169 * Desc : Adaptive retransmission for an allocation.
20177 **********************************************************/
20179 PRIVATE Void rgSCHCmnUlAdapRetx
20181 RgSchUlAlloc *alloc,
20182 RgSchUlHqProcCb *proc
20185 PRIVATE Void rgSCHCmnUlAdapRetx(alloc, proc)
20186 RgSchUlAlloc *alloc;
20187 RgSchUlHqProcCb *proc;
20190 TRC2(rgSCHCmnUlAdapRetx);
20192 rgSCHUhmRetx(proc, alloc);
20194 if (proc->rvIdx != 0)
20196 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[proc->rvIdx];
20201 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
20207 * @brief Scheduler invocation per TTI.
20211 * Function: rgSCHCmnHdlUlInactUes
 * Purpose:  Collects UEs that are UL-inactive this TTI (measurement
 *           gap / ANR reporting) and hands the list to the specific
 *           UL scheduler so it can mark them inactive.
20214 * Invoked by: Common Scheduler
20216 * @param[in] RgSchCellCb *cell
 * @return Void
20220 PRIVATE Void rgSCHCmnHdlUlInactUes
20225 PRIVATE Void rgSCHCmnHdlUlInactUes(cell)
20229 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20230 CmLListCp ulInactvLst;
20231 TRC2(rgSCHCmnHdlUlInactUes);
20232 /* Get a List of Inactv UEs for UL*/
20233 cmLListInit(&ulInactvLst);
20235 /* Trigger Spfc Schedulers with Inactive UEs */
20236 rgSCHMeasGapANRepGetUlInactvUe (cell, &ulInactvLst);
20237 /* take care of this in UL retransmission */
20238 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInactvLst);
20244 * @brief Scheduler invocation per TTI.
20248 * Function: rgSCHCmnHdlDlInactUes
 * Purpose:  DL counterpart of rgSCHCmnHdlUlInactUes: gathers UEs that
 *           are DL-inactive this TTI (measurement gap / ANR reporting)
 *           and passes the list to the specific DL scheduler.
20251 * Invoked by: Common Scheduler
20253 * @param[in] RgSchCellCb *cell
 * @return Void
20257 PRIVATE Void rgSCHCmnHdlDlInactUes
20262 PRIVATE Void rgSCHCmnHdlDlInactUes(cell)
20266 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20267 CmLListCp dlInactvLst;
20268 TRC2(rgSCHCmnHdlDlInactUes);
20269 /* Get a List of Inactv UEs for DL */
20270 cmLListInit(&dlInactvLst);
20272 /* Trigger Spfc Schedulers with Inactive UEs */
20273 rgSCHMeasGapANRepGetDlInactvUe (cell, &dlInactvLst);
20275 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInactvLst);
/* RACHO: Rach handover functions start here */
20280 /***********************************************************
20282 * Func : rgSCHCmnUeIdleExdThrsld
20284 * Desc : RETURN ROK if UE has been idle more
 *        than the per-UE idle threshold (in subframes); otherwise
 *        a non-ROK value is presumably returned — the failing branch
 *        is outside this view, TODO confirm.
20293 **********************************************************/
20295 PRIVATE S16 rgSCHCmnUeIdleExdThrsld
20301 PRIVATE S16 rgSCHCmnUeIdleExdThrsld(cell, ue)
20306 /* Time difference in subframes */
20307 U32 sfDiff = RGSCH_CALC_SF_DIFF(cell->crntTime, ue->ul.ulTransTime);
20309 TRC2(rgSCHCmnUeIdleExdThrsld);
 /* Compare elapsed subframes since last UL transmission against the
  * UE-specific idle threshold */
20311 if (sfDiff > (U32)RG_SCH_CMN_UE_IDLE_THRSLD(ue))
20323 * @brief Scheduler processing for Ded Preambles on cell configuration.
20327 * Function : rgSCHCmnCfgRachDedPrm
20329 * This function does requisite initialisation
20330 * for RACH Ded Preambles.
 * Sets up the dedicated-preamble pool (count/start), the HO and
 * PDCCH-order UE lists, the rapId->UE map, and computes the first
 * applicable PRACH opportunity ("applFrm" + prachMskIndx) at least
 * RG_SCH_CMN_MIN_PRACH_OPPR_GAP subframes ahead of current time.
20333 * @param[in] RgSchCellCb *cell
 * @return Void
20337 PRIVATE Void rgSCHCmnCfgRachDedPrm
20342 PRIVATE Void rgSCHCmnCfgRachDedPrm(cell)
20346 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20350 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20350 TRC2(rgSCHCmnCfgRachDedPrm);
 /* Nothing to do when no dedicated preamble set is configured */
20352 if (cell->macPreambleSet.pres == NOTPRSNT)
20356 cellSch->rachCfg.numDedPrm = cell->macPreambleSet.size;
20357 cellSch->rachCfg.dedPrmStart = cell->macPreambleSet.start;
20358 /* Initialize handover List */
20359 cmLListInit(&cellSch->rachCfg.hoUeLst);
20360 /* Initialize pdcch Order List */
20361 cmLListInit(&cellSch->rachCfg.pdcchOdrLst);
20363 /* Intialize the rapId to UE mapping structure */
20364 for (cnt = 0; cnt<cellSch->rachCfg.numDedPrm; cnt++)
20366 cellSch->rachCfg.rapIdMap[cnt].rapId = cellSch->rachCfg.dedPrmStart + \
20368 cmLListInit(&cellSch->rachCfg.rapIdMap[cnt].assgndUes);
20370 /* Perform Prach Mask Idx, remDedPrm, applFrm initializations */
20371 /* Set remDedPrm as numDedPrm */
20372 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20373 /* Initialize applFrm */
20374 cellSch->rachCfg.prachMskIndx = 0;
 /* Pick the first SFN matching the configured even/odd/any constraint */
20375 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_EVEN)
20377 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + \
20378 (cell->crntTime.sfn % 2)) % RGSCH_MAX_SFN;
20381 else if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ODD)
20383 if((cell->crntTime.sfn%2) == 0)
20385 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + 1)\
20392 cellSch->rachCfg.applFrm.sfn = cell->crntTime.sfn;
20394 /* Initialize cellSch->rachCfg.applFrm as >= crntTime.
20395 * This is because of RGSCH_CALC_SF_DIFF logic */
20396 if (cellSch->rachCfg.applFrm.sfn == cell->crntTime.sfn)
 /* Same SFN: find the first PRACH occasion subframe still ahead of now */
20398 while (cellSch->rachCfg.prachMskIndx < cell->rachCfg.raOccasion.size)
20400 if (cell->crntTime.slot <\
20401 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx])
20405 cellSch->rachCfg.prachMskIndx++;
 /* All occasions in this SFN already passed: move to the next
  * eligible SFN (+1 for ANY, +2 to preserve even/odd parity) */
20407 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size)
20409 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20411 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) %\
20416 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) %\
20419 cellSch->rachCfg.prachMskIndx = 0;
20421 cellSch->rachCfg.applFrm.slot = \
20422 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20426 cellSch->rachCfg.applFrm.slot = \
20427 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20430 /* Note first param to this macro should always be the latest in time */
20431 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
 /* Keep advancing the PRACH opportunity until it is at least 'gap'
  * subframes in the future */
20432 while (sfDiff <= gap)
20434 rgSCHCmnUpdNxtPrchMskIdx(cell);
20435 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
20442 * @brief Updates the PRACH MASK INDEX.
20446 * Function: rgSCHCmnUpdNxtPrchMskIdx
20447 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20448 * CFG is always >= "n"+"DELTA", where "n" is the crntTime
20449 * of the cell. If not, applFrm is updated to the next avl
20450 * PRACH opportunity as per the PRACH Cfg Index configuration.
20453 * Invoked by: Common Scheduler
20455 * @param[in] RgSchCellCb *cell
 * @return Void
20459 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx
20464 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx(cell)
20468 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20469 TRC2(rgSCHCmnUpdNxtPrchMskIdx);
20471 /* Determine the next prach mask Index */
20472 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size - 1)
20474 /* PRACH within applFrm.sfn are done, go to next AVL sfn */
20475 cellSch->rachCfg.prachMskIndx = 0;
20476 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20478 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) % \
20481 else/* RGR_SFN_EVEN or RGR_SFN_ODD: skip one SFN to keep parity */
20483 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) % \
20486 cellSch->rachCfg.applFrm.slot = cell->rachCfg.raOccasion.\
20489 else /* applFrm.sfn is still valid */
 /* Step to the next occasion subframe within the same SFN */
20491 cellSch->rachCfg.prachMskIndx += 1;
20492 if ( cellSch->rachCfg.prachMskIndx < RGR_MAX_SUBFRAME_NUM )
20494 cellSch->rachCfg.applFrm.slot = \
20495 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20502 * @brief Updates the Ded preamble RACH parameters
20507 * Function: rgSCHCmnUpdRachParam
20508 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20509 * CFG is always >= "n"+"6"+"DELTA", where "n" is the crntTime
20510 * of the cell. If not, applFrm is updated to the next avl
20511 * PRACH opportunity as per the PRACH Cfg Index configuration,
20512 * accordingly the "remDedPrm" is reset to "numDedPrm" and
20513 * "prachMskIdx" field is updated as per "applFrm".
20516 * Invoked by: Common Scheduler (every TTI via rgSCHCmnUpdVars)
20518 * @param[in] RgSchCellCb *cell
 * @return Void
20522 PRIVATE Void rgSCHCmnUpdRachParam
20527 PRIVATE Void rgSCHCmnUpdRachParam(cell)
20532 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20533 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20535 TRC2(rgSCHCmnUpdRachParam);
 /* No dedicated preambles configured: nothing to maintain */
20537 if (cell->macPreambleSet.pres == NOTPRSNT)
20541 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, \
20545 /* applFrm is still a valid next Prach Opportunity */
 /* Otherwise move to the next opportunity and replenish the pool */
20548 rgSCHCmnUpdNxtPrchMskIdx(cell);
20549 /* Reset remDedPrm as numDedPrm */
20550 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20556 * @brief Dedicated Preamble allocation function.
20560 * Function: rgSCHCmnAllocPOParam
20561 * Purpose: Allocate pdcch, rapId and PrachMskIdx.
20562 * Set mapping of UE with the allocated rapId.
 *          When no dedicated preambles are configured, only a PDCCH is
 *          allocated (rapId/prachMskIdx presumably left for the caller
 *          to default to 0 — the tail of that branch is not visible
 *          here, TODO confirm).
20564 * Invoked by: Common Scheduler
20566 * @param[in] RgSchCellCb *cell
20567 * @param[in] RgSchDlSf *dlSf
20568 * @param[in] RgSchUeCb *ue
20569 * @param[out] RgSchPdcch **pdcch
20570 * @param[out] U8 *rapId
20571 * @param[out] U8 *prachMskIdx
 * @return S16  ROK on success; failure when no PDCCH/preamble available
20575 PRIVATE S16 rgSCHCmnAllocPOParam
20580 RgSchPdcch **pdcch,
20585 PRIVATE S16 rgSCHCmnAllocPOParam(cell, dlSf, ue, pdcch, rapId, prachMskIdx)
20589 RgSchPdcch **pdcch;
20595 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20596 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20598 TRC2(rgSCHCmnAllocPOParam);
20600 if (cell->macPreambleSet.pres == PRSNT_NODEF)
 /* Dedicated preamble pool exhausted for this opportunity */
20602 if (cellSch->rachCfg.remDedPrm == 0)
20606 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20607 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
20611 /* The stored prachMskIdx is the index of PRACH Opportunities in
20612 * raOccasions.subframes[].
20613 * Converting the same to the actual PRACHMskIdx to be transmitted. */
20614 *prachMskIdx = cellSch->rachCfg.prachMskIndx + 1;
20615 /* Distribution starts from dedPrmStart till dedPrmStart + numDedPrm */
20616 *rapId = cellSch->rachCfg.dedPrmStart +
20617 cellSch->rachCfg.numDedPrm - cellSch->rachCfg.remDedPrm;
20618 cellSch->rachCfg.remDedPrm--;
20619 /* Map UE with the allocated RapId */
20620 ueDl->rachInfo.asgnOppr = cellSch->rachCfg.applFrm;
20621 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, cellSch->rachCfg.rapIdMap, (*rapId - cellSch->rachCfg.dedPrmStart));
20622 cmLListAdd2Tail(&cellSch->rachCfg.rapIdMap[*rapId - cellSch->rachCfg.dedPrmStart].assgndUes,
20623 &ueDl->rachInfo.rapIdLnk);
20624 ueDl->rachInfo.rapIdLnk.node = (PTR)ue;
20625 ueDl->rachInfo.poRapId = *rapId;
20627 else /* if dedicated preambles not configured */
20629 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20630 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
20642 * @brief Downlink Scheduling Handler.
20646 * Function: rgSCHCmnGenPdcchOrder
20647 * Purpose: For each UE in PO Q, grab a PDCCH,
20648 * get an available ded RapId and fill PDCCH
20649 * with PO information.
20651 * Invoked by: Common Scheduler
20653 * @param[in] RgSchCellCb *cell
20654 * @param[in] RgSchDlSf *dlSf
 * @return Void
20658 PRIVATE Void rgSCHCmnGenPdcchOrder
20664 PRIVATE Void rgSCHCmnGenPdcchOrder(cell, dlSf)
20669 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20670 CmLList *node = cellSch->rachCfg.pdcchOdrLst.first;
20674 RgSchPdcch *pdcch = NULLP;
20676 TRC2(rgSCHCmnGenPdcchOrder);
20680 ue = (RgSchUeCb *)node->node;
20682 /* Skip sending for this subframe if UE is Measuring or inactive in UL due
20683 * to MeasGap or inactive due to DRX
 */
20685 if ((ue->measGapCb.isMeasuring == TRUE) ||
20686 (ue->ul.ulInactvMask & RG_MEASGAP_INACTIVE) ||
20687 (ue->isDrxEnabled &&
20688 ue->dl.dlInactvMask & RG_DRX_INACTIVE)
20693 if (rgSCHCmnAllocPOParam(cell, dlSf, ue, &pdcch, &rapId,\
20694 &prachMskIdx) != ROK)
20696 /* No More rapIds left for the valid next avl Opportunity.
20697 * Unsatisfied UEs here would be given a chance, when the
20698 * prach Mask Index changes as per rachUpd every TTI */
20700 /* PDDCH can also be ordered with rapId=0, prachMskIdx=0
20701 * so that UE triggers a RACH procedure with non-dedicated preamble.
20702 * But the implementation here does not do this. Instead, the "break"
20703 * here implies, that PDCCH Odr always given with valid rapId!=0,
20704 * prachMskIdx!=0 if dedicated preambles are configured.
20705 * If not configured, then trigger a PO with rapId=0,prchMskIdx=0*/
20708 /* Fill pdcch with pdcch odr information */
20709 rgSCHCmnFillPdcchOdr2Sf(cell, ue, pdcch, rapId, prachMskIdx);
20710 /* Remove this UE from the PDCCH ORDER QUEUE */
20711 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20712 /* Reset UE's power state */
20713 rgSCHPwrUeReset(cell, ue);
20720 * @brief This function adds UE to PdcchOdr Q if not already present.
20724 * Function: rgSCHCmnDlAdd2PdcchOdrQ
 * Purpose:  Enqueue the UE on the cell's PDCCH-order list; the
 *           poLnk.node field doubles as the "already queued" flag.
20727 * Invoked by: CMN Scheduler
20729 * @param[in] RgSchCellCb* cell
20730 * @param[in] RgSchUeCb* ue
 * @return Void
20735 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ
20741 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ(cell, ue)
20746 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20747 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20749 TRC2(rgSCHCmnDlAdd2PdcchOdrQ);
 /* NULLP node means the UE is not yet on the queue */
20751 if (ueDl->rachInfo.poLnk.node == NULLP)
20753 cmLListAdd2Tail(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20754 ueDl->rachInfo.poLnk.node = (PTR)ue;
20761 * @brief This function removes UE from PdcchOdr Q.
20765 * Function: rgSCHCmnDlRmvFrmPdcchOdrQ
 * Purpose:  Dequeue the UE from the cell's PDCCH-order list and clear
 *           poLnk.node so the UE reads as "not queued".
20768 * Invoked by: CMN Scheduler
20770 * @param[in] RgSchCellCb* cell
20771 * @param[in] RgSchUeCb* ue
 * @return Void
20776 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ
20782 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue)
20787 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20788 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20790 TRC2(rgSCHCmnDlRmvFrmPdcchOdrQ);
20792 cmLListDelFrm(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20793 ueDl->rachInfo.poLnk.node = NULLP;
20798 * @brief Fill pdcch with PDCCH order information.
20802 * Function: rgSCHCmnFillPdcchOdr2Sf
20803 * Purpose: Fill PDCCH with PDCCH order information,
 *          i.e. DCI format 1A carrying the dedicated preamble index and
 *          PRACH mask index; also arms an aperiodic CQI request if
 *          aperiodic CQI is configured for the UE.
20805 * Invoked by: Common Scheduler
20807 * @param[in] RgSchUeCb *ue
20808 * @param[in] RgSchPdcch *pdcch
20809 * @param[in] U8 rapId
20810 * @param[in] U8 prachMskIdx
 * @return Void
20814 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf
20823 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf(ue, pdcch, rapId, prachMskIdx)
20831 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
20833 TRC2(rgSCHCmnFillPdcchOdr2Sf);
20835 pdcch->rnti = ue->ueId;
20836 pdcch->dci.dciFormat = TFU_DCI_FORMAT_1A;
20837 pdcch->dci.u.format1aInfo.isPdcchOrder = TRUE;
20838 pdcch->dci.u.format1aInfo.t.pdcchOrder.preambleIdx = rapId;
20839 pdcch->dci.u.format1aInfo.t.pdcchOrder.prachMaskIdx = prachMskIdx;
20841 /* Request for APer CQI immediately after PDCCH Order */
20842 /* CR ccpu00144525 */
20844 if(ue->dl.ueDlCqiCfg.aprdCqiCfg.pres)
20846 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
 /* Reset the aperiodic CQI trigger weight so the request fires next */
20847 acqiCb->aCqiTrigWt = 0;
20856 * @brief UE deletion for scheduler.
20860 * Function : rgSCHCmnDelRachInfo
20862 * This function deletes all scheduler RACH information
20863 * pertaining to an UE: removes it from the PDCCH-order queue, the
 * handover UE list, and its rapId->UE mapping, if present on each.
20865 * @param[in] RgSchCellCb *cell
20866 * @param[in] RgSchUeCb *ue
 * @return Void
20870 PRIVATE Void rgSCHCmnDelRachInfo
20876 PRIVATE Void rgSCHCmnDelRachInfo(cell, ue)
20881 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20882 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20885 TRC2(rgSCHCmnDelRachInfo);
 /* Non-NULL link nodes indicate membership on the respective list */
20887 if (ueDl->rachInfo.poLnk.node)
20889 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20891 if (ueDl->rachInfo.hoLnk.node)
20893 cmLListDelFrm(&cellSch->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
20894 ueDl->rachInfo.hoLnk.node = NULLP;
20896 if (ueDl->rachInfo.rapIdLnk.node)
 /* Translate assigned rapId back to its index in the rapIdMap */
20898 rapIdIdx = ueDl->rachInfo.poRapId - cellSch->rachCfg.dedPrmStart;
20899 cmLListDelFrm(&cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes,
20900 &ueDl->rachInfo.rapIdLnk);
20901 ueDl->rachInfo.rapIdLnk.node = NULLP;
20907 * @brief This function retrieves the ue which has sent this raReq
20908 * and it allocates grant for UEs undergoing (for which RAR
20909 * is being generated) HandOver/PdcchOrder.
20914 * Function: rgSCHCmnHdlHoPo
20915 * Purpose: This function retrieves the ue which has sent this raReq
20916 * and it allocates grant for UEs undergoing (for which RAR
20917 * is being generated) HandOver/PdcchOrder.
20919 * Invoked by: Common Scheduler
20921 * @param[in] RgSchCellCb *cell
20922 * @param[out] CmLListCp *raRspLst
20923 * @param[in] RgSchRaReqInfo *raReq
 * @return Void
20928 PRIVATE Void rgSCHCmnHdlHoPo
20931 CmLListCp *raRspLst,
20932 RgSchRaReqInfo *raReq
20935 PRIVATE Void rgSCHCmnHdlHoPo(cell, raRspLst, raReq)
20937 CmLListCp *raRspLst;
20938 RgSchRaReqInfo *raReq;
20941 RgSchUeCb *ue = raReq->ue;
20942 TRC2(rgSCHCmnHdlHoPo);
 /* Inform DRX of the dedicated RA so the UE is kept awake for RAR */
20944 if ( ue->isDrxEnabled )
20946 rgSCHDrxDedRa(cell,ue);
20948 rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq);
20953 * @brief This function retrieves the UE which has sent this raReq
20954 * for handover case.
20959 * Function: rgSCHCmnGetHoUe
20960 * Purpose: This function retrieves the UE which has sent this raReq
20961 * for handover case.
 *          Linear search of the cell's handover UE list for a UE whose
 *          assigned HO rapId matches the received one.
20963 * Invoked by: Common Scheduler
20965 * @param[in] RgSchCellCb *cell
20966 * @param[in] U8 rapId  received dedicated preamble id
20967 * @return RgSchUeCb*  matching UE (NULLP when not found — tail of the
 *                      function is outside this view, TODO confirm)
20971 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe
20977 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe(cell, rapId)
20982 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20986 RgSchCmnDlUe *ueDl;
20987 TRC2(rgSCHCmnGetHoUe);
20989 ueLst = &cellSch->rachCfg.hoUeLst;
20990 node = ueLst->first;
20993 ue = (RgSchUeCb *)node->node;
20995 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20996 if (ueDl->rachInfo.hoRapId == rapId)
/* Removes the UE assigned the given dedicated preamble from the
 * handover UE list, releasing the preamble association.
 * @param[in] RgSchCellCb *cell
 * @param[in] preambleId  dedicated preamble (rapId) to release
 * @return Void
 * NOTE(review): the K&R-style definition line below omits the `Void`
 * return type present in the ANSI prototype — relies on implicit int,
 * invalid since C99; should read `PRIVATE Void rgSCHCmnDelDedPreamble(...)`.
 * Left unchanged here; confirm against the build configuration. */
21005 PRIVATE Void rgSCHCmnDelDedPreamble
21011 PRIVATE rgSCHCmnDelDedPreamble(cell, preambleId)
21016 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
21020 RgSchCmnDlUe *ueDl;
21021 TRC2(rgSCHCmnDelDedPreamble);
21023 ueLst = &cellSch->rachCfg.hoUeLst;
21024 node = ueLst->first;
21027 ue = (RgSchUeCb *)node->node;
21029 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
 /* Match on the HO rapId and unlink the UE from the HO list */
21030 if (ueDl->rachInfo.hoRapId == preambleId)
21032 cmLListDelFrm(ueLst, &ueDl->rachInfo.hoLnk);
21033 ueDl->rachInfo.hoLnk.node = (PTR)NULLP;
21039 * @brief This function retrieves the UE which has sent this raReq
21040 * for PDCCh Order case.
21045 * Function: rgSCHCmnGetPoUe
21046 * Purpose: This function retrieves the UE which has sent this raReq
21047 * for PDCCH Order case.
 *          Walks the assigned-UE list of the rapId, unlinking every
 *          visited UE (stale associations are purged), and matches on
 *          the PRACH opportunity time the rapId was assigned for.
21049 * Invoked by: Common Scheduler
21051 * @param[in] RgSchCellCb *cell
 * @param[in] U8 rapId  received dedicated preamble id
 * @param[in] CmLteTimingInfo timingInfo  PRACH opportunity time
21053 * @return RgSchUeCb*
21057 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe
21061 CmLteTimingInfo timingInfo
21064 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe(cell, rapId, timingInfo)
21067 CmLteTimingInfo timingInfo;
21070 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
21074 RgSchCmnDlUe *ueDl;
21076 TRC2(rgSCHCmnGetPoUe);
21078 rapIdIdx = rapId -cellSch->rachCfg.dedPrmStart;
21079 ueLst = &cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes;
21080 node = ueLst->first;
21083 ue = (RgSchUeCb *)node->node;
21085 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
21086 /* Remove UEs irrespective.
21087 * Old UE associations are removed.*/
21088 cmLListDelFrm(ueLst, &ueDl->rachInfo.rapIdLnk);
21089 ueDl->rachInfo.rapIdLnk.node = (PTR)NULLP;
 /* A UE matches only if assigned for exactly this PRACH opportunity */
21090 if (RGSCH_TIMEINFO_SAME(ueDl->rachInfo.asgnOppr, timingInfo))
21101 * @brief This function returns the valid UL cqi for a given UE.
21105 * Function: rgSCHCmnUlGetCqi
21106 * Purpose: This function returns the "valid UL cqi" for a given UE
21107 * based on UE category
 *          Caps the reported CQI at ueUl->maxUlCqi for all categories
 *          except CAT-5, which may use the full reported value.
21109 * Invoked by: Scheduler
 * @param[in] RgSchCellCb *cell
21111 * @param[in] RgSchUeCb *ue
21112 * @param[in] CmLteUeCategory ueCtgy
 * @return U8  UL CQI to use for link adaptation
21116 PUBLIC U8 rgSCHCmnUlGetCqi
21120 CmLteUeCategory ueCtgy
21123 PUBLIC U8 rgSCHCmnUlGetCqi(cell, ue, ueCtgy)
21126 CmLteUeCategory ueCtgy;
21129 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21132 TRC2(rgSCHCmnUlGetCqi);
 /* Default to the category-capped maximum */
21134 cqi = ueUl->maxUlCqi;
 /* Use the reported value unless a non-CAT5 UE reports above its cap */
21136 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21137 (ueUl->validUlCqi > ueUl->maxUlCqi)))
21139 cqi = ueUl->validUlCqi;
21142 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21143 (ueUl->crntUlCqi[0] > ueUl->maxUlCqi )))
21145 cqi = ueUl->crntUlCqi[0];
21149 }/* End of rgSCHCmnUlGetCqi */
21151 /***********************************************************
21153 * Func : rgSCHCmnUlRbAllocForPoHoUe
21155 * Desc : Do uplink RB allocation for a HO/PO UE.
 *        Derives iTbs/iMcs from the UE's UL CQI (clamped to the Msg3
 *        maximum), sizes the allocation in subbands from the requested
 *        bytes, grabs subbands from the UL subframe, and books a new
 *        HARQ transmission on the Msg3 HARQ process.
 *
 * Ret  : ROK on success; failure when no hole/alloc is available
 *        (exact failure value elided from this view, TODO confirm).
21159 * Notes: Note that as of now, for retx, maxRb
21160 * is not considered. Alternatives, such
21161 * as dropping retx if it crosses maxRb
21162 * could be considered.
21166 **********************************************************/
21168 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe
21176 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, maxRb)
21183 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21184 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21185 U8 sbSize = cellUl->sbSize;
21186 U32 maxBits = ue->ul.maxBytesPerUePerTti*8;
21188 RgSchUlAlloc *alloc;
21198 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->msg3SchdHqProcIdx];
21199 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
21201 TRC2(rgSCHCmnUlRbAllocForPoHoUe);
 /* No free hole in the UL subframe: cannot allocate */
21202 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
21206 /*MS_WORKAROUND for HO ccpu00121116*/
21207 cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
21208 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend], cqi);
21209 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21210 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
 /* Clamp MCS to the Msg3 maximum by stepping CQI/iTbs down */
21211 while(iMcs > RG_SCH_CMN_MAX_MSG3_IMCS)
21214 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21215 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg);
21217 /* Filling the modorder in the grant structure*/
21218 RG_SCH_UL_MCS_TO_MODODR(iMcs,modOdr);
 /* Spectral efficiency lookup differs for normal vs extended UL CP */
21219 if (!cell->isCpUlExtend)
21221 eff = rgSchCmnNorUlEff[0][iTbs];
21225 eff = rgSchCmnExtUlEff[0][iTbs];
21228 bits = ueUl->alloc.reqBytes * 8;
21230 #if (ERRCLASS & ERRCLS_DEBUG)
 /* Requested bits below the minimum TB for this iTbs: round PRBs up
  * from the minimum subband count */
21237 if (bits < rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs))
21240 nPrb = numSb * sbSize;
 /* Cap the request at the per-UE per-TTI byte limit */
21244 if (bits > maxBits)
21247 nPrb = bits * 1024 / eff / RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
21252 numSb = nPrb / sbSize;
21256 /*ccpu00128775:MOD-Change to get upper threshold nPrb*/
21257 nPrb = RGSCH_CEIL((RGSCH_CEIL(bits * 1024, eff)),
21258 RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl));
21263 numSb = RGSCH_DIV_ROUND(nPrb, sbSize);
 /* Grab subbands, bounded by the per-UE subband cap */
21268 alloc = rgSCHCmnUlSbAlloc(sf, (U8)RGSCH_MIN(numSb, cellUl->maxSbPerUe),\
21270 if (alloc == NULLP)
21272 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
21273 "rgSCHCmnUlRbAllocForPoHoUe(): Could not get UlAlloc");
21276 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
21278 /* Filling the modorder in the grant structure start*/
21279 alloc->grnt.modOdr = (TfuModScheme) modOdr;
21280 alloc->grnt.iMcs = iMcs;
21281 alloc->grnt.iMcsCrnt = iMcsCrnt;
21282 alloc->grnt.hop = 0;
21283 /* Fix for ccpu00123915*/
21284 alloc->forMsg3 = TRUE;
21285 alloc->hqProc = proc;
21286 alloc->hqProc->ulSfIdx = cellUl->msg3SchdIdx;
21288 alloc->rnti = ue->ueId;
21289 /* updating initNumRbs in case of HO */
21291 ue->initNumRbs = alloc->grnt.numRb;
21293 ueUl->alloc.alloc = alloc;
21294 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
21295 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
 /* TB size table yields bits; convert to bytes */
21296 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
21297 /* MS_WORKAROUND for HO ccpu00121124*/
21298 /*[Adi temp change] Need to fil modOdr */
21299 RG_SCH_UL_MCS_TO_MODODR(alloc->grnt.iMcsCrnt,alloc->grnt.modOdr);
21300 rgSCHUhmNewTx(proc, ueUl->hqEnt.maxHqRetx, alloc);
21301 /* No grant attr recorded now */
21306 * @brief This function allocates grant for UEs undergoing (for which RAR
21307 * is being generated) HandOver/PdcchOrder.
21312 * Function: rgSCHCmnAllocPoHoGrnt
21313 * Purpose: This function allocates grant for UEs undergoing (for which RAR
21314 * is being generated) HandOver/PdcchOrder.
 *          Clears any previous UL allocations, performs the RB
 *          allocation on the Msg3 UL subframe, copies the grant into
 *          ue->ul.rarGrnt (including TA and optional aperiodic CQI
 *          request bit), and links the UE onto the RAR response list.
21316 * Invoked by: Common Scheduler
21318 * @param[in] RgSchCellCb *cell
21319 * @param[out] CmLListCp *raRspLst,
21320 * @param[in] RgSchUeCb *ue
21321 * @param[in] RgSchRaReqInfo *raReq
 * @return Void
21326 PRIVATE Void rgSCHCmnAllocPoHoGrnt
21329 CmLListCp *raRspLst,
21331 RgSchRaReqInfo *raReq
21334 PRIVATE Void rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq)
21336 CmLListCp *raRspLst;
21338 RgSchRaReqInfo *raReq;
21341 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21342 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21344 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
21346 TRC2(rgSCHCmnAllocPoHoGrnt);
21348 /* Clearing previous allocs if any*/
21349 rgSCHCmnUlUeDelAllocs(cell, ue);
21350 /* Fix : syed allocs are limited */
21351 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
21355 ueUl->alloc.reqBytes = RG_SCH_MIN_GRNT_HOPO;
21356 if (rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, RGSCH_MAX_UL_RB) != ROK)
21361 /* Fill grant information */
21362 grnt = &ueUl->alloc.alloc->grnt;
21367 RLOG_ARG1(L_ERROR,DBG_INSTID,cell->instIdx, "Failed to get"
21368 "the grant for HO/PDCCH Order. CRNTI:%d",ue->ueId);
 /* Copy the UL grant into the RAR grant carried back to the UE */
21371 ue->ul.rarGrnt.rapId = raReq->raReq.rapId;
21372 ue->ul.rarGrnt.hop = grnt->hop;
21373 ue->ul.rarGrnt.rbStart = grnt->rbStart;
21374 ue->ul.rarGrnt.numRb = grnt->numRb;
21375 ue->ul.rarGrnt.tpc = grnt->tpc;
21376 ue->ul.rarGrnt.iMcsCrnt = grnt->iMcsCrnt;
21377 ue->ul.rarGrnt.ta.pres = TRUE;
21378 ue->ul.rarGrnt.ta.val = raReq->raReq.ta;
21379 ue->ul.rarGrnt.datSz = grnt->datSz;
 /* Aperiodic CQI request piggy-backed on the RAR grant, subject to the
  * per-UL-sf aperiodic CQI budget */
21380 if((sf->numACqiCount < RG_SCH_MAX_ACQI_PER_ULSF) && (RG_SCH_APCQI_NO != ue->dl.reqForCqi))
21384 /* Send two bits cqireq field if more than one cells are configured else one*/
21385 for (idx = 1;idx < CM_LTE_MAX_CELLS;idx++)
21387 if (ue->cellInfo[idx] != NULLP)
21389 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
21393 if (idx == CM_LTE_MAX_CELLS)
21396 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
21398 ue->dl.reqForCqi = RG_SCH_APCQI_NO;
21399 sf->numACqiCount++;
21403 ue->ul.rarGrnt.cqiReqBit = 0;
21405 /* Attach Ho/Po allocation to RAR Rsp cont free Lst */
21406 cmLListAdd2Tail(raRspLst, &ue->ul.rarGrnt.raRspLnk);
21407 ue->ul.rarGrnt.raRspLnk.node = (PTR)ue;
21413 * @brief This is a utility function to set the fields in
21414 * an UL harq proc which is identified for non-adaptive retx
21418 * Function: rgSCHCmnUlNonadapRetx
21419 * Purpose: Sets the fields in UL Harq proc for non-adaptive retx:
 *          books the retx, records the retx UL sf index, picks the
 *          RV-dependent current iMcs, and clears the PDCCH since no
 *          new DCI-0 is sent for a non-adaptive retransmission.
21421 * @param[in] RgSchCmnUlCell *cellUl
21422 * @param[out] RgSchUlAlloc *alloc
21423 * @param[in] U8 idx  index into cellUl->reTxIdx[]
 * @return Void
21429 PRIVATE Void rgSCHCmnUlNonadapRetx
21431 RgSchCmnUlCell *cellUl,
21432 RgSchUlAlloc *alloc,
21436 PRIVATE Void rgSCHCmnUlNonadapRetx(cellUl, alloc, idx)
21437 RgSchCmnUlCell *cellUl;
21438 RgSchUlAlloc *alloc;
21442 TRC2(rgSCHCmnUlNonadapRetx);
21443 rgSCHUhmRetx(alloc->hqProc, alloc);
21445 /* Update alloc to retx */
21446 alloc->hqProc->isRetx = TRUE;
21447 alloc->hqProc->ulSfIdx = cellUl->reTxIdx[idx];
 /* Non-zero RV is signalled through the reserved iMcs values */
21449 if (alloc->hqProc->rvIdx != 0)
21451 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[alloc->hqProc->rvIdx];
21455 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
21457 alloc->grnt.isRtx = TRUE;
 /* Non-adaptive retx: no PDCCH accompanies this grant */
21458 alloc->pdcch = NULLP;
21463 * @brief Check if 2 allocs overlap
21467 * Function : rgSCHCmnUlAllocsOvrLap
21469 * - Return TRUE if alloc1 and alloc2 overlap.
 *   Two allocations overlap when either one's starting subband falls
 *   inside the other's [sbStart, sbStart+numSb-1] range.
21471 * @param[in] RgSchUlAlloc *alloc1
21472 * @param[in] RgSchUlAlloc *alloc2
 * @return Bool  TRUE when the subband ranges intersect
21477 PRIVATE Bool rgSCHCmnUlAllocsOvrLap
21479 RgSchUlAlloc *alloc1,
21480 RgSchUlAlloc *alloc2
21483 PRIVATE Bool rgSCHCmnUlAllocsOvrLap(alloc1, alloc2)
21484 RgSchUlAlloc *alloc1;
21485 RgSchUlAlloc *alloc2;
21489 TRC2(rgSCHCmnUlAllocsOvrLap);
21491 if (((alloc1->sbStart >= alloc2->sbStart) &&
21492 (alloc1->sbStart <= alloc2->sbStart + alloc2->numSb-1)) ||
21493 ((alloc2->sbStart >= alloc1->sbStart) &&
21494 (alloc2->sbStart <= alloc1->sbStart + alloc1->numSb-1)))
21502 * @brief Copy allocation Info from src to dst.
21506 * Function : rgSCHCmnUlCpyAllocInfo
21508 * - Copy allocation Info from src to dst.
 *   Transfers grant, HARQ-proc, UE, RNTI, Msg3 flag, raCb and PDCCH
 *   references, then repoints back-references (hqProc->alloc, the UE's
 *   current alloc, and any SPS current-alloc pointer) at dst.
21510 * @param[in] RgSchCellCb *cell
 * @param[in] RgSchUlAlloc *srcAlloc
21511 * @param[out] RgSchUlAlloc *dstAlloc
 * @return Void
21516 PRIVATE Void rgSCHCmnUlCpyAllocInfo
21519 RgSchUlAlloc *srcAlloc,
21520 RgSchUlAlloc *dstAlloc
21523 PRIVATE Void rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc)
21525 RgSchUlAlloc *srcAlloc;
21526 RgSchUlAlloc *dstAlloc;
21529 RgSchCmnUlUe *ueUl;
21530 TRC2(rgSCHCmnUlCpyAllocInfo);
21532 dstAlloc->grnt = srcAlloc->grnt;
21533 dstAlloc->hqProc = srcAlloc->hqProc;
21534 /* Fix : syed During UE context release, hqProc->alloc
21535 * was pointing to srcAlloc instead of dstAlloc and
21536 * freeing from incorrect sf->allocDb was
21537 * corrupting the list. */
21538 /* In case of SPS Occasion Allocation is done in advance and
21539 at a later time Hq Proc is linked. Hence HqProc
21540 pointer in alloc shall be NULL */
21542 if (dstAlloc->hqProc)
21545 dstAlloc->hqProc->alloc = dstAlloc;
21547 dstAlloc->ue = srcAlloc->ue;
21548 dstAlloc->rnti = srcAlloc->rnti;
21549 dstAlloc->forMsg3 = srcAlloc->forMsg3;
21550 dstAlloc->raCb = srcAlloc->raCb;
21551 dstAlloc->pdcch = srcAlloc->pdcch;
21552 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
21555 ueUl = RG_SCH_CMN_GET_UL_UE(dstAlloc->ue,cell);
21556 ueUl->alloc.alloc = dstAlloc;
 /* Keep the SPS bookkeeping consistent with the moved allocation */
21558 if (dstAlloc->ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
21560 if((dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc != NULLP)
21561 && (dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc == srcAlloc))
21563 dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc = dstAlloc;
21574 * @brief Update TX and RETX subframe's allocation
21579 * Function : rgSCHCmnUlInsAllocFrmNewSf2OldSf
21581 * - Release all preassigned allocations of newSf and merge
21583 * - If alloc of newSf collide with one or more allocs of oldSf
21584 * - mark all such allocs of oldSf for Adaptive Retx.
21585 * - Swap the alloc and hole DB references of oldSf and newSf.
21587 * @param[in] RgSchCellCb *cell
21588 * @param[in] RgSchUlSf *newSf
21589 * @param[in] RgSchUlSf *oldSf
21590 * @param[in] RgSchUlAlloc *srcAlloc
21595 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf
21600 RgSchUlAlloc *srcAlloc
21603 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, srcAlloc)
21607 RgSchUlAlloc *srcAlloc;
21610 RgSchUlAlloc *alloc, *dstAlloc, *nxtAlloc;
21612 /* MS_WORKAROUND ccpu00120827 */
21613 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
21615 TRC2(rgSCHCmnUlInsAllocFrmNewSf2OldSf);
/* Pass 1: walk oldSf's allocs and evict every alloc whose subbands overlap
 * srcAlloc — each evicted alloc is queued for adaptive retransmission. */
21617 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21621 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
21622 /* If there is an overlap between alloc and srcAlloc
21623 * then alloc is marked for Adaptive retx and it is released
21625 if (rgSCHCmnUlAllocsOvrLap(alloc, srcAlloc) == TRUE)
21627 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21628 rgSCHUtlUlAllocRls(oldSf, alloc);
21630 /* No further allocs spanning the srcAlloc subbands */
/* Allocs are ordered by subband; once past srcAlloc's last subband, stop. */
21631 if (srcAlloc->sbStart + srcAlloc->numSb - 1 <= alloc->sbStart)
21635 } while ((alloc = nxtAlloc) != NULLP);
21638 /* After freeing all the colliding allocs, request for an allocation
21639 * specifying the start and numSb with in txSf. This function should
21640 * always return positively with a nonNULL dstAlloc */
21641 /* MS_WORKAROUND ccpu00120827 */
21642 remAllocs = schCmnCell->ul.maxAllocPerUlSf - *oldSf->allocCountRef;
21645 /* Fix : If oldSf already has max Allocs then release the
21646 * old RETX alloc to make space for new alloc of newSf.
21647 * newSf allocs(i.e new Msg3s) are given higher priority
21648 * over retx allocs. */
/* Pass 2 (only when the per-subframe alloc budget is exhausted): free a
 * non-merged (i.e. RETX) alloc to make room for the incoming new-TX alloc. */
21649 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21653 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
21654 if (!alloc->mrgdNewTxAlloc)
21656 /* If alloc is for RETX */
21657 /* TODO: Incase of this ad also in case of choosing
21658 * and alloc for ADAP RETX, we need to send ACK for
21659 * the corresponding alloc in PHICH */
21660 #ifndef EMTC_ENABLE
21661 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc);
/* EMTC build variant takes an extra flag argument. */
21663 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc,FALSE);
21667 }while((alloc = nxtAlloc) != NULLP);
/* Carve the exact subband span of srcAlloc out of oldSf's hole DB. */
21670 dstAlloc = rgSCHUtlUlGetSpfcAlloc(oldSf, srcAlloc->sbStart, srcAlloc->numSb);
21672 /* This should never happen */
21673 if (dstAlloc == NULLP)
21675 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"CRNTI:%d "
21676 "rgSCHUtlUlGetSpfcAlloc failed in rgSCHCmnUlInsAllocFrmNewSf2OldSf",
21681 /* Copy the srcAlloc's state information in to dstAlloc */
21682 rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc);
21683 /* Set new Tx merged Alloc Flag to TRUE, indicating that this
21684 * alloc shall not be processed for non-adaptive retransmission */
21685 dstAlloc->mrgdNewTxAlloc = TRUE;
21690 * @brief Merge all allocations of newSf to oldSf.
21694 * Function : rgSCHCmnUlMergeSfAllocs
21696 * - Merge all allocations of newSf to oldSf.
21697 * - If newSf's alloc collides with oldSf's alloc
21698 * then oldSf's alloc is marked for adaptive Retx
21699 * and is released from oldSf to create space for
21702 * @param[in] RgSchCellCb *cell
21703 * @param[in] RgSchUlSf *oldSf
21704 * @param[in] RgSchUlSf *newSf
21709 PRIVATE Void rgSCHCmnUlMergeSfAllocs
21716 PRIVATE Void rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf)
21722 RgSchUlAlloc *alloc, *nxtAlloc;
21723 TRC2(rgSCHCmnUlMergeSfAllocs);
21726 /* Merge each alloc of newSf in to oldSf
21727 * and release it from newSf */
/* Iterate newSf's alloc list; each alloc is re-created inside oldSf
 * (evicting colliding oldSf allocs) and then released from newSf. The next
 * pointer is saved first because the current alloc is freed in the loop. */
21728 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21732 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21733 rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, alloc);
21734 rgSCHUtlUlAllocRls(newSf, alloc);
21735 } while((alloc = nxtAlloc) != NULLP);
21741 * @brief Swap Hole/Alloc DB context of newSf and oldSf.
21745 * Function : rgSCHCmnUlSwapSfAllocs
21747 * - Swap Hole/Alloc DB context of newSf and oldSf.
21749 * @param[in] RgSchCellCb *cell
21750 * @param[in] RgSchUlSf *oldSf
21751 * @param[in] RgSchUlSf *newSf
21756 PRIVATE Void rgSCHCmnUlSwapSfAllocs
21763 PRIVATE Void rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf)
/* Three-way swap of the alloc DB, hole DB and available-subband count
 * between the two subframes, using temporaries for newSf's originals. */
21769 RgSchUlAllocDb *tempAllocDb = newSf->allocDb;
21770 RgSchUlHoleDb *tempHoleDb = newSf->holeDb;
21771 U8 tempAvailSbs = newSf->availSubbands;
21773 TRC2(rgSCHCmnUlSwapSfAllocs);
21776 newSf->allocDb = oldSf->allocDb;
21777 newSf->holeDb = oldSf->holeDb;
21778 newSf->availSubbands = oldSf->availSubbands;
21780 oldSf->allocDb = tempAllocDb;
21781 oldSf->holeDb = tempHoleDb;
21782 oldSf->availSubbands = tempAvailSbs;
21784 /* Fix ccpu00120610*/
/* allocCountRef must be re-seated after the swap so each subframe's count
 * reference tracks its (newly assigned) own allocDb. */
21785 newSf->allocCountRef = &newSf->allocDb->count;
21786 oldSf->allocCountRef = &oldSf->allocDb->count;
21791 * @brief Perform non-adaptive RETX for non-colliding allocs.
21795 * Function : rgSCHCmnUlPrcNonAdptRetx
21797 * - Perform non-adaptive RETX for non-colliding allocs.
21799 * @param[in] RgSchCellCb *cell
21800 * @param[in] RgSchUlSf *newSf
21801 * @param[in] U8 idx
21806 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx
21813 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx)
21819 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21820 RgSchUlAlloc *alloc, *nxtAlloc;
21821 TRC2(rgSCHCmnUlPrcNonAdptRetx);
21823 /* perform non-adaptive retx allocation(adjustment) */
/* Walk the merged subframe: allocs flagged as merged new-TX are skipped
 * (flag reset for next occasion); all others get a non-adaptive retx. */
21824 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21828 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21829 /* A merged new TX alloc, reset the state and skip */
21830 if (alloc->mrgdNewTxAlloc)
21832 alloc->mrgdNewTxAlloc = FALSE;
21837 rgSCHCmnUlNonadapRetx(cellUl, alloc, idx);
21839 } while((alloc = nxtAlloc) != NULLP);
21845 * @brief Update TX and RETX subframe's allocation
21850 * Function : rgSCHCmnUlPrfmSfMerge
21852 * - Release all preassigned allocations of newSf and merge
21854 * - If alloc of newSf collide with one or more allocs of oldSf
21855 * - mark all such allocs of oldSf for Adaptive Retx.
21856 * - Swap the alloc and hole DB references of oldSf and newSf.
21857 * - The allocs which did not collide with pre-assigned msg3
21858 * allocs are marked for non-adaptive RETX.
21860 * @param[in] RgSchCellCb *cell
21861 * @param[in] RgSchUlSf *oldSf
21862 * @param[in] RgSchUlSf *newSf
21863 * @param[in] U8 idx
21868 PRIVATE Void rgSCHCmnUlPrfmSfMerge
21876 PRIVATE Void rgSCHCmnUlPrfmSfMerge(cell, oldSf, newSf, idx)
21883 TRC2(rgSCHCmnUlPrfmSfMerge);
21884 /* Preassigned resources for msg3 in newSf.
21885 * Hence do adaptive retx for all NACKED TXs */
/* Orchestration only: merge, then swap DBs, then non-adaptive retx —
 * the three helpers above implement each step. */
21886 rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf);
21887 /* swap alloc and hole DBs of oldSf and newSf. */
21888 rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf);
21889 /* Here newSf has the resultant merged allocs context */
21890 /* Perform non-adaptive RETX for non-colliding allocs */
21891 rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx);
21897 * @brief Update TX and RETX subframe's allocation
21902 * Function : rgSCHCmnUlRmvCmpltdAllocs
21904 * - Free all Transmission which are ACKED
21905 * OR for which MAX retransmission have
21909 * @param[in] RgSchCellCb *cell,
21910 * @param[in] RgSchUlSf *sf
21914 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs
21920 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs(cell, sf)
21925 RgSchUlAlloc *alloc, *nxtAlloc;
21926 TRC2(rgSCHCmnUlRmvCmpltdAllocs);
21928 if ((alloc = rgSCHUtlUlAllocFirst(sf)) == NULLP)
21934 nxtAlloc = rgSCHUtlUlAllocNxt(sf, alloc);
/* NOTE(review): printf in the scheduler data path — should be a trace/log
 * macro (RLOG/TRC) or compiled out; verify against project logging policy. */
21936 printf("rgSCHCmnUlRmvCmpltdAllocs:time(%d %d) alloc->hqProc->remTx %d hqProcId(%d) \n",cell->crntTime.sfn,cell->crntTime.slot,alloc->hqProc->remTx, alloc->grnt.hqProcId);
/* NOTE(review): rcvdCrcInd is forced TRUE unconditionally here, which makes
 * the very next condition always take the free path and leaves the adaptive
 * retx branch below unreachable — looks like debug/stub code; confirm intent. */
21938 alloc->hqProc->rcvdCrcInd = TRUE;
21939 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
21942 /* SR_RACH_STATS : MSG 3 MAX RETX FAIL*/
21943 if ((alloc->forMsg3 == TRUE) && (alloc->hqProc->remTx == 0))
21945 rgNumMsg3FailMaxRetx++;
21947 cell->tenbStats->sch.msg3Fail++;
21951 #ifdef MAC_SCH_STATS
21952 if(alloc->ue != NULLP)
21954 /* access from ulHarqProc*/
21955 RgSchUeCb *ueCb = alloc->ue;
21956 RgSchCmnUe *cmnUe = (RgSchCmnUe*)ueCb->sch;
21957 RgSchCmnUlUe *ulUe = &(cmnUe->ul);
21958 U8 cqi = ulUe->crntUlCqi[0];
21959 U16 numUlRetx = ueCb->ul.hqEnt.maxHqRetx - alloc->hqProc->remTx;
/* Per-CQI HARQ retx histogram: buckets 1..4 plus a weighted total-Tx count. */
21961 hqRetxStats.ulCqiStat[(cqi - 1)].mcs = alloc->grnt.iMcs;
21966 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1++;
21969 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2++;
21972 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3++;
21975 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4++;
21978 hqRetxStats.ulCqiStat[(cqi - 1)].totalTx = \
21979 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1 + \
21980 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2 * 2) + \
21981 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3 * 3) + \
21982 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4 * 4);
21985 #endif /*MAC_SCH_STATS*/
21986 rgSCHCmnUlFreeAllocation(cell, sf, alloc);
21988 /*ccpu00106104 MOD added check for AckNackRep */
21989 /*added check for acknack so that adaptive retx considers ue
21990 inactivity due to ack nack repetition*/
/* Otherwise (CRC not received, retx budget remaining, not Msg3): queue the
 * alloc for adaptive retransmission and release it from this subframe. */
21991 else if((alloc->ue != NULLP) && (TRUE != alloc->forMsg3))
21993 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21994 rgSCHUtlUlAllocRls(sf, alloc);
21996 } while ((alloc = nxtAlloc) != NULLP);
22002 * @brief Update an uplink subframe.
22006 * Function : rgSCHCmnRlsUlSf
22008 * For each allocation
22009 * - if no more tx needed
22010 * - Release allocation
22012 * - Perform retransmission
22014 * @param[in] RgSchUlSf *sf
22015 * @param[in] U8 idx
22019 PUBLIC Void rgSCHCmnRlsUlSf
22025 PUBLIC Void rgSCHCmnRlsUlSf(cell, idx)
22030 TRC2(rgSCHCmnRlsUlSf);
22032 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
/* Skip when no HARQ-feedback subframe is mapped for this index. */
22034 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
22036 RgSchUlSf *oldSf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
22038 /* Initialize the reTxLst of UL HqProcs for RETX subframe */
/* An empty alloc list means nothing to release; early exit path. */
22039 if (rgSCHUtlUlAllocFirst(oldSf) == NULLP)
22043 /* Release all completed TX allocs from sf */
22044 rgSCHCmnUlRmvCmpltdAllocs(cell, oldSf);
22046 oldSf->numACqiCount = 0;
22052 * @brief Handle uplink allocation for retransmission.
22056 * Function : rgSCHCmnUlUpdAllocRetx
22058 * - Perform adaptive retransmission
22060 * @param[in] RgSchUlSf *sf
22061 * @param[in] RgSchUlAlloc *alloc
22065 PRIVATE Void rgSCHCmnUlUpdAllocRetx
22068 RgSchUlAlloc *alloc
22071 PRIVATE Void rgSCHCmnUlUpdAllocRetx(cell, alloc)
22073 RgSchUlAlloc *alloc;
22076 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
22078 TRC2(rgSCHCmnUlUpdAllocRetx);
/* Snapshot everything needed to re-issue this grant later (adaptive retx)
 * into the HARQ process's reTxAlloc, since the alloc itself is released. */
22080 alloc->hqProc->reTxAlloc.rnti = alloc->rnti;
22081 alloc->hqProc->reTxAlloc.numSb = alloc->numSb;
22082 alloc->hqProc->reTxAlloc.iMcs = alloc->grnt.iMcs;
22084 alloc->hqProc->reTxAlloc.dciFrmt = alloc->grnt.dciFrmt;
22085 alloc->hqProc->reTxAlloc.numLyr = alloc->grnt.numLyr;
22086 alloc->hqProc->reTxAlloc.vrbgStart = alloc->grnt.vrbgStart;
22087 alloc->hqProc->reTxAlloc.numVrbg = alloc->grnt.numVrbg;
22088 alloc->hqProc->reTxAlloc.modOdr = alloc->grnt.modOdr;
22090 //iTbs = rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs);
22091 //iTbs = alloc->grnt.iMcs;
22092 //RGSCH_ARRAY_BOUND_CHECK( 0, rgTbSzTbl[0], iTbs);
22093 alloc->hqProc->reTxAlloc.tbSz = alloc->grnt.datSz;
22094 //rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1]/8;
22095 alloc->hqProc->reTxAlloc.ue = alloc->ue;
22096 alloc->hqProc->reTxAlloc.forMsg3 = alloc->forMsg3;
22097 alloc->hqProc->reTxAlloc.raCb = alloc->raCb;
22099 /* Set as retransmission is pending */
22100 alloc->hqProc->isRetx = TRUE;
/* Detach the HARQ process from this (about-to-be-freed) alloc/subframe. */
22101 alloc->hqProc->alloc = NULLP;
22102 alloc->hqProc->ulSfIdx = RGSCH_INVALID_INFO;
/* NOTE(review): printf in scheduler path — consider RLOG; verify policy. */
22104 printf("Adding Harq Proc Id in the retx list hqProcId %d \n",alloc->grnt.hqProcId);
/* Enqueue the HARQ process on the cell-wide adaptive-retx list. */
22106 cmLListAdd2Tail(&cmnUlCell->reTxLst, &alloc->hqProc->reTxLnk);
22107 alloc->hqProc->reTxLnk.node = (PTR)alloc->hqProc;
22112 * @brief Attempts allocation for msg3s for which ADAP retransmissions
22117 * Function : rgSCHCmnUlAdapRetxAlloc
22119 * Attempts allocation for msg3s for which ADAP retransmissions
22122 * @param[in] RgSchCellCb *cell
22123 * @param[in] RgSchUlSf *sf
22124 * @param[in] RgSchUlHqProcCb *proc;
22125 * @param[in] RgSchUlHole *hole;
/* Returns TRUE on successful adaptive-retx allocation, FALSE when PDCCH or
 * UL resources are unavailable (PDCCH is returned to the pool on failure). */
22129 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc
22133 RgSchUlHqProcCb *proc,
22137 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole)
22140 RgSchUlHqProcCb *proc;
22144 U8 numSb = proc->reTxAlloc.numSb;
22145 U8 iMcs = proc->reTxAlloc.iMcs;
22146 CmLteTimingInfo frm = cell->crntTime;
22147 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22150 RgSchUlAlloc *alloc;
22151 TRC2(rgSCHCmnUlAdapRetxAlloc);
22153 /* Fetch PDCCH for msg3 */
22154 /* ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
22155 /* Introduced timing delta for UL control */
/* Advance to the DL subframe that carries the UL grant (control delta). */
22156 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
22157 dlSf = rgSCHUtlSubFrmGet(cell, frm);
22158 pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
/* No PDCCH available: cannot signal the grant, bail out. */
22159 if (pdcch == NULLP)
22164 /* Fetch UL Alloc for msg3 */
/* Hole must be at least as wide as the original allocation's subband count. */
22165 if (numSb <= hole->num)
22167 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
/* Allocation failed: release the PDCCH taken above before returning. */
22172 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22173 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
22174 "UL Alloc fail for msg3 retx for rnti: %d\n",
22175 proc->reTxAlloc.rnti);
22179 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
22180 alloc->grnt.iMcs = iMcs;
22181 alloc->grnt.datSz = proc->reTxAlloc.tbSz;
22184 //RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
22186 /* Fill UL Alloc for msg3 */
22187 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
22188 alloc->grnt.nDmrs = 0;
22189 alloc->grnt.hop = 0;
22190 alloc->grnt.delayBit = 0;
22191 alloc->grnt.isRtx = TRUE;
22192 proc->ulSfIdx = cellUl->schdIdx;
22194 proc->schdTime = cellUl->schdTime;
22195 alloc->grnt.hqProcId = proc->procId;
/* Restore grant parameters snapshot taken by rgSCHCmnUlUpdAllocRetx. */
22196 alloc->grnt.dciFrmt = proc->reTxAlloc.dciFrmt;
22197 alloc->grnt.numLyr = proc->reTxAlloc.numLyr;
22198 alloc->grnt.vrbgStart = proc->reTxAlloc.vrbgStart;
22199 alloc->grnt.numVrbg = proc->reTxAlloc.numVrbg;
22200 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
22201 alloc->grnt.modOdr = proc->reTxAlloc.modOdr;
22203 /* TODO : Hardcoding these as of now */
22204 alloc->grnt.hop = 0;
22205 alloc->grnt.SCID = 0;
22206 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
22207 alloc->grnt.PMI = 0;
22208 alloc->grnt.uciOnxPUSCH = 0;
22210 alloc->rnti = proc->reTxAlloc.rnti;
22211 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
22212 alloc->ue = proc->reTxAlloc.ue;
22213 alloc->pdcch = pdcch;
22214 alloc->forMsg3 = proc->reTxAlloc.forMsg3;
22215 alloc->raCb = proc->reTxAlloc.raCb;
22216 alloc->hqProc = proc;
22217 alloc->isAdaptive = TRUE;
22219 sf->totPrb += alloc->grnt.numRb;
22221 /* FIX : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
/* Msg3 path (raCb present): record the grant and the time at which the UE
 * will actually transmit Msg3. */
22224 alloc->raCb->msg3Grnt= alloc->grnt;
22226 /* To the crntTime, add the time at which UE will
22227 * actually send MSG3 */
22228 alloc->raCb->msg3AllocTime = cell->crntTime;
22229 RGSCH_INCR_SUB_FRAME(alloc->raCb->msg3AllocTime, RG_SCH_CMN_MIN_RETXMSG3_RECP_INTRVL);
22231 alloc->raCb->msg3AllocTime = cellUl->schdTime;
22233 rgSCHCmnUlAdapRetx(alloc, proc);
22234 /* Fill PDCCH with alloc info */
/* DCI format 0 carries the UL retransmission grant. */
22235 pdcch->rnti = alloc->rnti;
22236 pdcch->dci.dciFormat = TFU_DCI_FORMAT_0;
22237 pdcch->dci.u.format0Info.hoppingEnbld = alloc->grnt.hop;
22238 pdcch->dci.u.format0Info.rbStart = alloc->grnt.rbStart;
22239 pdcch->dci.u.format0Info.numRb = alloc->grnt.numRb;
22240 pdcch->dci.u.format0Info.mcs = alloc->grnt.iMcsCrnt;
22241 pdcch->dci.u.format0Info.ndi = alloc->hqProc->ndi;
22242 pdcch->dci.u.format0Info.nDmrs = alloc->grnt.nDmrs;
22243 pdcch->dci.u.format0Info.tpcCmd = alloc->grnt.tpc;
22247 /* ulIdx setting for cfg 0 shall be appropriately fixed thru ccpu00109015 */
22248 pdcch->dci.u.format0Info.ulIdx = RG_SCH_ULIDX_MSB;
22249 pdcch->dci.u.format0Info.dai = RG_SCH_MAX_DAI_IDX;
22252 pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_0];
/* Non-Msg3 (dedicated UE) path: update the UE's UL allocation context. */
22256 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue,cell);
22258 alloc->ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
22261 ue->ul.nPrb = alloc->grnt.numRb;
22263 ueUl->alloc.alloc = alloc;
22264 /* FIx: Removed the call to rgSCHCmnUlAdapRetx */
22265 rgSCHCmnUlUeFillAllocInfo(cell, alloc->ue);
22266 /* Setting csireq as false for Adaptive Retx*/
22267 ueUl->alloc.alloc->pdcch->dci.u.format0Info.cqiReq = RG_SCH_APCQI_NO;
22268 pdcch->dciNumOfBits = alloc->ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
22270 /* Reset as retransmission is done */
22271 proc->isRetx = FALSE;
22273 else /* Intg fix */
/* Hole too small for required subbands: give the PDCCH back, report FALSE. */
22275 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22276 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
22277 "Num SB not suffiecient for adap retx for rnti: %d",
22278 proc->reTxAlloc.rnti);
22284 /* Fix: syed Adaptive Msg3 Retx crash. */
22286 * @brief Releases all Adaptive Retx HqProcs which failed for
22287 * allocations in this scheduling occassion.
22291 * Function : rgSCHCmnUlSfRlsRetxProcs
22294 * @param[in] RgSchCellCb *cell
22295 * @param[in] RgSchUlSf *sf
22300 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs
22306 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs(cell, sf)
22313 RgSchUlHqProcCb *proc;
22314 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22316 TRC2(rgSCHCmnUlSfRlsRetxProcs);
22318 cp = &(cellUl->reTxLst);
/* Drain the cell-wide adaptive-retx list, unlinking each HARQ process and
 * clearing its list-node back-pointer (see ccpu00137834). */
22322 proc = (RgSchUlHqProcCb *)node->node;
22324 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
22325 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22326 proc->reTxLnk.node = (PTR)NULLP;
22333 * @brief Attempts allocation for UEs for which retransmissions
22338 * Function : rgSCHCmnUlSfReTxAllocs
22340 * Attempts allocation for UEs for which retransmissions
22343 * @param[in] RgSchCellCb *cell
22344 * @param[in] RgSchUlSf *sf
22348 PRIVATE Void rgSCHCmnUlSfReTxAllocs
22354 PRIVATE Void rgSCHCmnUlSfReTxAllocs(cell, sf)
22361 RgSchUlHqProcCb *proc;
22364 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
22365 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22366 TRC2(rgSCHCmnUlSfReTxAllocs);
22368 cp = &(cellUl->reTxLst);
/* For each HARQ process awaiting adaptive retx, try to place it in sf. */
22372 proc = (RgSchUlHqProcCb *)node->node;
22373 ue = proc->reTxAlloc.ue;
22375 /*ccpu00106104 MOD added check for AckNackRep */
22376 /*added check for acknack so that adaptive retx considers ue
22377 inactivity due to ack nack repetition*/
/* Skip UEs that are in a measurement gap or ACK/NACK-repetition window. */
22378 if((ue != NULLP) &&
22379 ((ue->measGapCb.isMeasuring == TRUE)||
22380 (ue->ackNakRepCb.isAckNakRep == TRUE)))
22384 /* Fix for ccpu00123917: Check if maximum allocs per UL sf have been exhausted */
22385 if (((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
22386 || (sf->allocDb->count == schCmnCell->ul.maxAllocPerUlSf))
22388 /* No more UL BW then return */
22391 /* perform adaptive retx for UE's */
/* On allocation failure, leave the proc queued for a later occasion. */
22392 if (rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole) == FALSE)
22396 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
/* Success: remove from the retx list and reset the node back-pointer. */
22397 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22398 /* Fix: syed Adaptive Msg3 Retx crash. */
22399 proc->reTxLnk.node = (PTR)NULLP;
22405 * @brief Handles RB allocation for downlink.
22409 * Function : rgSCHCmnDlRbAlloc
22411 * Invoking Module Processing:
22412 * - This function is invoked for DL RB allocation
22414 * Processing Steps:
22415 * - If cell is frequency selecive,
22416 * - Call rgSCHDlfsAllocRb().
22418 * - Call rgSCHCmnNonDlfsRbAlloc().
22420 * @param[in] RgSchCellCb *cell
22421 * @param[in] RgSchDlRbAllocInfo *allocInfo
22426 PRIVATE Void rgSCHCmnDlRbAlloc
22429 RgSchCmnDlRbAllocInfo *allocInfo
22432 PRIVATE Void rgSCHCmnDlRbAlloc(cell, allocInfo)
22434 RgSchCmnDlRbAllocInfo *allocInfo;
22437 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
22438 TRC2(rgSCHCmnDlRbAlloc);
/* Dispatch: frequency-selective cells go through the DLFS scheduler API,
 * all others through the common non-DLFS allocator. */
22440 if (cellSch->dl.isDlFreqSel)
/* NOTE(review): printf tagged 5GTF_ERROR looks like leftover debug output. */
22442 printf("5GTF_ERROR DLFS SCH Enabled\n");
22443 cellSch->apisDlfs->rgSCHDlfsAllocRb(cell, allocInfo);
22447 rgSCHCmnNonDlfsRbAlloc(cell, allocInfo);
22455 * @brief Determines number of RBGs and RBG subset sizes for the given DL
22456 * bandwidth and rbgSize
22459 * Function : rgSCHCmnDlGetRbgInfo
22462 * Processing Steps:
22463 * - Fill-up rbgInfo data structure for given DL bandwidth and rbgSize
22465 * @param[in] U8 dlTotalBw
22466 * @param[in] U8 dlSubsetBw
22467 * @param[in] U8 maxRaType1SubsetBw
22468 * @param[in] U8 rbgSize
22469 * @param[out] RgSchBwRbgInfo *rbgInfo
22473 PUBLIC Void rgSCHCmnDlGetRbgInfo
22477 U8 maxRaType1SubsetBw,
22479 RgSchBwRbgInfo *rbgInfo
22482 PUBLIC Void rgSCHCmnDlGetRbgInfo(dlTotalBw, dlSubsetBw, maxRaType1SubsetBw,
22486 U8 maxRaType1SubsetBw;
22488 RgSchBwRbgInfo *rbgInfo;
22491 #ifdef RGSCH_SPS_UNUSED
/* Derived quantities for RA-type-1 subset sizing; the last RBG may be
 * smaller than rbgSize when the bandwidth is not a multiple of rbgSize. */
22493 U8 lastRbgIdx = ((dlTotalBw + rbgSize - 1)/rbgSize) - 1;
22494 U8 currRbgSize = rbgSize;
22495 U8 subsetSizeIdx = 0;
22496 U8 subsetSize[RG_SCH_NUM_RATYPE1_SUBSETS] = {0};
22497 U8 lastRbgSize = rbgSize - (dlTotalBw - ((dlTotalBw/rbgSize) * rbgSize));
22498 U8 numRaType1Rbgs = (maxRaType1SubsetBw + rbgSize - 1)/rbgSize;
22501 /* Compute maximum number of SPS RBGs for the cell */
/* Ceiling division: number of RBGs covering dlSubsetBw. */
22502 rbgInfo->numRbgs = ((dlSubsetBw + rbgSize - 1)/rbgSize);
22504 #ifdef RGSCH_SPS_UNUSED
22505 /* Distribute RBGs across subsets except last RBG */
22506 for (;idx < numRaType1Rbgs - 1; ++idx)
22508 subsetSize[subsetSizeIdx] += currRbgSize;
22509 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22512 /* Computation for last RBG */
22513 if (idx == lastRbgIdx)
22515 currRbgSize = lastRbgSize;
22517 subsetSize[subsetSizeIdx] += currRbgSize;
22518 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22521 /* Update the computed sizes */
22522 #ifdef RGSCH_SPS_UNUSED
22523 rbgInfo->lastRbgSize = currRbgSize;
22525 rbgInfo->lastRbgSize = rbgSize -
22526 (dlSubsetBw - ((dlSubsetBw/rbgSize) * rbgSize));
22527 #ifdef RGSCH_SPS_UNUSED
22528 cmMemcpy((U8 *)rbgInfo->rbgSubsetSize, (U8 *) subsetSize, 4 * sizeof(U8));
/* numRbs is clamped to the total bandwidth. */
22530 rbgInfo->numRbs = (rbgInfo->numRbgs * rbgSize > dlTotalBw) ?
22531 dlTotalBw:(rbgInfo->numRbgs * rbgSize);
22532 rbgInfo->rbgSize = rbgSize;
22536 * @brief Handles RB allocation for Resource allocation type 0
22540 * Function : rgSCHCmnDlRaType0Alloc
22542 * Invoking Module Processing:
22543 * - This function is invoked for DL RB allocation for resource allocation
22546 * Processing Steps:
22547 * - Determine the available positions in the rbgMask.
22548 * - Allocate RBGs in the available positions.
22549 * - Update RA Type 0, RA Type 1 and RA type 2 masks.
22551 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22552 * @param[in] U8 rbsReq
22553 * @param[in] RgSchBwRbgInfo *rbgInfo
22554 * @param[out] U8 *numAllocRbs
22555 * @param[out] RgSchDlSfAllocInfo *resAllocInfo
22556 * @param[in] Bool isPartialAlloc
22562 PUBLIC U8 rgSCHCmnDlRaType0Alloc
22564 RgSchDlSfAllocInfo *allocedInfo,
22566 RgSchBwRbgInfo *rbgInfo,
22568 RgSchDlSfAllocInfo *resAllocInfo,
22569 Bool isPartialAlloc
22572 PUBLIC U8 rgSCHCmnDlRaType0Alloc(allocedInfo, rbsReq, rbgInfo,
22573 numAllocRbs, resAllocInfo, isPartialAlloc)
22574 RgSchDlSfAllocInfo *allocedInfo;
22576 RgSchBwRbgInfo *rbgInfo;
22578 RgSchDlSfAllocInfo *resAllocInfo;
22579 Bool isPartialAlloc;
22582 /* Note: This function atttempts allocation only full allocation */
22583 U32 remNumRbs, rbgPosInRbgMask, ueRaType2Mask;
22584 U8 type2MaskIdx, cnt, rbIdx;
22586 U8 bestNumAvailRbs = 0;
22588 U8 numAllocRbgs = 0;
22589 U8 rbgSize = rbgInfo->rbgSize;
22590 U32 *rbgMask = &(resAllocInfo->raType0Mask);
22591 #ifdef RGSCH_SPS_UNUSED
22594 U32 *raType1Mask = resAllocInfo->raType1Mask;
22595 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22597 U32 *raType2Mask = resAllocInfo->raType2Mask;
22599 U32 allocedMask = allocedInfo->raType0Mask;
22601 maskSize = rbgInfo->numRbgs;
/* Count already-allocated RBGs; a full mask means nothing left to give. */
22604 RG_SCH_CMN_DL_COUNT_ONES(allocedMask, maskSize, &usedRbs);
22605 if (maskSize == usedRbs)
22607 /* All RBGs are allocated, including the last one */
/* Available RBs from all free full-size RBGs; the (possibly shorter) last
 * RBG is accounted separately below. */
22612 remNumRbs = (maskSize - usedRbs - 1) * rbgSize; /* vamsee: removed minus 1 */
22614 /* If last RBG is available, add last RBG size */
22615 if (!(allocedMask & (1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(maskSize - 1))))
22617 remNumRbs += rbgInfo->lastRbgSize;
22621 /* If complete allocation is needed, check if total requested RBs are available else
22622 * check the best available RBs */
22623 if (!isPartialAlloc)
22625 if (remNumRbs >= rbsReq)
22627 bestNumAvailRbs = rbsReq;
/* Partial allocation: take as much as is available, capped at the request. */
22632 bestNumAvailRbs = remNumRbs > rbsReq ? rbsReq : remNumRbs;
22635 /* Allocate for bestNumAvailRbs */
22636 if (bestNumAvailRbs)
/* Scan all RBGs except the last; grab every free one until satisfied. */
22638 for (rbg = 0; rbg < maskSize - 1; ++rbg)
22640 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22641 if (!(allocedMask & rbgPosInRbgMask))
22643 /* Update RBG mask */
22644 *rbgMask |= rbgPosInRbgMask;
22646 /* Compute RB index of the first RB of the RBG allocated */
22647 rbIdx = rbg * rbgSize;
/* Mirror the allocation into the RA type 1/2 masks RB by RB so the other
 * allocation types see these RBs as taken. */
22649 for (cnt = 0; cnt < rbgSize; ++cnt)
22651 #ifdef RGSCH_SPS_UNUSED
22652 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22654 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22655 #ifdef RGSCH_SPS_UNUSED
22656 /* Update RBG mask for RA type 1 */
22657 raType1Mask[rbgSubset] |= ueRaType1Mask;
22658 raType1UsedRbs[rbgSubset]++;
22660 /* Update RA type 2 mask */
22661 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22664 *numAllocRbs += rbgSize;
22665 remNumRbs -= rbgSize;
22667 if (*numAllocRbs >= bestNumAvailRbs)
22673 /* If last RBG available and allocation is not completed, allocate
22675 if (*numAllocRbs < bestNumAvailRbs)
22677 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22678 *rbgMask |= rbgPosInRbgMask;
22679 *numAllocRbs += rbgInfo->lastRbgSize;
22681 /* Compute RB index of the first RB of the last RBG */
22682 rbIdx = ((rbgInfo->numRbgs - 1 ) * rbgSize ); /* removed minus 1 vamsee */
22684 for (cnt = 0; cnt < rbgInfo->lastRbgSize; ++cnt)
22686 #ifdef RGSCH_SPS_UNUSED
22687 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22689 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22690 #ifdef RGSCH_SPS_UNUSED
22691 /* Update RBG mask for RA type 1 */
22692 raType1Mask[rbgSubset] |= ueRaType1Mask;
22693 raType1UsedRbs[rbgSubset]++;
22695 /* Update RA type 2 mask */
22696 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22699 remNumRbs -= rbgInfo->lastRbgSize;
22702 /* Note: this should complete allocation, not checking for the
/* Returns the number of RBGs consumed (RBs are reported via *numAllocRbs). */
22706 RETVALUE(numAllocRbgs);
22709 #ifdef RGSCH_SPS_UNUSED
22711 * @brief Handles RB allocation for Resource allocation type 1
22715 * Function : rgSCHCmnDlRaType1Alloc
22717 * Invoking Module Processing:
22718 * - This function is invoked for DL RB allocation for resource allocation
22721 * Processing Steps:
22722 * - Determine the available positions in the subsets.
22723 * - Allocate RB in the available subset.
22724 * - Update RA Type1, RA type 0 and RA type 2 masks.
22726 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22727 * @param[in] U8 rbsReq
22728 * @param[in] RgSchBwRbgInfo *rbgInfo
22729 * @param[in] U8 startRbgSubset
22730 * @param[in] U8 *allocRbgSubset
22731 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22732 * @param[in] Bool isPartialAlloc
22735 * Number of allocated RBs
22739 PUBLIC U8 rgSCHCmnDlRaType1Alloc
22741 RgSchDlSfAllocInfo *allocedInfo,
22743 RgSchBwRbgInfo *rbgInfo,
22745 U8 *allocRbgSubset,
22746 RgSchDlSfAllocInfo *resAllocInfo,
22747 Bool isPartialAlloc
22750 PUBLIC U8 rgSCHCmnDlRaType1Alloc(allocedInfo, rbsReq,rbgInfo,startRbgSubset,
22751 allocRbgSubset, resAllocInfo, isPartialAlloc)
22752 RgSchDlSfAllocInfo *allocedInfo;
22754 RgSchBwRbgInfo *rbgInfo;
22756 U8 *allocRbgSubset;
22757 RgSchDlSfAllocInfo *resAllocInfo;
22758 Bool isPartialAlloc;
22761 /* Note: This function atttempts only full allocation */
22762 U8 *rbgSubsetSzArr;
22763 U8 type2MaskIdx, subsetIdx, rbIdx, rbInSubset, rbgInSubset;
22764 U8 offset, rbg, maskSize, bestSubsetIdx;
22766 U8 bestNumAvailRbs = 0;
22767 U8 numAllocRbs = 0;
22768 U32 ueRaType2Mask, ueRaType0Mask, rbPosInSubset;
22769 U32 remNumRbs, allocedMask;
22771 U8 rbgSize = rbgInfo->rbgSize;
22772 U8 rbgSubset = startRbgSubset;
22773 U32 *rbgMask = &resAllocInfo->raType0Mask;
22774 U32 *raType1Mask = resAllocInfo->raType1Mask;
22775 U32 *raType2Mask = resAllocInfo->raType2Mask;
22776 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22777 U32 *allocMask = allocedInfo->raType1Mask;
22779 /* Initialize the subset size Array */
22780 rbgSubsetSzArr = rbgInfo->rbgSubsetSize;
22782 /* Perform allocation for RA type 1 */
/* Phase 1: starting at startRbgSubset, pick the subset that fully satisfies
 * rbsReq; for partial allocation, remember the subset with the most free RBs.
 * NOTE(review): usedRbs is read with subsetIdx while masks use rbgSubset —
 * the two indices differ when startRbgSubset != 0; confirm intended. */
22783 for (subsetIdx = 0;subsetIdx < rbgSize; ++subsetIdx)
22785 allocedMask = allocMask[rbgSubset];
22786 maskSize = rbgSubsetSzArr[rbgSubset];
22788 /* Determine number of available RBs in the subset */
22789 usedRbs = allocedInfo->raType1UsedRbs[subsetIdx];
22790 remNumRbs = maskSize - usedRbs;
22792 if (remNumRbs >= rbsReq)
22794 bestNumAvailRbs = rbsReq;
22795 bestSubsetIdx = rbgSubset;
22798 else if (isPartialAlloc && (remNumRbs > bestNumAvailRbs))
22800 bestNumAvailRbs = remNumRbs;
22801 bestSubsetIdx = rbgSubset;
22804 rbgSubset = (rbgSubset + 1) % rbgSize;
22805 } /* End of for (each rbgsubset) */
22807 if (bestNumAvailRbs)
22809 /* Initialize alloced mask and subsetSize depending on the RBG
22810 * subset of allocation */
/* Phase 2: sweep the chosen subset's mask, claiming free RB positions and
 * mirroring each claim into the RA type 0 and type 2 masks. */
22812 maskSize = rbgSubsetSzArr[bestSubsetIdx];
22813 allocedMask = allocMask[bestSubsetIdx];
22814 RG_SCH_CMN_DL_GET_START_POS(allocedMask, maskSize,
22816 for (; startIdx < rbgSize; ++startIdx, ++startPos)
22818 for (rbInSubset = startPos; rbInSubset < maskSize;
22819 rbInSubset = rbInSubset + rbgSize)
22821 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
22822 if (!(allocedMask & rbPosInSubset))
22824 raType1Mask[bestSubsetIdx] |= rbPosInSubset;
22825 raType1UsedRbs[bestSubsetIdx]++;
22827 /* Compute RB index value for the RB being allocated */
/* Map (subset, position-in-subset) back to the absolute RB index. */
22828 rbgInSubset = rbInSubset /rbgSize;
22829 offset = rbInSubset % rbgSize;
22830 rbg = (rbgInSubset * rbgSize) + bestSubsetIdx;
22831 rbIdx = (rbg * rbgSize) + offset;
22833 /* Update RBG mask for RA type 0 allocation */
22834 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22835 *rbgMask |= ueRaType0Mask;
22837 /* Update RA type 2 mask */
22838 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22839 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22841 /* Update the counters */
22844 if (numAllocRbs == bestNumAvailRbs)
22849 } /* End of for (each position in the subset mask) */
22850 if (numAllocRbs == bestNumAvailRbs)
22854 } /* End of for startIdx = 0 to rbgSize */
22856 *allocRbgSubset = bestSubsetIdx;
22857 } /* End of if (bestNumAvailRbs) */
22859 RETVALUE(numAllocRbs);
22863 * @brief Handles RB allocation for Resource allocation type 2
22867 * Function : rgSCHCmnDlRaType2Alloc
22869 * Invoking Module Processing:
22870 * - This function is invoked for DL RB allocation for resource allocation
22873 * Processing Steps:
22874 * - Determine the available positions in the mask
22875 * - Allocate best fit cosecutive RBs.
22876 * - Update RA Type2, RA type 1 and RA type 0 masks.
22878 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22879 * @param[in] U8 rbsReq
22880 * @param[in] RgSchBwRbgInfo *rbgInfo
22881 * @param[out] U8 *rbStart
22882 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22883 * @param[in] Bool isPartialAlloc
22886 * Number of allocated RBs
22890 PUBLIC U8 rgSCHCmnDlRaType2Alloc
22892 RgSchDlSfAllocInfo *allocedInfo,
22894 RgSchBwRbgInfo *rbgInfo,
22896 RgSchDlSfAllocInfo *resAllocInfo,
22897 Bool isPartialAlloc
22900 PUBLIC U8 rgSCHCmnDlRaType2Alloc(allocedInfo, rbsReq, rbgInfo, rbStart,
22901 resAllocInfo, isPartialAlloc)
22902 RgSchDlSfAllocInfo *allocedInfo;
22904 RgSchBwRbgInfo *rbgInfo;
22906 RgSchDlSfAllocInfo *resAllocInfo;
22907 Bool isPartialAlloc;
22910 U8 numAllocRbs = 0;
22912 U8 rbgSize = rbgInfo->rbgSize;
22913 U32 *rbgMask = &resAllocInfo->raType0Mask;
22914 #ifdef RGSCH_SPS_UNUSED
22915 U32 *raType1Mask = resAllocInfo->raType1Mask;
22917 U32 *raType2Mask = resAllocInfo->raType2Mask;
22918 #ifdef RGSCH_SPS_UNUSED
22919 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22921 U32 *allocedMask = allocedInfo->raType2Mask;
22923 /* Note: This function atttempts only full allocation */
/* Best-fit search for a contiguous RB hole; fills *rbStart/numAllocRbs and
 * marks the RA type 2 mask inside the helper. */
22924 rgSCHCmnDlGetBestFitHole(allocedMask, rbgInfo->numRbs,
22925 raType2Mask, rbsReq, rbStart, &numAllocRbs, isPartialAlloc);
22928 /* Update the allocation in RA type 0 and RA type 1 masks */
/* Mirror each allocated RB into the RA type 0 (and, when SPS is compiled in,
 * RA type 1) masks so other allocation types see them as taken. */
22929 U8 rbCnt = numAllocRbs;
22930 #ifdef RGSCH_SPS_UNUSED
22939 /* Update RBG mask for RA type 0 allocation */
22940 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22941 *rbgMask |= ueRaType0Mask;
22943 #ifdef RGSCH_SPS_UNUSED
22944 /* Update RBG mask for RA type 1 */
22945 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22946 raType1Mask[rbgSubset] |= ueRaType1Mask;
22947 raType1UsedRbs[rbgSubset]++;
22949 /* Update the counters */
22955 RETVALUE(numAllocRbs);
22959 * @brief Determines RA type 0 mask from given RB index.
22963 * Function : rgSCHCmnGetRaType0Mask
22966 * Processing Steps:
22967 * - Determine RA Type 0 mask for given rbIdx and rbg size.
22969 * @param[in] U8 rbIdx
22970 * @param[in] U8 rbgSize
22971 * @return U32 RA type 0 mask
22974 PRIVATE U32 rgSCHCmnGetRaType0Mask
22980 PRIVATE U32 rgSCHCmnGetRaType0Mask(rbIdx, rbgSize)
22986 U32 rbgPosInRbgMask = 0;
22988 rbg = rbIdx/rbgSize;
22989 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22991 RETVALUE(rbgPosInRbgMask);
22994 #ifdef RGSCH_SPS_UNUSED
22996 * @brief Determines RA type 1 mask from given RB index.
23000 * Function : rgSCHCmnGetRaType1Mask
23003 * Processing Steps:
23004 * - Determine RA Type 1 mask for given rbIdx and rbg size.
23006 * @param[in] U8 rbIdx
23007 * @param[in] U8 rbgSize
23008 * @param[out] U8 *type1Subset
23009 * @return U32 RA type 1 mask
23012 PRIVATE U32 rgSCHCmnGetRaType1Mask
23019 PRIVATE U32 rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, type1Subset)
23025 U8 rbg, rbgSubset, rbgInSubset, offset, rbInSubset;
23028 rbg = rbIdx/rbgSize;
23029 rbgSubset = rbg % rbgSize;
23030 rbgInSubset = rbg/rbgSize;
23031 offset = rbIdx % rbgSize;
23032 rbInSubset = rbgInSubset * rbgSize + offset;
23033 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
23035 *type1Subset = rbgSubset;
23036 RETVALUE(rbPosInSubset);
23038 #endif /* RGSCH_SPS_UNUSED */
23040 * @brief Determines RA type 2 mask from given RB index.
23044 * Function : rgSCHCmnGetRaType2Mask
23047 * Processing Steps:
23048 * - Determine RA Type 2 mask for given rbIdx and rbg size.
23050 * @param[in] U8 rbIdx
23051 * @param[out] U8 *maskIdx
23052 * @return U32 RA type 2 mask
23055 PRIVATE U32 rgSCHCmnGetRaType2Mask
23061 PRIVATE U32 rgSCHCmnGetRaType2Mask(rbIdx, maskIdx)
23068 *maskIdx = rbIdx / 32;
23069 rbPosInType2 = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbIdx % 32);
23071 RETVALUE(rbPosInType2);
23075 * @brief Performs resource allocation for a non-SPS UE in SPS bandwidth
23079 * Function : rgSCHCmnAllocUeInSpsBw
23082 * Processing Steps:
23083 * - Determine allocation for the UE.
23084 * - Use resource allocation type 0, 1 and 2 for allocation
23085 * within maximum SPS bandwidth.
23087 * @param[in] RgSchDlSf *dlSf
23088 * @param[in] RgSchCellCb *cell
23089 * @param[in] RgSchUeCb *ue
23090 * @param[in] RgSchDlRbAlloc *rbAllocInfo
23091 * @param[in] Bool isPartialAlloc
23097 PUBLIC Bool rgSCHCmnAllocUeInSpsBw
23102 RgSchDlRbAlloc *rbAllocInfo,
23103 Bool isPartialAlloc
23106 PUBLIC Bool rgSCHCmnAllocUeInSpsBw(dlSf, cell, ue, rbAllocInfo, isPartialAlloc)
23110 RgSchDlRbAlloc *rbAllocInfo;
23111 Bool isPartialAlloc;
23114 U8 rbgSize = cell->rbgSize;
23115 U8 numAllocRbs = 0;
23116 U8 numAllocRbgs = 0;
23118 U8 idx, noLyr, iTbs;
23119 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
23120 RgSchDlSfAllocInfo *dlSfAlloc = &rbAllocInfo->dlSf->dlSfAllocInfo;
23121 RgSchBwRbgInfo *spsRbgInfo = &cell->spsBwRbgInfo;
23123 /* SPS_FIX : Check if this Hq proc is scheduled */
23124 if ((0 == rbAllocInfo->tbInfo[0].schdlngForTb) &&
23125 (0 == rbAllocInfo->tbInfo[1].schdlngForTb))
23130 /* Check if the requirement can be accomodated in SPS BW */
23131 if (dlSf->spsAllocdBw == spsRbgInfo->numRbs)
23133 /* SPS Bandwidth has been exhausted: no further allocations possible */
23136 if (!isPartialAlloc)
23138 if((dlSf->spsAllocdBw + rbAllocInfo->rbsReq) > spsRbgInfo->numRbs)
23144 /* Perform allocation for RA type 0 if rbsReq is multiple of RBG size (also
23145 * if RBG size = 1) */
23146 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23148 rbAllocInfo->rbsReq += (rbgSize - rbAllocInfo->rbsReq % rbgSize);
23149 numAllocRbgs = rgSCHCmnDlRaType0Alloc(dlSfAlloc,
23150 rbAllocInfo->rbsReq, spsRbgInfo, &numAllocRbs,
23151 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23153 #ifdef RGSCH_SPS_UNUSED
23154 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23156 /* If no RBS could be allocated, attempt RA TYPE 1 */
23158 numAllocRbs = rgSCHCmnDlRaType1Alloc(dlSfAlloc,
23159 rbAllocInfo->rbsReq, spsRbgInfo, (U8)dlSfAlloc->nxtRbgSubset,
23160 &rbAllocInfo->allocInfo.raType1.rbgSubset,
23161 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23165 dlSfAlloc->nxtRbgSubset =
23166 (rbAllocInfo->allocInfo.raType1.rbgSubset + 1 ) % rbgSize;
23170 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23172 numAllocRbs = rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23173 rbAllocInfo->rbsReq, spsRbgInfo,
23174 &rbStart, &rbAllocInfo->resAllocInfo, isPartialAlloc);
23181 if (!(rbAllocInfo->pdcch =
23182 rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi,\
23183 rbAllocInfo->dciFormat, FALSE)))
23185 /* Note: Returning TRUE since PDCCH might be available for another UE */
23189 /* Update Tb info for each scheduled TB */
23190 iTbs = rbAllocInfo->tbInfo[0].iTbs;
23191 noLyr = rbAllocInfo->tbInfo[0].noLyr;
23192 rbAllocInfo->tbInfo[0].bytesAlloc =
23193 rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;
23195 if (rbAllocInfo->tbInfo[1].schdlngForTb)
23197 iTbs = rbAllocInfo->tbInfo[1].iTbs;
23198 noLyr = rbAllocInfo->tbInfo[1].noLyr;
23199 rbAllocInfo->tbInfo[1].bytesAlloc =
23200 rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;;
23203 /* Update rbAllocInfo with the allocation information */
23204 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23206 rbAllocInfo->allocInfo.raType0.dlAllocBitMask =
23207 rbAllocInfo->resAllocInfo.raType0Mask;
23208 rbAllocInfo->allocInfo.raType0.numDlAlloc = numAllocRbgs;
23210 #ifdef RGSCH_SPS_UNUSED
23211 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23213 rbAllocInfo->allocInfo.raType1.dlAllocBitMask =
23214 rbAllocInfo->resAllocInfo.raType1Mask[rbAllocInfo->allocInfo.raType1.rbgSubset];
23215 rbAllocInfo->allocInfo.raType1.numDlAlloc = numAllocRbs;
23216 rbAllocInfo->allocInfo.raType1.shift = 0;
23219 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23221 rbAllocInfo->allocInfo.raType2.isLocal = TRUE;
23222 rbAllocInfo->allocInfo.raType2.rbStart = rbStart;
23223 rbAllocInfo->allocInfo.raType2.numRb = numAllocRbs;
23226 rbAllocInfo->rbsAlloc = numAllocRbs;
23227 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
23229 /* Update allocation masks for RA types 0, 1 and 2 in DL SF */
23231 /* Update type 0 allocation mask */
23232 dlSfAlloc->raType0Mask |= rbAllocInfo->resAllocInfo.raType0Mask;
23233 #ifdef RGSCH_SPS_UNUSED
23234 /* Update type 1 allocation masks */
23235 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
23237 dlSfAlloc->raType1Mask[idx] |= rbAllocInfo->resAllocInfo.raType1Mask[idx];
23238 dlSfAlloc->raType1UsedRbs[idx] +=
23239 rbAllocInfo->resAllocInfo.raType1UsedRbs[idx];
23242 /* Update type 2 allocation masks */
23243 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
23245 dlSfAlloc->raType2Mask[idx] |= rbAllocInfo->resAllocInfo.raType2Mask[idx];
23248 dlSf->spsAllocdBw += numAllocRbs;
23252 /***********************************************************
23254 * Func : rgSCHCmnDlGetBestFitHole
23257 * Desc : Converts the best fit hole into allocation and returns the
23258 * allocation information.
23268 **********************************************************/
23270 PRIVATE Void rgSCHCmnDlGetBestFitHole
23274 U32 *crntAllocMask,
23278 Bool isPartialAlloc
23281 PRIVATE Void rgSCHCmnDlGetBestFitHole (allocMask, numMaskRbs,
23282 crntAllocMask, rbsReq, allocStart, allocNumRbs, isPartialAlloc)
23285 U32 *crntAllocMask;
23289 Bool isPartialAlloc;
23292 U8 maskSz = (numMaskRbs + 31)/32;
23293 U8 maxMaskPos = (numMaskRbs % 32);
23294 U8 maskIdx, maskPos;
23295 U8 numAvailRbs = 0;
23296 U8 bestAvailNumRbs = 0;
23297 S8 bestStartPos = -1;
23299 U32 tmpMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23300 U32 bestMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23302 *allocNumRbs = numAvailRbs;
23305 for (maskIdx = 0; maskIdx < maskSz; ++maskIdx)
23308 if (maskIdx == (maskSz - 1))
23310 if (numMaskRbs % 32)
23312 maxMaskPos = numMaskRbs % 32;
23315 for (maskPos = 0; maskPos < maxMaskPos; ++maskPos)
23317 if (!(allocMask[maskIdx] & (1 << (31 - maskPos))))
23319 tmpMask[maskIdx] |= (1 << (31 - maskPos));
23320 if (startPos == -1)
23322 startPos = maskIdx * 32 + maskPos;
23325 if (numAvailRbs == rbsReq)
23327 *allocStart = (U8)startPos;
23328 *allocNumRbs = rbsReq;
23334 if (numAvailRbs > bestAvailNumRbs)
23336 bestAvailNumRbs = numAvailRbs;
23337 bestStartPos = startPos;
23338 cmMemcpy((U8 *)bestMask, (U8 *) tmpMask, 4 * sizeof(U32));
23342 cmMemset((U8 *)tmpMask, 0, 4 * sizeof(U32));
23345 if (*allocNumRbs == rbsReq)
23351 if (*allocNumRbs == rbsReq)
23353 /* Convert the hole into allocation */
23354 cmMemcpy((U8 *)crntAllocMask, (U8 *) tmpMask, 4 * sizeof(U32));
23359 if (bestAvailNumRbs && isPartialAlloc)
23361 /* Partial allocation could have been done */
23362 *allocStart = (U8)bestStartPos;
23363 *allocNumRbs = bestAvailNumRbs;
23364 /* Convert the hole into allocation */
23365 cmMemcpy((U8 *)crntAllocMask, (U8 *) bestMask, 4 * sizeof(U32));
23371 #endif /* LTEMAC_SPS */
23373 /***************************************************************************
23375 * NON-DLFS Allocation functions
23377 * *************************************************************************/
23381 * @brief Function to find out code rate
23385 * Function : rgSCHCmnFindCodeRate
23387 * Processing Steps:
23389 * @param[in] RgSchCellCb *cell
23390 * @param[in] RgSchDlSf *dlSf
23391 * @param[in,out] RgSchDlRbAlloc *allocInfo
23396 PRIVATE Void rgSCHCmnFindCodeRate
23400 RgSchDlRbAlloc *allocInfo,
23404 PRIVATE Void rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,idx)
23407 RgSchDlRbAlloc *allocInfo;
23416 /* Adjust the Imcs and bytes allocated also with respect to the adjusted
23417 RBs - Here we will find out the Imcs by identifying first Highest
23418 number of bits compared to the original bytes allocated. */
23420 * @brief Adjust IMCS according to tbSize and ITBS
23424 * Function : rgSCHCmnNonDlfsPbchTbImcsAdj
23426 * Processing Steps:
23427 * - Adjust Imcs according to tbSize and ITBS.
23429 * @param[in,out] RgSchDlRbAlloc *allocInfo
23430 * @param[in] U8 *idx
23434 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj
23437 RgSchDlRbAlloc *allocInfo,
23442 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj(cell,allocInfo, idx, rbsReq)
23444 RgSchDlRbAlloc *allocInfo;
23454 RgSchDlSf *dlSf = allocInfo->dlSf;
23456 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
23457 noLyrs = allocInfo->tbInfo[idx].noLyr;
23459 if((allocInfo->raType == RG_SCH_CMN_RA_TYPE0))
23461 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + dlSf->lstRbgDfct), cell->rbgSize);
23462 noRbs = (noRbgs * cell->rbgSize) - dlSf->lstRbgDfct;
23466 noRbs = allocInfo->rbsReq;
23469 /* This line will help in case if tbs is zero and reduction in MCS is not possible */
23470 if (allocInfo->rbsReq == 0 )
23474 origBytesReq = rgTbSzTbl[noLyrs - 1][tbs][rbsReq - 1]/8;
23476 /* Find out the ITbs & Imcs by identifying first Highest
23477 number of bits compared to the original bytes allocated.*/
23480 if(((rgTbSzTbl[noLyrs - 1][0][noRbs - 1])/8) < origBytesReq)
23482 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyrs - 1], tbs);
23483 while(((rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1])/8) > origBytesReq)
23492 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1]/8;
23493 allocInfo->tbInfo[idx].iTbs = tbs;
23494 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23499 /* Added function to adjust TBSize*/
23501 * @brief Function to adjust the tbsize in case of subframe 0 & 5 when
23502 * we were not able to do RB alloc adjustment by adding extra required Rbs
23506 * Function : rgSCHCmnNonDlfsPbchTbSizeAdj
23508 * Processing Steps:
23510 * @param[in,out] RgSchDlRbAlloc *allocInfo
23511 * @param[in] U8 numOvrlapgPbchRb
23512 * @param[in] U8 idx
23513 * @param[in] U8 pbchSsRsSym
23517 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj
23519 RgSchDlRbAlloc *allocInfo,
23520 U8 numOvrlapgPbchRb,
23526 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,idx,bytesReq)
23527 RgSchDlRbAlloc *allocInfo;
23528 U8 numOvrlapgPbchRb;
23534 U32 reducedTbs = 0;
23538 noLyrs = allocInfo->tbInfo[idx].noLyr;
23540 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
23542 reducedTbs = bytesReq - (((U32)numOvrlapgPbchRb * (U32)pbchSsRsSym * 6)/8);
23544 /* find out the ITbs & Imcs by identifying first Highest
23545 number of bits compared with reduced bits considering the bits that are
23546 reserved for PBCH/PSS/SSS */
23547 if(((rgTbSzTbl[noLyrs - 1][0][allocInfo->rbsReq - 1])/8) < reducedTbs)
23549 while(((rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1])/8) > reducedTbs)
23558 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1]/8;
23559 allocInfo->tbInfo[idx].iTbs = tbs;
23560 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23565 /* Added this function to find num of ovrlapping PBCH rb*/
23567 * @brief Function to find out how many additional rbs are available
23568 * in the entire bw which can be allocated to a UE
23571 * Function : rgSCHCmnFindNumAddtlRbsAvl
23573 * Processing Steps:
23574 * - Calculates number of additional rbs available
23576 * @param[in] RgSchCellCb *cell
23577 * @param[in] RgSchDlSf *dlSf
23578 * @param[in,out] RgSchDlRbAlloc *allocInfo
23579 * @param[out] U8 addtlRbsAvl
23583 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl
23587 RgSchDlRbAlloc *allocInfo
23590 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl(cell,dlSf,allocInfo)
23593 RgSchDlRbAlloc *allocInfo;
23596 U8 addtlRbsAvl = 0;
23598 TRC2(rgSCHCmnFindNumAddtlRbsAvl)
23600 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23602 addtlRbsAvl = (((dlSf->type0End - dlSf->type2End + 1)*\
23603 cell->rbgSize) - dlSf->lstRbgDfct) - allocInfo->rbsReq;
23605 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23607 addtlRbsAvl = (dlSf->bw - dlSf->bwAlloced) - allocInfo->rbsReq;
23610 RETVALUE(addtlRbsAvl);
23613 /* Added this function to find num of ovrlapping PBCH rb*/
23615 * @brief Function to find out how many of the requested RBs are
23616 * falling in the center 6 RBs of the downlink bandwidth.
23619 * Function : rgSCHCmnFindNumPbchOvrlapRbs
23621 * Processing Steps:
23622 * - Calculates number of overlapping rbs
23624 * @param[in] RgSchCellCb *cell
23625 * @param[in] RgSchDlSf *dlSf
23626 * @param[in,out] RgSchDlRbAlloc *allocInfo
23627 * @param[out] U8* numOvrlapgPbchRb
23631 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs
23635 RgSchDlRbAlloc *allocInfo,
23636 U8 *numOvrlapgPbchRb
23639 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,numOvrlapgPbchRb)
23642 RgSchDlRbAlloc *allocInfo;
23643 U8 *numOvrlapgPbchRb;
23646 *numOvrlapgPbchRb = 0;
23647 TRC2(rgSCHCmnFindNumPbchOvrlapRbs)
23648 /*Find if we have already crossed the start boundary for PBCH 6 RBs,
23649 * if yes then lets find the number of RBs which are getting overlapped
23650 * with this allocation.*/
23651 if(dlSf->bwAlloced <= (cell->pbchRbStart))
23653 /*We have not crossed the start boundary of PBCH RBs. Now we need
23654 * to know that if take this allocation then how much PBCH RBs
23655 * are overlapping with this allocation.*/
23656 /* Find out the overlapping RBs in the centre 6 RBs */
23657 if((dlSf->bwAlloced + allocInfo->rbsReq) > cell->pbchRbStart)
23659 *numOvrlapgPbchRb = (dlSf->bwAlloced + allocInfo->rbsReq) - (cell->pbchRbStart);
23660 if(*numOvrlapgPbchRb > 6)
23661 *numOvrlapgPbchRb = 6;
23664 else if ((dlSf->bwAlloced > (cell->pbchRbStart)) &&
23665 (dlSf->bwAlloced < (cell->pbchRbEnd)))
23667 /*We have already crossed the start boundary of PBCH RBs.We need to
23668 * find that if we take this allocation then how much of the RBs for
23669 * this allocation will overlap with PBCH RBs.*/
23670 /* Find out the overlapping RBs in the centre 6 RBs */
23671 if(dlSf->bwAlloced + allocInfo->rbsReq < (cell->pbchRbEnd))
23673 /*If we take this allocation then also we are not crossing the
23674 * end boundary of PBCH 6 RBs.*/
23675 *numOvrlapgPbchRb = allocInfo->rbsReq;
23679 /*If we take this allocation then we are crossing the
23680 * end boundary of PBCH 6 RBs.*/
23681 *numOvrlapgPbchRb = (cell->pbchRbEnd) - dlSf->bwAlloced;
23688 * @brief Performs RB allocation adjustment if the requested RBs are
23689 * falling in the center 6 RBs of the downlink bandwidth.
23692 * Function : rgSCHCmnNonDlfsPbchRbAllocAdj
23694 * Processing Steps:
23695 * - Allocate consecutively available RBs.
23697 * @param[in] RgSchCellCb *cell
23698 * @param[in,out] RgSchDlRbAlloc *allocInfo
23699 * @param[in] U8 pbchSsRsSym
23703 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj
23706 RgSchDlRbAlloc *allocInfo,
23711 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo,pbchSsRsSym)
23713 RgSchDlRbAlloc *allocInfo;
23718 RgSchDlSf *dlSf = allocInfo->dlSf;
23719 U8 numOvrlapgPbchRb = 0;
23720 U8 numOvrlapgAdtlPbchRb = 0;
23722 U8 addtlRbsReq = 0;
23723 U8 moreAddtlRbsReq = 0;
23724 U8 addtlRbsAdd = 0;
23725 U8 moreAddtlRbsAdd = 0;
23733 TRC2(rgSCHCmnNonDlfsPbchRbAllocAdj);
23736 origRbsReq = allocInfo->rbsReq;
23737 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23739 totSym = (cell->isCpDlExtend) ? RGSCH_TOT_NUM_SYM_EXTCP : RGSCH_TOT_NUM_SYM_NORCP;
23741 /* Additional RBs are allocated by considering the loss due to
23742 the reserved symbols for CFICH, PBCH, PSS, SSS and cell specific RS */
23744 divResult = (numOvrlapgPbchRb * pbchSsRsSym)/totSym;
23745 if((numOvrlapgPbchRb * pbchSsRsSym) % totSym)
23749 addtlRbsReq = divResult;
23751 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, addtlRbsReq, addtlRbsAdd)
23753 /*Now RBs requires is original requested RBs + these additional RBs to make
23754 * up for PSS/SSS/BCCH.*/
23755 allocInfo->rbsReq = allocInfo->rbsReq + addtlRbsAdd;
23757 /*Check if with these additional RBs we have taken up, these are also falling
23758 * under PBCH RBs range, if yes then we would need to account for
23759 * PSS/BSS/BCCH for these additional RBs too.*/
23760 if(addtlRbsAdd && ((dlSf->bwAlloced + allocInfo->rbsReq - addtlRbsAdd) < (cell->pbchRbEnd)))
23762 if((dlSf->bwAlloced + allocInfo->rbsReq) <= (cell->pbchRbEnd))
23764 /*With additional RBs taken into account, we are not crossing the
23765 * PBCH RB end boundary.Thus here we need to account just for
23766 * overlapping PBCH RBs for these additonal RBs.*/
23767 divResult = (addtlRbsAdd * pbchSsRsSym)/totSym;
23768 if((addtlRbsAdd * pbchSsRsSym) % totSym)
23773 moreAddtlRbsReq = divResult;
23775 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23777 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
23782 /*Here we have crossed the PBCH RB end boundary, thus we need to take
23783 * into account the overlapping RBs for additional RBs which will be
23784 * subset of addtlRbs.*/
23785 numOvrlapgAdtlPbchRb = (cell->pbchRbEnd) - ((dlSf->bwAlloced + allocInfo->rbsReq) - addtlRbsAdd);
23787 divResult = (numOvrlapgAdtlPbchRb * pbchSsRsSym)/totSym;
23788 if((numOvrlapgAdtlPbchRb * pbchSsRsSym) % totSym)
23793 moreAddtlRbsReq = divResult;
23795 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23797 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
23800 if (isBcchPcch == TRUE)
23805 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23808 /* This case might be for Imcs value 6 and NPrb = 1 case - Not
23809 Adjusting either RBs or Imcs or Bytes Allocated */
23810 allocInfo->rbsReq = allocInfo->rbsReq - addtlRbsAdd - moreAddtlRbsAdd;
23812 else if(tbs && ((0 == addtlRbsAdd) && (moreAddtlRbsAdd == 0)))
23814 /*In case of a situation where we the entire bandwidth is already occupied
23815 * and we dont have room to add additional Rbs then in order to decrease the
23816 * code rate we reduce the tbsize such that we reduce the present calculated
23817 * tbsize by number of bytes that would be occupied by PBCH/PSS/SSS in overlapping
23818 * rbs and find the nearest tbsize which would be less than this deduced value*/
23820 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23822 noLyr = allocInfo->tbInfo[0].noLyr;
23823 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyr - 1], tbs);
23824 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23826 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,bytesReq);
23828 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23830 noLyr = allocInfo->tbInfo[1].noLyr;
23831 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23832 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,bytesReq);
23836 else if(tbs && ((addtlRbsAdd != addtlRbsReq) ||
23837 (addtlRbsAdd && (moreAddtlRbsReq != moreAddtlRbsAdd))))
23839 /*In case of a situation where we were not able to add required number of
23840 * additional RBs then we adjust the Imcs based on original RBs requested.
23841 * Doing this would comensate for the few extra Rbs we have added but inorder
23842 * to comensate for number of RBS we couldnt add we again do the TBSize adjustment*/
23844 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23846 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23848 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23851 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23852 numOvrlapgPbchRb = numOvrlapgPbchRb - (addtlRbsAdd + moreAddtlRbsAdd);
23854 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,allocInfo->tbInfo[0].bytesReq);
23856 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23858 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,allocInfo->tbInfo[1].bytesReq);
23864 /*We hit this code when we were able to add the required additional RBS
23865 * hence we should adjust the IMcs based on orignals RBs requested*/
23867 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23869 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23871 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23876 } /* end of rgSCHCmnNonDlfsPbchRbAllocAdj */
23880 * @brief Performs RB allocation for frequency non-selective cell.
23884 * Function : rgSCHCmnNonDlfsCmnRbAlloc
23886 * Processing Steps:
23887 * - Allocate consecutively available RBs for BCCH/PCCH/RAR.
23889 * @param[in] RgSchCellCb *cell
23890 * @param[in, out] RgSchDlRbAlloc *allocInfo
23896 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc
23899 RgSchDlRbAlloc *allocInfo
23902 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc(cell, allocInfo)
23904 RgSchDlRbAlloc *allocInfo;
23910 U8 pbchSsRsSym = 0;
23913 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
23915 RgSchDlSf *dlSf = allocInfo->dlSf;
23918 U8 spsRbsAlloc = 0;
23919 RgSchDlSfAllocInfo *dlSfAlloc = &allocInfo->dlSf->dlSfAllocInfo;
23921 TRC2(rgSCHCmnNonDlfsCmnRbAlloc);
23923 allocInfo->tbInfo[0].noLyr = 1;
23926 /* Note: Initialize the masks to 0, this might not be needed since alloInfo
23927 * is initialized to 0 at the beginning of allcoation */
23928 allocInfo->resAllocInfo.raType0Mask = 0;
23929 cmMemset((U8*)allocInfo->resAllocInfo.raType1Mask, 0,
23930 RG_SCH_NUM_RATYPE1_32BIT_MASK * sizeof (U32));
23931 cmMemset((U8*)allocInfo->resAllocInfo.raType2Mask, 0,
23932 RG_SCH_NUM_RATYPE2_32BIT_MASK * sizeof (U32));
23934 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
23935 (dlSf->bwAlloced == dlSf->bw))
23937 if(dlSf->bwAlloced == dlSf->bw)
23943 if (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced))
23946 if ((allocInfo->tbInfo[0].imcs < 29) && (dlSf->bwAlloced < dlSf->bw))
23948 if(allocInfo->tbInfo[0].imcs < 29)
23951 /* set the remaining RBs for the requested UE */
23952 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
23953 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23954 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[0][tbs][allocInfo->rbsReq - 1]/8;
23959 /* Attempt RA Type 2 allocation in SPS Bandwidth */
23960 if (dlSf->spsAllocdBw < cell->spsBwRbgInfo.numRbs)
23963 rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23964 allocInfo->rbsReq, &cell->spsBwRbgInfo, &rbStart,
23965 &allocInfo->resAllocInfo, FALSE);
23966 /* rbsAlloc assignment moved from line 16671 to here to avoid
23967 * compilation error. Recheck */
23968 dlSf->spsAllocdBw += spsRbsAlloc;
23971 #endif /* LTEMAC_SPS */
23979 /* Update allocation information */
23980 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
23981 if (allocInfo->pdcch == NULLP)
23985 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
23986 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
23987 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
23988 allocInfo->allocInfo.raType2.isLocal = TRUE;
23992 allocInfo->allocInfo.raType2.rbStart = rbStart;
23993 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
23994 allocInfo->rbsAlloc = allocInfo->rbsReq;
24005 if(!(dlSf->sfNum == 5))
24007 /* case for subframes 1 to 9 except 5 */
24009 allocInfo->allocInfo.raType2.rbStart = rbStart;
24011 /*Fix for ccpu00123918*/
24012 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24017 pbchFrame = 1; /* case for subframe 5 */
24018 /* In subframe 5, symbols are reserved for PSS and SSS and CFICH
24019 and Cell Specific Reference Signals */
24020 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PSS_SSS_SYM) *
24021 RGSCH_NUM_SC_IN_RB + cell->numCellRSPerSf);
24027 /* In subframe 0, symbols are reserved for PSS, SSS, PBCH, CFICH and
24028 and Cell Specific Reference signals */
24029 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PBCH_SYM +
24030 RGSCH_NUM_PSS_SSS_SYM) * RGSCH_NUM_SC_IN_RB +
24031 cell->numCellRSPerSf);
24032 } /* end of outer else */
24035 (((dlSf->bwAlloced + allocInfo->rbsReq) - cell->pbchRbStart) > 0)&&
24036 (dlSf->bwAlloced < cell->pbchRbEnd))
24038 if(allocInfo->tbInfo[0].imcs < 29)
24040 rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo, pbchSsRsSym, TRUE);
24052 /*Fix for ccpu00123918*/
24053 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24054 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24055 allocInfo->rbsAlloc = allocInfo->rbsReq;
24057 /* LTE_ADV_FLAG_REMOVED_START */
24059 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
24061 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
24062 allocInfo->allocInfo.raType2.rbStart, \
24063 allocInfo->allocInfo.raType2.numRb);
24068 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
24069 allocInfo->allocInfo.raType2.rbStart, \
24070 allocInfo->allocInfo.raType2.numRb);
24076 /* LTE_ADV_FLAG_REMOVED_END */
24077 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24084 /* Update type 0, 1 and 2 masks */
24085 dlSfAlloc->raType0Mask |= allocInfo->resAllocInfo.raType0Mask;
24086 #ifdef RGSCH_SPS_UNUSED
24087 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
24089 dlSfAlloc->raType1Mask[idx] |=
24090 allocInfo->resAllocInfo.raType1Mask[idx];
24091 dlSfAlloc->raType1UsedRbs[idx] +=
24092 allocInfo->resAllocInfo.raType1UsedRbs[idx];
24095 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
24097 dlSfAlloc->raType2Mask[idx] |=
24098 allocInfo->resAllocInfo.raType2Mask[idx];
24108 * @brief Performs RB allocation for frequency non-selective cell.
24112 * Function : rgSCHCmnNonDlfsCmnRbAllocRar
24114 * Processing Steps:
24115 * - Allocate consecutively available RBs for BCCH/PCCH/RAR.
24117 * @param[in] RgSchCellCb *cell
24118 * @param[in, out] RgSchDlRbAlloc *allocInfo
24124 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAllocRar
24127 RgSchDlRbAlloc *allocInfo
24130 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc(cell, allocInfo)
24132 RgSchDlRbAlloc *allocInfo;
24135 RgSchDlSf *dlSf = allocInfo->dlSf;
24136 TRC2(rgSCHCmnNonDlfsCmnRbAllocRar);
24139 if(dlSf->bwAlloced == dlSf->bw)
24144 allocInfo->tbInfo[0].noLyr = 1;
24146 /* Update allocation information */
24147 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
24148 if (allocInfo->pdcch == NULLP)
24152 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
24153 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
24154 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
24155 allocInfo->allocInfo.raType2.isLocal = TRUE;
24157 /*Fix for ccpu00123918*/
24158 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24159 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24160 allocInfo->rbsAlloc = allocInfo->rbsReq;
24162 /* LTE_ADV_FLAG_REMOVED_END */
24163 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24166 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, NULLP, dlSf, 13, TFU_DCI_FORMAT_B1, FALSE);
24167 if (allocInfo->pdcch == NULLP)
24171 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
24172 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
24174 printf("5GTF_ERROR vrbg allocated > 25\n");
24178 allocInfo->tbInfo[0].cmnGrnt.vrbgStart = beamInfo->vrbgStart;
24179 allocInfo->tbInfo[0].cmnGrnt.numVrbg = allocInfo->vrbgReq;
24181 /* Update allocation information */
24182 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
24184 allocInfo->tbInfo[0].cmnGrnt.xPDSCHRange = 1;
24185 allocInfo->tbInfo[0].cmnGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
24186 allocInfo->tbInfo[0].cmnGrnt.vrbgStart, allocInfo->tbInfo[0].cmnGrnt.numVrbg);
24188 allocInfo->tbInfo[0].cmnGrnt.rbStrt = (allocInfo->tbInfo[0].cmnGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
24189 allocInfo->tbInfo[0].cmnGrnt.numRb = (allocInfo->tbInfo[0].cmnGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
24191 beamInfo->vrbgStart += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24192 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24193 allocInfo->tbInfo[0].cmnGrnt.rv = 0;
24194 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24197 printf("\n[%s],allocInfo->tbInfo[0].bytesAlloc:%u,vrbgReq:%u\n",
24198 __func__,allocInfo->tbInfo[0].bytesAlloc,allocInfo->vrbgReq);
24204 /* LTE_ADV_FLAG_REMOVED_START */
24207 * @brief To check if DL BW available for non-DLFS allocation.
24211 * Function : rgSCHCmnNonDlfsBwAvlbl
24213 * Processing Steps:
24214 * - Determine availability based on RA Type.
24216 * @param[in] RgSchCellCb *cell
24217 * @param[in] RgSchDlSf *dlSf
24218 * @param[in] RgSchDlRbAlloc *allocInfo
24226 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl
24229 RgSchSFRPoolInfo **sfrpoolInfo,
24231 RgSchDlRbAlloc *allocInfo,
24235 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl(cell, sfrpoolInfo, dlSf, allocInfo, isUeCellEdge)
24237 RgSchSFRPoolInfo **sfrpoolInfo;
24239 RgSchDlRbAlloc *allocInfo;
24247 RgSchSFRPoolInfo *sfrPool;
24248 RgSchSFRPoolInfo *sfrCEPool;
24252 RgSchSFRPoolInfo *poolWithMaxAvlblBw = NULLP;
24254 U32 addtnlPRBs = 0;
24256 if (dlSf->bw <= dlSf->bwAlloced)
24258 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
24259 "BW is fully allocated for subframe (%d) CRNTI:%d", dlSf->sfNum,allocInfo->rnti);
24263 if (dlSf->sfrTotalPoolInfo.ccBwFull == TRUE)
24265 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24266 "BW is fully allocated for CC Pool CRNTI:%d",allocInfo->rnti);
24270 if ((dlSf->sfrTotalPoolInfo.ceBwFull == TRUE) && (isUeCellEdge))
24272 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24273 "BW is fully allocated for CE Pool CRNTI:%d",allocInfo->rnti);
24277 /* We first check if the ue scheduled is a cell edge or cell centre and accordingly check the avaialble
24278 memory in their pool. If the cell centre UE doesnt have Bw available in its pool, then it will check
24279 Bw availability in cell edge pool but the other way around is NOT possible. */
24282 l = &dlSf->sfrTotalPoolInfo.cePool;
24286 l = &dlSf->sfrTotalPoolInfo.ccPool;
24289 n = cmLListFirst(l);
24293 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24295 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24297 /* MS_FIX for ccpu00123919 : Number of RBs in case of RETX should be same as that of initial transmission. */
24298 if(allocInfo->tbInfo[0].tbCb->txCntr)
24300 /* If RB assignment is being done for RETX. Then if reqRbs are a multiple of rbgSize then ignore lstRbgDfct. If reqRbs is
24301 * not a multiple of rbgSize then check if lsgRbgDfct exists */
24302 if (allocInfo->rbsReq % cell->rbgSize == 0)
24304 if ((sfrPool->type2End == dlSf->type2End) && dlSf->lstRbgDfct)
24306 /* In this scenario we are wasting the last RBG for this dlSf */
24307 sfrPool->type0End--;
24308 sfrPool->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24310 dlSf->lstRbgDfct = 0;
24312 /*ABHINAV To check if these variables need to be taken care of*/
24314 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24319 if (dlSf->lstRbgDfct)
24321 /* Check if type0 allocation can cater to this RETX requirement */
24322 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24328 if (sfrPool->type2End != dlSf->type2End) /*Search again for some pool which has the END RBG of the BandWidth*/
24336 /* cannot allocate same number of required RBs */
24342 /*rg002.301 ccpu00120391 MOD condition is modified approprialtely to find if rbsReq is less than available RBS*/
24343 if(allocInfo->rbsReq <= (((sfrPool->type0End - sfrPool->type2End + 1)*\
24344 cell->rbgSize) - dlSf->lstRbgDfct))
24346 *sfrpoolInfo = sfrPool;
24351 if (sfrPool->bw <= sfrPool->bwAlloced + cell->rbgSize)
24353 n = cmLListNext(l);
24354 /* If the ue is cell centre then it will simply check the memory available in next pool.
24355 But if there are no more memory pools available, then cell centre Ue will try to look for memory in cell edge pool */
24357 if((!isUeCellEdge) && (!n->node))
24359 l = &dlSf->sfrTotalPoolInfo.cePool;
24360 n = cmLListFirst(l);
24366 /* MS_FIX: Number of RBs in case of RETX should be same as that of initial transmission */
24367 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24369 /*rg002.301 ccpu00120391 MOD setting the remaining RBs for the requested UE*/
24370 allocInfo->rbsReq = (((sfrPool->type0End - sfrPool->type2End + 1)*\
24371 cell->rbgSize) - dlSf->lstRbgDfct);
24372 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24373 noLyrs = allocInfo->tbInfo[0].noLyr;
24374 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24375 *sfrpoolInfo = sfrPool;
24380 n = cmLListNext(l);
24382 /* If the ue is cell centre then it will simply check the memory available in next pool.
24383 But if there are no more memory pools available, then cell centre Ue will try to look for memory in cell edge pool */
24384 if((!isUeCellEdge) && (!n->node))
24386 l = &dlSf->sfrTotalPoolInfo.cePool;
24387 n = cmLListFirst(l);
24393 // RETVALUE(FALSE);
24396 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24398 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24399 /* This is a Case where a UE was CC and had more RBs allocated than present in CE pool.
24400 In case this UE whn become CE with retx going on, then BW is not sufficient for Retx */
24401 if ((isUeCellEdge) &&
24402 (allocInfo->tbInfo[0].tbCb->txCntr != 0))
24404 if(allocInfo->rbsReq > (sfrPool->bw - sfrPool->bwAlloced))
24406 /* Adjust CE BW such that Retx alloc is successful */
24407 /* Check if merging CE with adjacent CC pool will be sufficient to process Retx */
24409 /* If no Type 0 allocations are made from this pool */
24410 if (sfrPool->type0End == (((sfrPool->poolendRB + 1) / cell->rbgSize) - 1))
24412 if (sfrPool->adjCCPool &&
24413 (sfrPool->adjCCPool->type2Start == sfrPool->poolendRB + 1) &&
24414 (allocInfo->rbsReq <= ((sfrPool->bw - sfrPool->bwAlloced) +
24415 ((sfrPool->adjCCPool->bw - sfrPool->adjCCPool->bwAlloced)))))
24417 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24419 /* Adjusting CE Pool Info */
24420 sfrPool->bw += addtnlPRBs;
24421 sfrPool->type0End = ((sfrPool->poolendRB + addtnlPRBs + 1) /
24422 cell->rbgSize) - 1;
24424 /* Adjusting CC Pool Info */
24425 sfrPool->adjCCPool->type2Start += addtnlPRBs;
24426 sfrPool->adjCCPool->type2End = RGSCH_CEIL(sfrPool->adjCCPool->type2Start,
24428 sfrPool->adjCCPool->bw -= addtnlPRBs;
24429 *sfrpoolInfo = sfrPool;
24436 /* Check if CC pool is one of the following:
24437 * 1. |CE| + |CC "CCPool2Exists" = TRUE|
24438 * 2. |CC "CCPool2Exists" = FALSE| + |CE| + |CC "CCPool2Exists" = TRUE|
24440 if(TRUE == sfrPool->CCPool2Exists)
24442 l1 = &dlSf->sfrTotalPoolInfo.cePool;
24443 n1 = cmLListFirst(l1);
24444 sfrCEPool = (RgSchSFRPoolInfo*)(n1->node);
24445 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced))
24447 *sfrpoolInfo = sfrCEPool;
24450 else if(allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24452 *sfrpoolInfo = sfrPool;
24455 /* Check if CE and CC boundary has unallocated prbs */
24456 else if ((sfrPool->poolstartRB == sfrPool->type2Start) &&
24457 (sfrCEPool->type0End == ((sfrCEPool->poolendRB + 1) / cell->rbgSize) - 1))
24459 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced) +
24460 (sfrPool->bw - sfrPool->bwAlloced))
24462 /* Checking if BW can be allocated partly from CE pool and partly
24465 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24466 /* Updating CE and CC type2 parametrs based on the RBs allocated
24467 * from these pools*/
24468 sfrPool->type2Start -= addtnlPRBs;
24469 sfrPool->type2End = RGSCH_CEIL(sfrPool->type2Start, cell->rbgSize);
24470 sfrPool->bw += addtnlPRBs;
24471 if (addtnlPRBs == (sfrCEPool->bw - sfrCEPool->bwAlloced))
24473 sfrCEPool->bwAlloced = sfrCEPool->bw;
24474 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24478 sfrCEPool->bw -= addtnlPRBs;
24479 sfrCEPool->type0End = ((sfrCEPool->poolendRB + 1 - addtnlPRBs) / cell->rbgSize) - 1;
24481 *sfrpoolInfo = sfrPool;
24484 else if ( bwAvlbl <
24485 ((sfrCEPool->bw - sfrCEPool->bwAlloced) +
24486 (sfrPool->bw - sfrPool->bwAlloced)))
24488 /* All the Prbs from CE BW shall be allocated */
24489 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24491 sfrPool->type2Start = sfrCEPool->type2Start;
24492 sfrPool->bw += sfrCEPool->bw - sfrCEPool->bwAlloced;
24493 sfrCEPool->type2Start = sfrCEPool->poolendRB + 1;
24494 sfrCEPool->bwAlloced = sfrCEPool->bw;
24495 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24497 /* set the remaining RBs for the requested UE */
24498 allocInfo->rbsReq = (sfrPool->bw - sfrPool->bwAlloced);
24499 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24500 noLyrs = allocInfo->tbInfo[0].noLyr;
24501 allocInfo->tbInfo[0].bytesReq =
24502 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24503 *sfrpoolInfo = sfrPool;
24514 /* Checking if no. of RBs required can be allocated from
24516 * 1. If available return the SFR pool.
24517 * 2. Else update the RBs required parameter based on the
24518 * BW available in the pool
24519 * 3. Return FALSE if no B/W is available.
24521 if (allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24523 *sfrpoolInfo = sfrPool;
24528 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24530 if (bwAvlbl < sfrPool->bw - sfrPool->bwAlloced)
24534 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24536 bwAvlbl = sfrPool->bw - sfrPool->bwAlloced;
24537 poolWithMaxAvlblBw = sfrPool;
24539 n = cmLListNext(l);
24541 if ((isUeCellEdge == FALSE) && (n == NULLP))
24543 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24545 l = &dlSf->sfrTotalPoolInfo.cePool;
24546 n = cmLListFirst(l);
24556 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24560 dlSf->sfrTotalPoolInfo.ccBwFull = TRUE;
24566 /* set the remaining RBs for the requested UE */
24567 allocInfo->rbsReq = poolWithMaxAvlblBw->bw -
24568 poolWithMaxAvlblBw->bwAlloced;
24569 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24570 noLyrs = allocInfo->tbInfo[0].noLyr;
24571 allocInfo->tbInfo[0].bytesReq =
24572 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24573 *sfrpoolInfo = poolWithMaxAvlblBw;
24580 n = cmLListNext(l);
24582 if ((isUeCellEdge == FALSE) && (n == NULLP))
24584 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24586 l = &dlSf->sfrTotalPoolInfo.cePool;
24587 n = cmLListFirst(l);
24603 #endif /* end of ifndef LTE_TDD*/
24604 /* LTE_ADV_FLAG_REMOVED_END */
24607 * @brief To check if DL BW available for non-DLFS allocation.
24611 * Function : rgSCHCmnNonDlfsBwAvlbl
24613 * Processing Steps:
24614 * - Determine availability based on RA Type.
24616 * @param[in] RgSchCellCb *cell
24617 * @param[in] RgSchDlSf *dlSf
24618 * @param[in] RgSchDlRbAlloc *allocInfo
24626 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl
24630 RgSchDlRbAlloc *allocInfo
24633 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl(cell, dlSf, allocInfo)
24636 RgSchDlRbAlloc *allocInfo;
24641 U8 ignoredDfctRbg = FALSE;
24643 TRC2(rgSCHCmnNonDlfsBwAvlbl);
24644 if (dlSf->bw <= dlSf->bwAlloced)
24646 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId, "(%d:%d)FAILED CRNTI:%d",
24647 dlSf->bw, dlSf->bwAlloced,allocInfo->rnti);
24650 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24652 /* Fix for ccpu00123919 : Number of RBs in case of RETX should be same as
24653 * that of initial transmission. */
24654 if(allocInfo->tbInfo[0].tbCb->txCntr)
24656 /* If RB assignment is being done for RETX. Then if reqRbs are
24657 * a multiple of rbgSize then ignore lstRbgDfct. If reqRbs is
24658 * not a multiple of rbgSize then check if lsgRbgDfct exists */
24659 if (allocInfo->rbsReq % cell->rbgSize == 0)
24661 if (dlSf->lstRbgDfct)
24663 /* In this scenario we are wasting the last RBG for this dlSf */
24666 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24667 /* Fix: MUE_PERTTI_DL */
24668 dlSf->lstRbgDfct = 0;
24669 ignoredDfctRbg = TRUE;
24675 if (dlSf->lstRbgDfct)
24677 /* Check if type0 allocation can cater to this RETX requirement */
24678 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24685 /* cannot allocate same number of required RBs */
24691 /* Condition is modified approprialtely to find
24692 * if rbsReq is less than available RBS*/
24693 if(allocInfo->rbsReq <= (((dlSf->type0End - dlSf->type2End + 1)*\
24694 cell->rbgSize) - dlSf->lstRbgDfct))
24698 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24699 * allocation in TDD when requested RBs are more than available RBs*/
24702 /* MS_WORKAROUND for ccpu00122022 */
24703 if (dlSf->bw < dlSf->bwAlloced + cell->rbgSize)
24705 /* ccpu00132358- Re-assigning the values which were updated above
24706 * if it is RETX and Last RBG available*/
24707 if(ignoredDfctRbg == TRUE)
24710 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24711 dlSf->lstRbgDfct = 1;
24717 /* Fix: Number of RBs in case of RETX should be same as
24718 * that of initial transmission. */
24719 if(allocInfo->tbInfo[0].tbCb->txCntr == 0
24721 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24725 /* Setting the remaining RBs for the requested UE*/
24726 allocInfo->rbsReq = (((dlSf->type0End - dlSf->type2End + 1)*\
24727 cell->rbgSize) - dlSf->lstRbgDfct);
24728 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24729 noLyrs = allocInfo->tbInfo[0].noLyr;
24730 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24731 /* DwPts Scheduling Changes Start */
24733 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24735 allocInfo->tbInfo[0].bytesReq =
24736 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24739 /* DwPts Scheduling Changes End */
24743 /* ccpu00132358- Re-assigning the values which were updated above
24744 * if it is RETX and Last RBG available*/
24745 if(ignoredDfctRbg == TRUE)
24748 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24749 dlSf->lstRbgDfct = 1;
24752 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "FAILED for CRNTI:%d",
24754 printf ("RB Alloc failed for LAA TB type 0\n");
24760 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24762 if (allocInfo->rbsReq <= (dlSf->bw - dlSf->bwAlloced))
24766 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24767 * allocation in TDD when requested RBs are more than available RBs*/
24770 /* Fix: Number of RBs in case of RETX should be same as
24771 * that of initial transmission. */
24772 if((allocInfo->tbInfo[0].tbCb->txCntr == 0)
24774 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24778 /* set the remaining RBs for the requested UE */
24779 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
24780 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24781 noLyrs = allocInfo->tbInfo[0].noLyr;
24782 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24783 /* DwPts Scheduling Changes Start */
24785 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24787 allocInfo->tbInfo[0].bytesReq =
24788 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24791 /* DwPts Scheduling Changes End */
24795 printf ("RB Alloc failed for LAA TB type 2\n");
24796 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24799 /* Fix: Number of RBs in case of RETX should be same as
24800 * that of initial transmission. */
24804 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24808 /* LTE_ADV_FLAG_REMOVED_START */
24811 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24815 * Function : rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24817 * Processing Steps:
24819 * @param[in] RgSchCellCb *cell
24820 * @param[in] RgSchDlSf *dlSf
24821 * @param[in] U8 rbStrt
24822 * @param[in] U8 numRb
24827 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24835 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
24844 RgSchSFRPoolInfo *sfrPool;
24845 TRC2(rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc);
24847 l = &dlSf->sfrTotalPoolInfo.ccPool;
24849 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24850 dlSf->bwAlloced += numRb;
24851 dlSf->type2Start += numRb;
24852 n = cmLListFirst(l);
24856 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24857 n = cmLListNext(l);
24859 /* If the pool contains some RBs allocated in this allocation, e.g: Pool is [30.50]. Pool->type2Start is 40 , dlSf->type2Start is 45. then update the variables in pool */
24860 if((sfrPool->poolendRB >= dlSf->type2Start) && (sfrPool->type2Start < dlSf->type2Start))
24862 sfrPool->type2End = dlSf->type2End;
24863 sfrPool->bwAlloced = dlSf->type2Start - sfrPool->poolstartRB;
24864 sfrPool->type2Start = dlSf->type2Start;
24868 /* If the pool contains all RBs allocated in this allocation*/
24869 if(dlSf->type2Start > sfrPool->poolendRB)
24871 sfrPool->type2End = sfrPool->type0End + 1;
24872 sfrPool->bwAlloced = sfrPool->bw;
24873 sfrPool->type2Start = sfrPool->poolendRB + 1;
24878 if (l != &dlSf->sfrTotalPoolInfo.cePool)
24880 l = &dlSf->sfrTotalPoolInfo.cePool;
24881 n = cmLListFirst(l);
24891 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24895 * Function : rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24897 * Processing Steps:
24899 * @param[in] RgSchCellCb *cell
24900 * @param[in] RgSchDlSf *dlSf
24901 * @param[in] U8 rbStrt
24902 * @param[in] U8 numRb
24908 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24917 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc(cell, ue, dlSf, rbStrt, numRb)
24927 RgSchSFRPoolInfo *sfrCCPool1 = NULL;
24928 RgSchSFRPoolInfo *sfrCCPool2 = NULL;
24931 TRC2(rgSCHCmnNonDlfsUpdDSFRTyp2Alloc);
24932 /* Move the type2End pivot forward */
24935 l = &dlSf->sfrTotalPoolInfo.ccPool;
24936 n = cmLListFirst(l);
24939 sfrCCPool1 = (RgSchSFRPoolInfo*)(n->node);
24941 if (sfrCCPool1 == NULLP)
24943 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24944 "sfrCCPool1 is NULL for CRNTI:%d",ue->ueId);
24947 n = cmLListNext(l);
24950 sfrCCPool2 = (RgSchSFRPoolInfo*)(n->node);
24951 n = cmLListNext(l);
24953 if((sfrCCPool1) && (sfrCCPool2))
24955 /* Based on RNTP info, the CC user is assigned high power per subframe basis */
24956 if(((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24957 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb)) ||
24958 ((dlSf->type2Start >= sfrCCPool2->pwrHiCCRange.startRb) &&
24959 (dlSf->type2Start + numRb < sfrCCPool2->pwrHiCCRange.endRb)))
24961 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24963 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24964 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24967 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24968 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
24975 if((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24976 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb))
24978 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24980 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24981 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24984 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24985 "rgSCHCmnBuildRntpInfo() function returned RFAILED CRNTI:%d",ue->ueId);
24991 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24993 dlSf->bwAlloced += numRb;
24994 /*MS_FIX for ccpu00123918*/
24995 dlSf->type2Start += numRb;
25001 #endif /* end of ifndef LTE_TDD*/
25002 /* LTE_ADV_FLAG_REMOVED_END */
25004 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
25008 * Function : rgSCHCmnNonDlfsUpdTyp2Alloc
25010 * Processing Steps:
25012 * @param[in] RgSchCellCb *cell
25013 * @param[in] RgSchDlSf *dlSf
25014 * @param[in] U8 rbStrt
25015 * @param[in] U8 numRb
25020 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc
25028 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
25035 TRC2(rgSCHCmnNonDlfsUpdTyp2Alloc);
25036 /* Move the type2End pivot forward */
25037 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25038 //#ifndef LTEMAC_SPS
25039 dlSf->bwAlloced += numRb;
25040 /*Fix for ccpu00123918*/
25041 dlSf->type2Start += numRb;
25047 * @brief To do DL allocation using TYPE0 RA.
25051 * Function : rgSCHCmnNonDlfsType0Alloc
25053 * Processing Steps:
25054 * - Perform TYPE0 allocation using the RBGs between
25055 * type0End and type2End.
25056 * - Build the allocation mask as per RBG positioning.
25057 * - Update the allocation parameters.
25059 * @param[in] RgSchCellCb *cell
25060 * @param[in] RgSchDlSf *dlSf
25061 * @param[in] RgSchDlRbAlloc *allocInfo
25067 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
25071 RgSchDlRbAlloc *allocInfo,
25075 PRIVATE Void rgSCHCmnNonDlfsType0Alloc(cell, dlSf, allocInfo, dlUe)
25078 RgSchDlRbAlloc *allocInfo;
25082 U32 dlAllocMsk = 0;
25083 U8 rbgFiller = dlSf->lstRbgDfct;
25084 U8 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25085 //U8 noRbgs = (allocInfo->rbsReq + rbgFiller)/ cell->rbgSize;
25089 U32 tb1BytesAlloc = 0;
25090 U32 tb2BytesAlloc = 0;
25091 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25093 TRC2(rgSCHCmnNonDlfsType0Alloc);
25094 //if(noRbgs == 0) noRbgs = 1; /* Not required as ceilling is used above*/
25096 /* Fix for ccpu00123919*/
25097 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25098 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25104 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25107 /* Fix for ccpu00138701: Ceilling is using to derive num of RBGs, Therefore,
25108 * after this operation,checking Max TB size and Max RBs are not crossed
25109 * if it is crossed then decrement num of RBGs. */
25110 //if((noRbs + rbgFiller) % cell->rbgSize)
25111 if((noRbs > allocInfo->rbsReq) &&
25112 (allocInfo->rbsReq + rbgFiller) % cell->rbgSize)
25113 {/* considering ue category limitation
25114 * due to ceiling */
25117 if (rgSCHLaaIsLaaTB(allocInfo)== FALSE)
25120 if ((allocInfo->tbInfo[0].schdlngForTb) && (!allocInfo->tbInfo[0].tbCb->txCntr))
25122 iTbs = allocInfo->tbInfo[0].iTbs;
25123 noLyr = allocInfo->tbInfo[0].noLyr;
25124 tb1BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25127 if ((allocInfo->tbInfo[1].schdlngForTb) && (!allocInfo->tbInfo[1].tbCb->txCntr))
25129 iTbs = allocInfo->tbInfo[1].iTbs;
25130 noLyr = allocInfo->tbInfo[1].noLyr;
25131 tb2BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25135 /* Only Check for New Tx No need for Retx */
25136 if (tb1BytesAlloc || tb2BytesAlloc)
25138 if (( ue->dl.aggTbBits >= dlUe->maxTbBits) ||
25139 (tb1BytesAlloc >= dlUe->maxTbSz/8) ||
25140 (tb2BytesAlloc >= dlUe->maxTbSz/8) ||
25141 (noRbs >= dlUe->maxRb))
25147 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25151 /* type0End would have been initially (during subfrm Init) at the bit position
25152 * (cell->noOfRbgs - 1), 0 being the most significant.
25153 * Getting DlAllocMsk for noRbgs and at the appropriate position */
25154 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - dlSf->type0End));
25155 /* Move backwards the type0End pivot */
25156 dlSf->type0End -= noRbgs;
25157 /*Fix for ccpu00123919*/
25158 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25159 /* Update the bwAlloced field accordingly */
25160 //#ifndef LTEMAC_SPS /* ccpu00129474*/
25161 dlSf->bwAlloced += noRbs;
25163 /* Update Type0 Alloc Info */
25164 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25165 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25166 allocInfo->rbsAlloc = noRbs;
25168 /* Update Tb info for each scheduled TB */
25169 iTbs = allocInfo->tbInfo[0].iTbs;
25170 noLyr = allocInfo->tbInfo[0].noLyr;
25171 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25172 * RETX TB Size is same as Init TX TB Size */
25173 if (allocInfo->tbInfo[0].tbCb->txCntr)
25175 allocInfo->tbInfo[0].bytesAlloc =
25176 allocInfo->tbInfo[0].bytesReq;
25180 allocInfo->tbInfo[0].bytesAlloc =
25181 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25182 /* DwPts Scheduling Changes Start */
25184 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25186 allocInfo->tbInfo[0].bytesAlloc =
25187 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25190 /* DwPts Scheduling Changes End */
25193 if (allocInfo->tbInfo[1].schdlngForTb)
25195 iTbs = allocInfo->tbInfo[1].iTbs;
25196 noLyr = allocInfo->tbInfo[1].noLyr;
25197 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25198 * RETX TB Size is same as Init TX TB Size */
25199 if (allocInfo->tbInfo[1].tbCb->txCntr)
25201 allocInfo->tbInfo[1].bytesAlloc =
25202 allocInfo->tbInfo[1].bytesReq;
25206 allocInfo->tbInfo[1].bytesAlloc =
25207 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;;
25208 /* DwPts Scheduling Changes Start */
25210 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25212 allocInfo->tbInfo[1].bytesAlloc =
25213 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25216 /* DwPts Scheduling Changes End */
25220 /* The last RBG which can be smaller than the RBG size is consedered
25221 * only for the first time allocation of TYPE0 UE */
25222 dlSf->lstRbgDfct = 0;
25229 * @brief To prepare RNTP value from the PRB allocation (P-High -> 1 and P-Low -> 0)
25233 * Function : rgSCHCmnBuildRntpInfo
25235 * Processing Steps:
25237 * @param[in] U8 *rntpPtr
25238 * @param[in] U8 startRb
25239 * @param[in] U8 numRb
25245 PRIVATE S16 rgSCHCmnBuildRntpInfo
25254 PRIVATE S16 rgSCHCmnBuildRntpInfo(cell, rntpPtr, startRb, nmbRb, bw)
25262 U16 rbPtrStartIdx; /* Start Index of Octete Buffer to be filled */
25263 U16 rbPtrEndIdx; /* End Index of Octete Buffer to be filled */
25264 U16 rbBitLoc; /* Bit Location to be set as 1 in the current Byte */
25265 U16 nmbRbPerByte; /* PRB's to be set in the current Byte (in case of multiple Bytes) */
25267 TRC2(rgSCHCmnBuildRntpInfo);
25269 rbPtrStartIdx = (startRb)/8;
25270 rbPtrEndIdx = (startRb + nmbRb)/8;
25272 if (rntpPtr == NULLP)
25274 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
25275 "rgSCHCmnBuildRntpInfo():"
25276 "rntpPtr can't be NULLP (Memory Allocation Failed)");
25280 while(rbPtrStartIdx <= rbPtrEndIdx)
25282 rbBitLoc = (startRb)%8;
25284 /* case 1: startRb and endRb lies in same Byte */
25285 if (rbPtrStartIdx == rbPtrEndIdx)
25287 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25288 | (((1<<nmbRb)-1)<<rbBitLoc);
25291 /* case 2: startRb and endRb lies in different Byte */
25292 if (rbPtrStartIdx != rbPtrEndIdx)
25294 nmbRbPerByte = 8 - rbBitLoc;
25295 nmbRb = nmbRb - nmbRbPerByte;
25296 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25297 | (((1<<nmbRbPerByte)-1)<<rbBitLoc);
25298 startRb = startRb + nmbRbPerByte;
25304 /* dsfr_pal_fixes ** 21-March-2013 ** SKS ** Adding Debug logs */
25306 /* dsfr_pal_fixes ** 25-March-2013 ** SKS ** Adding Debug logs to print RNTP */
25313 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
25317 * Function : rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25319 * Processing Steps:
25321 * @param[in] RgSchCellCb *cell
25322 * @param[in] RgSchDlSf *dlSf
25323 * @param[in] U8 rbStrt
25324 * @param[in] U8 numRb
25330 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25335 RgSchSFRPoolInfo *sfrPool,
25340 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrPool, rbStrt, numRb)
25344 RgSchSFRPoolInfo *sfrPool;
25353 TRC2(rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc);
25354 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25355 sfrPool->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25358 dlSf->type2Start += numRb;
25359 dlSf->bwAlloced += numRb;
25361 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
25363 /* Based on RNTP info, the CC user is assigned high power per subframe basis */
25364 if(FALSE == ue->lteAdvUeCb.rgrLteAdvUeCfg.isUeCellEdge)
25366 if((sfrPool->type2Start >= sfrPool->pwrHiCCRange.startRb) &&
25367 (sfrPool->type2Start + numRb < sfrPool->pwrHiCCRange.endRb))
25369 ue->lteAdvUeCb.isCCUePHigh = TRUE;
25371 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25372 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25375 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25376 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
25383 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25384 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25387 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25388 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
25393 sfrPool->type2Start += numRb;
25394 sfrPool->bwAlloced += numRb;
25401 * @brief To do DL allocation using TYPE0 RA.
25405 * Function : rgSCHCmnNonDlfsSFRPoolType0Alloc
25407 * Processing Steps:
25408 * - Perform TYPE0 allocation using the RBGs between type0End and type2End.
25409 * - Build the allocation mask as per RBG positioning.
25410 * - Update the allocation parameters.
25412 * @param[in] RgSchCellCb *cell
25413 * @param[in] RgSchDlSf *dlSf
25414 * @param[in] RgSchDlRbAlloc *allocInfo
25420 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc
25424 RgSchSFRPoolInfo *poolInfo,
25425 RgSchDlRbAlloc *allocInfo
25428 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, poolInfo, allocInfo)
25431 RgSchSFRPoolInfo *poolInfo;
25432 RgSchDlRbAlloc *allocInfo;
25435 U32 dlAllocMsk = 0;
25442 TRC2(rgSCHCmnNonDlfsSFRPoolType0Alloc);
25444 if (poolInfo->poolstartRB + poolInfo->bw == dlSf->bw)
25446 if (poolInfo->type0End == dlSf->bw/4)
25448 rbgFiller = dlSf->lstRbgDfct;
25449 /* The last RBG which can be smaller than the RBG size is consedered
25450 * only for the first time allocation of TYPE0 UE */
25451 dlSf->lstRbgDfct = 0;
25455 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25457 /* Abhinav to-do start */
25458 /* MS_FIX for ccpu00123919*/
25459 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25460 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25466 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25468 /* Abhinav to-do end */
25472 /* type0End would have been initially (during subfrm Init) at the bit position
25473 * (cell->noOfRbgs - 1), 0 being the most significant.
25474 * Getting DlAllocMsk for noRbgs and at the appropriate position */
25475 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - poolInfo->type0End));
25476 /* Move backwards the type0End pivot */
25477 poolInfo->type0End -= noRbgs;
25478 /*MS_FIX for ccpu00123919*/
25479 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25480 /* Update the bwAlloced field accordingly */
25481 poolInfo->bwAlloced += noRbs + dlSf->lstRbgDfct;
25482 dlSf->bwAlloced += noRbs + dlSf->lstRbgDfct;
25484 /* Update Type0 Alloc Info */
25485 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25486 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25487 allocInfo->rbsAlloc = noRbs;
25489 /* Update Tb info for each scheduled TB */
25490 iTbs = allocInfo->tbInfo[0].iTbs;
25491 noLyr = allocInfo->tbInfo[0].noLyr;
25492 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25493 * RETX TB Size is same as Init TX TB Size */
25494 if (allocInfo->tbInfo[0].tbCb->txCntr)
25496 allocInfo->tbInfo[0].bytesAlloc =
25497 allocInfo->tbInfo[0].bytesReq;
25501 allocInfo->tbInfo[0].bytesAlloc =
25502 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25505 if (allocInfo->tbInfo[1].schdlngForTb)
25507 iTbs = allocInfo->tbInfo[1].iTbs;
25508 noLyr = allocInfo->tbInfo[1].noLyr;
25509 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25510 * RETX TB Size is same as Init TX TB Size */
25511 if (allocInfo->tbInfo[1].tbCb->txCntr)
25513 allocInfo->tbInfo[1].bytesAlloc =
25514 allocInfo->tbInfo[1].bytesReq;
25518 allocInfo->tbInfo[1].bytesAlloc =
25519 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;;
25523 /* The last RBG which can be smaller than the RBG size is consedered
25524 * only for the first time allocation of TYPE0 UE */
25525 dlSf->lstRbgDfct = 0;
25530 * @brief Computes RNTP Info for a subframe.
25534 * Function : rgSCHCmnNonDlfsDsfrRntpComp
25536 * Processing Steps:
25537 * - Computes RNTP info from individual pools.
25539 * @param[in] RgSchDlSf *dlSf
25545 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp
25551 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp(cell, dlSf)
25556 PRIVATE U16 samples = 0;
25558 U16 bwBytes = (dlSf->bw-1)/8;
25559 RgrLoadInfIndInfo *rgrLoadInf;
25563 TRC2(rgSCHCmnNonDlfsDsfrRntpComp);
25565 len = (dlSf->bw % 8 == 0) ? dlSf->bw/8 : dlSf->bw/8 + 1;
25567 /* RNTP info is ORed every TTI and the sample is stored in cell control block */
25568 for(i = 0; i <= bwBytes; i++)
25570 cell->rntpAggrInfo.val[i] |= dlSf->rntpInfo.val[i];
25572 samples = samples + 1;
25573 /* After every 1000 ms, the RNTP info will be sent to application to be further sent to all neighbouring eNB
25574 informing them about the load indication for cell edge users */
25575 if(RG_SCH_MAX_RNTP_SAMPLES == samples)
25578 ret = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&rgrLoadInf,
25579 sizeof(RgrLoadInfIndInfo));
25582 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
25583 "allocate memory for sending LoadInfo");
25587 rgrLoadInf->u.rntpInfo.pres = cell->rntpAggrInfo.pres;
25588 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25589 rgrLoadInf->u.rntpInfo.len = len;
25591 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25592 rgrLoadInf->u.rntpInfo.val = cell->rntpAggrInfo.val;
25593 rgrLoadInf->cellId = cell->cellId;
25595 /* dsfr_pal_fixes ** 22-March-2013 ** SKS */
25596 rgrLoadInf->bw = dlSf->bw;
25597 rgrLoadInf->type = RGR_SFR;
25599 ret = rgSCHUtlRgrLoadInfInd(cell, rgrLoadInf);
25602 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsDsfrRntpComp():"
25603 "rgSCHUtlRgrLoadInfInd() returned RFAILED");
25606 cmMemset(cell->rntpAggrInfo.val,0,len);
25610 /* LTE_ADV_FLAG_REMOVED_END */
25612 /* LTE_ADV_FLAG_REMOVED_START */
25614 * @brief Performs RB allocation per UE from a pool.
25618 * Function : rgSCHCmnSFRNonDlfsUeRbAlloc
25620 * Processing Steps:
25621 * - Allocate consecutively available RBs.
25623 * @param[in] RgSchCellCb *cell
25624 * @param[in] RgSchUeCb *ue
25625 * @param[in] RgSchDlSf *dlSf
25626 * @param[out] U8 *isDlBwAvail
/* Allocates DL RBs for one UE from a Soft-Frequency-Reuse (SFR) pool in the
 * non-DLFS path: selects a pool via rgSCHCmnNonDlfsSFRBwAvlbl(), allocates a
 * PDCCH (DTX-aware), then performs RA Type-2 or Type-0 allocation out of the
 * chosen pool and records the result in the UE's allocation CB.
 * Returns S16; *isDlBwAvail tells the caller whether DL bandwidth remains
 * for scheduling further UEs in this subframe. */
25634 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc
25642 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25649 RgSchDlRbAlloc *allocInfo;
25650 RgSchCmnDlUe *dlUe;
25652 RgSchSFRPoolInfo *sfrpoolInfo = NULLP;
25654 TRC2(rgSCHCmnSFRNonDlfsUeRbAlloc);
/* Classify the UE as cell-edge or cell-centre (macro) to drive pool choice */
25656 isUECellEdge = RG_SCH_CMN_IS_UE_CELL_EDGE(ue);
25658 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25659 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25660 *isDlBwAvail = TRUE;
25662 /*Find which pool is available for this UE*/
25663 if (rgSCHCmnNonDlfsSFRBwAvlbl(cell, &sfrpoolInfo, dlSf, allocInfo, isUECellEdge) != TRUE)
25665 /* SFR_FIX - If this is CE UE there may be BW available in CC Pool
25666 So CC UEs will be scheduled */
25669 *isDlBwAvail = TRUE;
25673 *isDlBwAvail = FALSE;
/* DTX feedback on either TB forces the PDCCH to be allocated with the
 * DTX flag set (last argument TRUE) */
25678 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX || dlUe->proc->tbInfo[1].isAckNackDtx)
25680 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25684 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25687 if (!(allocInfo->pdcch))
25689 /* Returning ROK since PDCCH might be available for another UE and further allocations could be done */
25694 allocInfo->rnti = ue->ueId;
/* RA Type-2 (contiguous) allocation starting at the pool's type2Start */
25697 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
25699 allocInfo->allocInfo.raType2.isLocal = TRUE;
25700 /* rg004.201 patch - ccpu00109921 fix end */
25701 /* MS_FIX for ccpu00123918*/
25702 allocInfo->allocInfo.raType2.rbStart = (U8)sfrpoolInfo->type2Start;
25703 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
25704 /* rg007.201 - Changes for MIMO feature addition */
25705 /* rg008.201 - Removed dependency on MIMO compile-time flag */
25706 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrpoolInfo, \
25707 allocInfo->allocInfo.raType2.rbStart, \
25708 allocInfo->allocInfo.raType2.numRb);
25709 allocInfo->rbsAlloc = allocInfo->rbsReq;
25710 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25712 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
25714 rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, sfrpoolInfo, allocInfo);
/* Recompute code rate for TB0, and TB1 if it is also being scheduled */
25718 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,0);
25719 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
25721 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,1);
25726 #if defined(LTEMAC_SPS)
25727 /* Update the sub-frame with new allocation */
25728 dlSf->bwAlloced += allocInfo->rbsReq;
25734 /* LTE_ADV_FLAG_REMOVED_END */
25735 #endif /* LTE_TDD */
25738 * @brief Performs RB allocation per UE for frequency non-selective cell.
25742 * Function : rgSCHCmnNonDlfsUeRbAlloc
25744 * Processing Steps:
25745 * - Allocate consecutively available RBs.
25747 * @param[in] RgSchCellCb *cell
25748 * @param[in] RgSchUeCb *ue
25749 * @param[in] RgSchDlSf *dlSf
25750 * @param[out] U8 *isDlBwAvail
/* 5GTF variant of per-UE RB allocation for a frequency non-selective cell:
 * allocates VRBGs from the UE's beam budget in the subframe, allocates a
 * DTX-aware PDCCH, then fills the DL grant (RIV, xPDSCH range, SCID, PRB
 * translation) for TB0. *isDlBwAvail reports remaining DL bandwidth. */
25757 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc
25765 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25772 RgSchDlRbAlloc *allocInfo;
25773 RgSchCmnDlUe *dlUe;
25777 TRC2(rgSCHCmnNonDlfsUeRbAlloc);
25780 RgSch5gtfUeCb *ue5gtfCb = &(ue->ue5gtfCb);
25781 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[ue5gtfCb->BeamId]);
25783 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25784 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25785 *isDlBwAvail = TRUE;
/* Guard: the beam's VRBG budget (MAX_5GTF_VRBG) must not be exceeded */
25787 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
25789 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25790 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
25792 printf("5GTF_ERROR vrbg allocated > 25\n");
/* DTX on either TB selects the DTX flavour of PDCCH allocation */
25796 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX
25797 || dlUe->proc->tbInfo[1].isAckNackDtx)
25799 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25803 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25805 if (!(allocInfo->pdcch))
25807 /* Returning ROK since PDCCH might be available for another UE and
25808 * further allocations could be done */
25809 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25810 "5GTF_ERROR : PDCCH allocation failed :ue (%u)",
25812 printf("5GTF_ERROR PDCCH allocation failed\n");
25816 //maxPrb = RGSCH_MIN((allocInfo->vrbgReq * MAX_5GTF_VRBG_SIZE), ue5gtfCb->maxPrb);
25817 //maxPrb = RGSCH_MIN(maxPrb,
25818 //((beamInfo->totVrbgAvail - beamInfo->vrbgStart)* MAX_5GTF_VRBG_SIZE)));
25819 //TODO_SID Need to check for vrbg available after scheduling for same beam.
/* Fill the DL grant for TB0 from the beam's running VRBG cursor */
25820 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
25821 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
25822 //TODO_SID: Setting for max TP
25823 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
25824 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
25825 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
25826 allocInfo->tbInfo[0].tbCb->dlGrnt.SCID = 0;
25827 allocInfo->tbInfo[0].tbCb->dlGrnt.dciFormat = allocInfo->dciFormat;
25828 //Filling temporarily
/* Translate VRBG units to PRB start/count (MAX_5GTF_VRBG_SIZE PRBs/VRBG) */
25829 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
25830 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
/* Advance the beam's VRBG accounting past this allocation */
25832 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25833 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25834 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25842 * @brief Performs RB allocation for Msg4 for frequency non-selective cell.
25846 * Function : rgSCHCmnNonDlfsCcchSduAlloc
25848 * Processing Steps:
25849 * - For each element in the list, Call rgSCHCmnNonDlfsCcchSduRbAlloc().
25850 * - If allocation is successful, add the ueCb to scheduled list of CCCH
25852 * - else, add UeCb to non-scheduled list.
25854 * @param[in] RgSchCellCb *cell
25855 * @param[in, out] RgSchCmnCcchSduRbAlloc *allocInfo
25856 * @param[in] U8 isRetx
/* Walks the CCCH-SDU HARQ-process list (retx or tx set chosen by isRetx) and
 * attempts an RB allocation per entry via rgSCHCmnNonDlfsCcchSduRbAlloc().
 * Successes move to the scheduled list; on the first failure all remaining
 * entries are appended to the non-scheduled list. */
25861 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc
25864 RgSchCmnCcchSduRbAlloc *allocInfo,
25868 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc(cell, allocInfo, isRetx)
25870 RgSchCmnCcchSduRbAlloc *allocInfo;
25875 CmLListCp *ccchSduLst = NULLP;
25876 CmLListCp *schdCcchSduLst = NULLP;
25877 CmLListCp *nonSchdCcchSduLst = NULLP;
25878 CmLList *schdLnkNode = NULLP;
25879 CmLList *toBeSchdLnk = NULLP;
25880 RgSchDlSf *dlSf = allocInfo->ccchSduDlSf;
25881 RgSchUeCb *ueCb = NULLP;
25882 RgSchDlHqProcCb *hqP = NULLP;
25883 TRC2(rgSCHCmnNonDlfsCcchSduAlloc);
25887 /* Initialize re-transmitting lists */
25888 ccchSduLst = &(allocInfo->ccchSduRetxLst);
25889 schdCcchSduLst = &(allocInfo->schdCcchSduRetxLst);
25890 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduRetxLst);
25894 /* Initialize transmitting lists */
25895 ccchSduLst = &(allocInfo->ccchSduTxLst);
25896 schdCcchSduLst = &(allocInfo->schdCcchSduTxLst);
25897 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduTxLst);
25900 /* Perform allocations for the list */
25901 toBeSchdLnk = cmLListFirst(ccchSduLst);
25902 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
25904 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25905 ueCb = hqP->hqE->ue;
25906 schdLnkNode = &hqP->schdLstLnk;
25907 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25908 ret = rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf);
25911 /* Allocation failed: Add remaining CCCH SDU nodes to non-scheduled
25912 * list and return */
25915 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25916 ueCb = hqP->hqE->ue;
25917 schdLnkNode = &hqP->schdLstLnk;
25918 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25919 cmLListAdd2Tail(nonSchdCcchSduLst, schdLnkNode);
25920 toBeSchdLnk = toBeSchdLnk->next;
25921 } while(toBeSchdLnk);
25925 /* Allocation successful: Add UE to the scheduled list */
25926 cmLListAdd2Tail(schdCcchSduLst, schdLnkNode);
25934 * @brief Performs RB allocation for CcchSdu for frequency non-selective cell.
25938 * Function : rgSCHCmnNonDlfsCcchSduRbAlloc
25940 * Processing Steps:
25942 * - Allocate consecutively available RBs
25944 * @param[in] RgSchCellCb *cell
25945 * @param[in] RgSchUeCb *ueCb
25946 * @param[in] RgSchDlSf *dlSf
/* Allocates RBs for a single UE's CCCH SDU in the non-DLFS path: checks
 * remaining subframe bandwidth, allocates a DCI format 1A PDCCH (DTX-aware),
 * then performs an RA Type-2 localized allocation starting at type2Start and
 * updates the subframe accounting (SFR-aware when enabled). */
25952 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc
25959 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf)
25965 RgSchDlRbAlloc *allocInfo;
25966 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
25968 TRC2(rgSCHCmnNonDlfsCcchSduRbAlloc);
25971 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb,cell);
25973 /* [ccpu00138802]-MOD-If Bw is less than required, return fail
25974 It will be allocated in next TTI */
/* Bandwidth availability checks (SPS-reserved BW considered first) */
25976 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
25977 (dlSf->bwAlloced == dlSf->bw))
25979 if((dlSf->bwAlloced == dlSf->bw) ||
25980 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
25985 /* Retrieve PDCCH */
25986 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
25987 if (ueDl->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
25989 /* allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, dlSf, y, ueDl->cqi,
25990 * TFU_DCI_FORMAT_1A, TRUE);*/
25991 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, TRUE);
25995 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE);
25997 if (!(allocInfo->pdcch))
25999 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
26003 /* Update allocation information */
26004 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
26005 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
26006 allocInfo->allocInfo.raType2.isLocal = TRUE;
26008 /*Fix for ccpu00123918*/
26009 /* Push this harq process back to the free queue */
26010 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
26011 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
26012 allocInfo->rbsAlloc = allocInfo->rbsReq;
26013 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26014 /* Update the sub-frame with new allocation */
26016 /* LTE_ADV_FLAG_REMOVED_START */
/* SFR enabled: common-channel Type-2 update goes through the SFR variant */
26018 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
26020 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf,
26021 allocInfo->allocInfo.raType2.rbStart,
26022 allocInfo->allocInfo.raType2.numRb);
26025 #endif /* end of ifndef LTE_TDD*/
26027 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf,
26028 allocInfo->allocInfo.raType2.rbStart,
26029 allocInfo->allocInfo.raType2.numRb);
26032 /* LTE_ADV_FLAG_REMOVED_END */
26033 /* ccpu00131941 - bwAlloced is updated from SPS bandwidth */
26041 * @brief Performs RB allocation for Msg4 for frequency non-selective cell.
26045 * Function : rgSCHCmnNonDlfsMsg4RbAlloc
26047 * Processing Steps:
26049 * - Allocate consecutively available RBs
26051 * @param[in] RgSchCellCb *cell
26052 * @param[in] RgSchRaCb *raCb
26053 * @param[in] RgSchDlSf *dlSf
/* Allocates RBs for one Msg4 transmission in the non-DLFS path. Two paths are
 * visible: the legacy LTE path (DCI 1A, RA Type-2 localized allocation with
 * SFR-aware subframe update) and the 5GTF path (DCI B1, VRBG allocation from
 * beam 0 with RIV computation). */
26059 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc
26066 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf)
26072 RgSchDlRbAlloc *allocInfo;
26073 TRC2(rgSCHCmnNonDlfsMsg4RbAlloc);
26076 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_RACB(raCb);
/* 5GTF: Msg4 always uses beam 0; enforce the VRBG budget */
26079 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
26080 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
26082 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
26083 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
26085 printf("5GTF_ERROR vrbg allocated > 25\n");
/* Bandwidth availability checks (SPS-reserved BW considered first) */
26090 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
26091 (dlSf->bwAlloced == dlSf->bw))
26093 if((dlSf->bwAlloced == dlSf->bw) ||
26094 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
26101 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
26102 if (raCb->dlHqE->msg4Proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
26104 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, TRUE);
26108 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, FALSE);
26110 if (!(allocInfo->pdcch))
26112 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
26117 /* SR_RACH_STATS : MSG4 TX Failed */
26118 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
26120 /* Update allocation information */
26121 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
26122 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
26123 allocInfo->allocInfo.raType2.isLocal = TRUE;
26126 /*Fix for ccpu00123918*/
26127 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
26128 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
26129 /* LTE_ADV_FLAG_REMOVED_START */
26131 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
26133 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
26134 allocInfo->allocInfo.raType2.rbStart, \
26135 allocInfo->allocInfo.raType2.numRb);
26138 #endif /* end of ifndef LTE_TDD */
26140 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
26141 allocInfo->allocInfo.raType2.rbStart, \
26142 allocInfo->allocInfo.raType2.numRb);
26144 /* LTE_ADV_FLAG_REMOVED_END */
26146 allocInfo->rbsAlloc = allocInfo->rbsReq;
26147 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26151 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
/* 5GTF branch: build the DL grant for TB0 from beam 0's VRBG cursor */
26153 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
26154 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
26156 /* Update allocation information */
26157 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26159 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
26160 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
26161 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
/* VRBG -> PRB translation (MAX_5GTF_VRBG_SIZE PRBs per VRBG) */
26163 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
26164 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
26167 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26168 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26169 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26177 * @brief Performs RB allocation for Msg4 lists of frequency non-selective cell.
26181 * Function : rgSCHCmnNonDlfsMsg4Alloc
26183 * Processing Steps:
26184 * - For each element in the list, Call rgSCHCmnNonDlfsMsg4RbAlloc().
26185 * - If allocation is successful, add the raCb to scheduled list of MSG4.
26186 * - else, add RaCb to non-scheduled list.
26188 * @param[in] RgSchCellCb *cell
26189 * @param[in, out] RgSchCmnMsg4RbAlloc *allocInfo
26190 * @param[in] U8 isRetx
/* Walks the Msg4 HARQ-process list (retx or tx set chosen by isRetx) and
 * attempts an RB allocation per entry via rgSCHCmnNonDlfsMsg4RbAlloc().
 * Successes move to the scheduled list; on the first failure all remaining
 * entries are appended to the non-scheduled list. */
26195 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc
26198 RgSchCmnMsg4RbAlloc *allocInfo,
26202 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc(cell, allocInfo, isRetx)
26204 RgSchCmnMsg4RbAlloc *allocInfo;
26209 CmLListCp *msg4Lst = NULLP;
26210 CmLListCp *schdMsg4Lst = NULLP;
26211 CmLListCp *nonSchdMsg4Lst = NULLP;
26212 CmLList *schdLnkNode = NULLP;
26213 CmLList *toBeSchdLnk = NULLP;
26214 RgSchDlSf *dlSf = allocInfo->msg4DlSf;
26215 RgSchRaCb *raCb = NULLP;
26216 RgSchDlHqProcCb *hqP = NULLP;
26217 TRC2(rgSCHCmnNonDlfsMsg4Alloc);
26221 /* Initialize re-transmitting lists */
26222 msg4Lst = &(allocInfo->msg4RetxLst);
26223 schdMsg4Lst = &(allocInfo->schdMsg4RetxLst);
26224 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4RetxLst);
26228 /* Initialize transmitting lists */
26229 msg4Lst = &(allocInfo->msg4TxLst);
26230 schdMsg4Lst = &(allocInfo->schdMsg4TxLst);
26231 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4TxLst);
26234 /* Perform allocations for the list */
26235 toBeSchdLnk = cmLListFirst(msg4Lst);
26236 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26238 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26239 raCb = hqP->hqE->raCb;
26240 schdLnkNode = &hqP->schdLstLnk;
26241 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26242 ret = rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf);
26245 /* Allocation failed: Add remaining MSG4 nodes to non-scheduled
26246 * list and return */
26249 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26250 raCb = hqP->hqE->raCb;
26251 schdLnkNode = &hqP->schdLstLnk;
26252 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26253 cmLListAdd2Tail(nonSchdMsg4Lst, schdLnkNode);
26254 toBeSchdLnk = toBeSchdLnk->next;
26255 } while(toBeSchdLnk);
26259 /* Allocation successful: Add UE to the scheduled list */
26260 cmLListAdd2Tail(schdMsg4Lst, schdLnkNode);
26271 * @brief Performs RB allocation for the list of UEs of a frequency
26272 * non-selective cell.
26276 * Function : rgSCHCmnNonDlfsDedRbAlloc
26278 * Processing Steps:
26279 * - For each element in the list, Call rgSCHCmnNonDlfsUeRbAlloc().
26280 * - If allocation is successful, add the ueCb to scheduled list of UEs.
26281 * - else, add ueCb to non-scheduled list of UEs.
26283 * @param[in] RgSchCellCb *cell
26284 * @param[in, out] RgSchCmnUeRbAlloc *allocInfo
26285 * @param[in] CmLListCp *ueLst,
26286 * @param[in, out] CmLListCp *schdHqPLst,
26287 * @param[in, out] CmLListCp *nonSchdHqPLst
/* Walks a list of dedicated-channel HARQ processes and attempts a per-UE RB
 * allocation via rgSCHCmnNonDlfsUeRbAlloc(). Successful entries go to
 * schdHqPLst, failures to nonSchdHqPLst; once DL bandwidth is exhausted the
 * remaining entries are bulk-moved to nonSchdHqPLst. */
26292 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc
26295 RgSchCmnUeRbAlloc *allocInfo,
26297 CmLListCp *schdHqPLst,
26298 CmLListCp *nonSchdHqPLst
26301 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc(cell, allocInfo, ueLst,
26302 schdHqPLst, nonSchdHqPLst)
26304 RgSchCmnUeRbAlloc *allocInfo;
26306 CmLListCp *schdHqPLst;
26307 CmLListCp *nonSchdHqPLst;
26311 CmLList *schdLnkNode = NULLP;
26312 CmLList *toBeSchdLnk = NULLP;
26313 RgSchDlSf *dlSf = allocInfo->dedDlSf;
26314 RgSchUeCb *ue = NULLP;
26315 RgSchDlHqProcCb *hqP = NULLP;
26317 TRC2(rgSCHCmnNonDlfsDedRbAlloc);
26320 /* Perform allocations for the list */
26321 toBeSchdLnk = cmLListFirst(ueLst);
26322 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26324 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26326 schdLnkNode = &hqP->schdLstLnk;
26327 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26329 ret = rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, &isDlBwAvail);
26332 /* Allocation failed: Add remaining UEs to non-scheduled
26333 * list and return */
26336 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26338 schdLnkNode = &hqP->schdLstLnk;
26339 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26340 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
26341 toBeSchdLnk = toBeSchdLnk->next;
26342 } while(toBeSchdLnk);
26348 #if defined (TENB_STATS) && defined (RG_5GTF)
26349 cell->tenbStats->sch.dl5gtfRbAllocPass++;
26351 /* Allocation successful: Add UE to the scheduled list */
26352 cmLListAdd2Tail(schdHqPLst, schdLnkNode);
26356 #if defined (TENB_STATS) && defined (RG_5GTF)
26357 cell->tenbStats->sch.dl5gtfRbAllocFail++;
26359 /* Allocation failed : Add UE to the non-scheduled list */
26360 printf("5GTF_ERROR Dl rb alloc failed adding nonSchdHqPLst\n");
26361 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
26369 * @brief Handles RB allocation for frequency non-selective cell.
26373 * Function : rgSCHCmnNonDlfsRbAlloc
26375 * Invoking Module Processing:
26376 * - SCH shall invoke this if downlink frequency selective is disabled for
26377 * the cell for RB allocation.
26378 * - MAX C/I/PFS/RR shall provide the requiredBytes, required RBs
26379 * estimate and subframe for each allocation to be made to SCH.
26381 * Processing Steps:
26382 * - Allocate sequentially for common channels.
26383 * - For transmitting and re-transmitting UE list.
26385 * - Perform wide-band allocations for UE in increasing order of
26387 * - Determine Imcs for the allocation.
26388 * - Determine RA type.
26389 * - Determine DCI format.
26391 * @param[in] RgSchCellCb *cell
26392 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
/* Top-level RB allocation for a frequency non-selective cell. Allocation
 * order: Msg4 retx, Msg4 tx, CCCH SDU retx, CCCH SDU tx, RA responses, then
 * dedicated traffic (tx+retx combined, retx-only, tx-only). Finally logs if
 * the scheduler selected more UEs than maxUePerDlSf, and triggers DSFR RNTP
 * compilation when DSFR is enabled. */
26397 PUBLIC Void rgSCHCmnNonDlfsRbAlloc
26400 RgSchCmnDlRbAllocInfo *allocInfo
26403 PUBLIC Void rgSCHCmnNonDlfsRbAlloc(cell, allocInfo)
26405 RgSchCmnDlRbAllocInfo *allocInfo;
26409 RgSchDlRbAlloc *reqAllocInfo;
26410 TRC2(rgSCHCmnNonDlfsRbAlloc);
26412 /* Allocate for MSG4 retransmissions */
26413 if (allocInfo->msg4Alloc.msg4RetxLst.count)
26415 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc RetxLst\n");
26416 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), TRUE);
26419 /* Allocate for MSG4 transmissions */
26420 /* Assuming all the nodes in the list need allocations: rbsReq is valid */
26421 if (allocInfo->msg4Alloc.msg4TxLst.count)
26423 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc txLst\n");
26424 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), FALSE);
26427 /* Allocate for CCCH SDU (received after guard timer expiry)
26428 * retransmissions */
26429 if (allocInfo->ccchSduAlloc.ccchSduRetxLst.count)
26431 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26432 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), TRUE);
26435 /* Allocate for CCCD SDU transmissions */
26436 /* Allocate for CCCH SDU (received after guard timer expiry) transmissions */
26437 if (allocInfo->ccchSduAlloc.ccchSduTxLst.count)
26439 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26440 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), FALSE);
26444 /* Allocate for Random access response */
26445 for (raRspCnt = 0; raRspCnt < RG_SCH_CMN_MAX_CMN_PDCCH; ++raRspCnt)
26447 /* Assuming that the requests will be filled in sequentially */
26448 reqAllocInfo = &(allocInfo->raRspAlloc[raRspCnt]);
26449 if (!reqAllocInfo->rbsReq)
26453 printf("5GTF_ERROR calling RAR rgSCHCmnNonDlfsCmnRbAlloc\n");
26454 // if ((rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo)) != ROK)
26455 if ((rgSCHCmnNonDlfsCmnRbAllocRar(cell, reqAllocInfo)) != ROK)
26461 /* Allocate for RETX+TX UEs */
26462 if(allocInfo->dedAlloc.txRetxHqPLst.count)
26464 printf("5GTF_ERROR TX RETX rgSCHCmnNonDlfsDedRbAlloc\n");
26465 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26466 &(allocInfo->dedAlloc.txRetxHqPLst),
26467 &(allocInfo->dedAlloc.schdTxRetxHqPLst),
26468 &(allocInfo->dedAlloc.nonSchdTxRetxHqPLst));
26471 if((allocInfo->dedAlloc.retxHqPLst.count))
26473 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26474 &(allocInfo->dedAlloc.retxHqPLst),
26475 &(allocInfo->dedAlloc.schdRetxHqPLst),
26476 &(allocInfo->dedAlloc.nonSchdRetxHqPLst));
26479 /* Allocate for transmitting UEs */
26480 if((allocInfo->dedAlloc.txHqPLst.count))
26482 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26483 &(allocInfo->dedAlloc.txHqPLst),
26484 &(allocInfo->dedAlloc.schdTxHqPLst),
26485 &(allocInfo->dedAlloc.nonSchdTxHqPLst));
/* Sanity log: selected UE count across the three lists vs. maxUePerDlSf.
 * (%ld vs %d variants differ only by platform CmLList count width) */
26488 RgSchCmnCell *cmnCell = RG_SCH_CMN_GET_CELL(cell);
26489 if ((allocInfo->dedAlloc.txRetxHqPLst.count +
26490 allocInfo->dedAlloc.retxHqPLst.count +
26491 allocInfo->dedAlloc.txHqPLst.count) >
26492 cmnCell->dl.maxUePerDlSf)
26494 #ifndef ALIGN_64BIT
26495 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26496 " scheduler exceed maximumUePerDlSf(%u)tx-retx %ld retx %ld tx %ld\n",
26497 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26498 allocInfo->dedAlloc.retxHqPLst.count,
26499 allocInfo->dedAlloc.txHqPLst.count));
26501 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26502 " scheduler exceed maximumUePerDlSf(%u)tx-retx %d retx %d tx %d\n",
26503 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26504 allocInfo->dedAlloc.retxHqPLst.count,
26505 allocInfo->dedAlloc.txHqPLst.count));
26510 /* LTE_ADV_FLAG_REMOVED_START */
26511 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
26513 printf("5GTF_ERROR RETX rgSCHCmnNonDlfsDsfrRntpComp\n");
26514 rgSCHCmnNonDlfsDsfrRntpComp(cell, allocInfo->dedAlloc.dedDlSf);
26516 /* LTE_ADV_FLAG_REMOVED_END */
26517 #endif /* LTE_TDD */
26521 /***********************************************************
26523 * Func : rgSCHCmnCalcRiv
26525 * Desc : This function calculates RIV.
26531 * File : rg_sch_cmn.c
26533 **********************************************************/
26536 PUBLIC U32 rgSCHCmnCalcRiv
26543 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26550 PUBLIC U32 rgSCHCmnCalcRiv
26557 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26564 U8 numRbMinus1 = numRb - 1;
26567 TRC2(rgSCHCmnCalcRiv);
26569 if (numRbMinus1 <= bw/2)
26571 riv = bw * numRbMinus1 + rbStart;
26575 riv = bw * (bw - numRbMinus1) + (bw - rbStart - 1);
26578 } /* rgSCHCmnCalcRiv */
26582 * @brief This function allocates and copies the RACH response scheduling
26583 * related information into cell control block.
26587 * Function: rgSCHCmnDlCpyRachInfo
26588 * Purpose: This function allocates and copies the RACH response
26589 * scheduling related information into cell control block
26590 * for each DL subframe.
26593 * Invoked by: Scheduler
26595 * @param[in] RgSchCellCb* cell
26596 * @param[in] RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES]
26597 * @param[in] U8 raArrSz
/* TDD only: allocates cell->rachRspLst (one entry per DL subframe in a radio
 * frame for the configured UL/DL config) and copies per-subframe RACH
 * response scheduling info (sfnOffset, subframe list) and the matching
 * deletion info from the input rachRspLst table into it. */
26602 PRIVATE S16 rgSCHCmnDlCpyRachInfo
26605 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
26609 PRIVATE S16 rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz)
26611 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES];
26615 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
26624 TRC2(rgSCHCmnDlCpyRachInfo);
26626 /* Allocate RACH response information for each DL
26627 * subframe in a radio frame */
26628 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cell->rachRspLst,
26629 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1] *
26630 sizeof(RgSchTddRachRspLst));
/* Iterate SFN indices from highest to lowest */
26636 for(sfnIdx=raArrSz-1; sfnIdx>=0; sfnIdx--)
26638 for(subfrmIdx=0; subfrmIdx < RGSCH_NUM_SUB_FRAMES; subfrmIdx++)
/* Map to the highest DL subframe index for this UL/DL config;
 * RGSCH_NUM_SUB_FRAMES acts as the "no DL subframe" sentinel */
26640 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
26641 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
26646 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx],subfrmIdx);
26648 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26650 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddNumDlSubfrmTbl[ulDlCfgIdx],subfrmIdx);
26651 sfNum = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][subfrmIdx]-1;
26652 numRfs = cell->rachRspLst[sfNum].numRadiofrms;
26653 /* For each DL subframe in which RACH response can
26654 * be sent is updated */
26657 cell->rachRspLst[sfNum].rachRsp[numRfs].sfnOffset =
26658 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset;
26659 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26661 cell->rachRspLst[sfNum].rachRsp[numRfs].\
26662 subframe[sfcount] =
26663 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].\
26666 cell->rachRspLst[sfNum].rachRsp[numRfs].numSubfrms =
26667 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26668 cell->rachRspLst[sfNum].numRadiofrms++;
26671 /* Copy the subframes to be deleted at this subframe */
26673 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
26676 cell->rachRspLst[sfNum].delInfo.sfnOffset =
26677 rachRspLst[sfnIdx][subfrmIdx].delInfo.sfnOffset;
26678 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26680 cell->rachRspLst[sfNum].delInfo.subframe[sfcount] =
26681 rachRspLst[sfnIdx][subfrmIdx].delInfo.subframe[sfcount];
26683 cell->rachRspLst[sfNum].delInfo.numSubfrms =
26684 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
26692 * @brief This function determines the iTbs based on the new CFI,
26693 * CQI and BLER based delta iTbs
26697 * Function: rgSchCmnFetchItbs
26698 * Purpose: Fetch the new iTbs when CFI changes.
26700 * @param[in] RgSchCellCb *cell
26701 * @param[in] RgSchCmnDlUe *ueDl
26702 * @param[in] U8 cqi
/* Determines the iTbs to use for a codeword, combining the CQI->TBS mapping
 * for the current CFI with the BLER-driven deltaiTbs (delta is in units of
 * 1/100 iTbs, hence the *100 / 100 arithmetic). Special subframes with CFI 3
 * (or CFI 1 on <=10 RB bandwidth) are forced to the CFI-2 table. When the
 * CFI has changed the cached per-layer iTbs is recomputed, lastCfi updated,
 * and deltaiTbs reset. */
26709 PRIVATE S32 rgSchCmnFetchItbs
26712 RgSchCmnDlUe *ueDl,
26720 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, subFrm, cqi, cfi, cwIdx, noLyr)
26722 RgSchCmnDlUe *ueDl;
26731 PRIVATE S32 rgSchCmnFetchItbs
26734 RgSchCmnDlUe *ueDl,
26741 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, cqi, cfi, cwIdx, noLyr)
26743 RgSchCmnDlUe *ueDl;
26752 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
26755 TRC2(rgSchCmnFetchItbs);
26758 /* Special Handling for Spl Sf when CFI is 3 as
26759 * CFI in Spl Sf will be max 2 */
26760 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
26762 if((cellDl->currCfi == 3) ||
26763 ((cell->bwCfg.dlTotalBw <= 10) && (cellDl->currCfi == 1)))
26765 /* Use CFI 2 in this case */
26766 iTbs = (ueDl->laCb[cwIdx].deltaiTbs +
26767 ((*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][2]))[cqi])* 100)/100;
26769 RG_SCH_CHK_ITBS_RANGE(iTbs, RGSCH_NUM_ITBS - 1);
/* CFI unchanged: reuse the cached per-layer iTbs */
26773 iTbs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1];
26775 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
26777 else /* CFI Changed. Update with new iTbs Reset the BLER*/
26780 S32 tmpiTbs = (*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][cfi]))[cqi];
26782 iTbs = (ueDl->laCb[cwIdx].deltaiTbs + tmpiTbs*100)/100;
26784 RG_SCH_CHK_ITBS_RANGE(iTbs, tmpiTbs);
26786 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
/* Cache the recomputed value and reset link-adaptation state */
26788 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1] = iTbs;
26790 ueDl->lastCfi = cfi;
26791 ueDl->laCb[cwIdx].deltaiTbs = 0;
26798 * @brief This function determines the RBs and Bytes required for BO
26799 * transmission for UEs configured with TM 1/2/6/7.
26803 * Function: rgSCHCmnDlAllocTxRb1Tb1Cw
26804 * Purpose: Allocate TB1 on CW1.
26806 * Reference Parameter effBo is filled with alloced bytes.
26807 * Returns RFAILED if BO not satisfied at all.
26809 * Invoked by: rgSCHCmnDlAllocTxRbTM1/2/6/7
26811 * @param[in] RgSchCellCb *cell
26812 * @param[in] RgSchDlSf *subFrm
26813 * @param[in] RgSchUeCb *ue
26814 * @param[in] U32 bo
26815 * @param[out] U32 *effBo
26816 * @param[in] RgSchDlHqProcCb *proc
26817 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* Determines RBs/bytes for a fresh transmission of TB1 on CW1 (TM 1/2/6/7).
 * Picks the DCI format (5GTF: B2 for rank-2 UEs, else B1), sizes the
 * allocation via rgSCHCmnDlAlloc1CwTxRb(), and on success adds the UE to the
 * cell-wide TX list and fills rbsReq/dlSf/vrbgReq in the alloc CB.
 * *effBo receives the bytes actually satisfiable; nothing is filled when the
 * 1CW allocation fails. */
26822 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw
26829 RgSchDlHqProcCb *proc,
26830 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26833 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26839 RgSchDlHqProcCb *proc;
26840 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26843 RgSchDlRbAlloc *allocInfo;
26846 TRC2(rgSCHCmnDlAllocTxRb1Tb1Cw);
26849 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
/* 5GTF: rank-2 UEs get DCI format B2, single-rank get B1 */
26851 if (ue->ue5gtfCb.rank == 2)
26853 allocInfo->dciFormat = TFU_DCI_FORMAT_B2;
26857 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26860 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26861 allocInfo->raType);
26863 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
26864 bo, &numRb, effBo);
26865 if (ret == RFAILED)
26867 /* If allocation couldn't be made then return */
26870 /* Adding UE to RbAllocInfo TX Lst */
26871 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
26872 /* Fill UE alloc Info */
26873 allocInfo->rbsReq = numRb;
26874 allocInfo->dlSf = subFrm;
26876 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
26884 * @brief This function determines the RBs and Bytes required for BO
26885 * retransmission for UEs configured with TM 1/2/6/7.
26889 * Function: rgSCHCmnDlAllocRetxRb1Tb1Cw
26890 * Purpose: Allocate TB1 on CW1.
26892 * Reference Parameter effBo is filled with alloced bytes.
26893 * Returns RFAILED if BO not satisfied at all.
26895 * Invoked by: rgSCHCmnDlAllocRetxRbTM1/2/6/7
26897 * @param[in] RgSchCellCb *cell
26898 * @param[in] RgSchDlSf *subFrm
26899 * @param[in] RgSchUeCb *ue
26900 * @param[in] U32 bo
26901 * @param[out] U32 *effBo
26902 * @param[in] RgSchDlHqProcCb *proc
26903 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* Determines RBs/bytes for a retransmission of TB1 on CW1 (TM 1/2/6/7).
 * Sizes the allocation via rgSCHCmnDlAlloc1CwRetxRb(); on failure the
 * (ue, proc) pair is pushed to the non-scheduled retx list for the specific
 * scheduler, on success it is added to the cell-wide RETX list and
 * rbsReq/dlSf/vrbgReq are filled in the alloc CB. */
26908 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw
26915 RgSchDlHqProcCb *proc,
26916 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26919 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26925 RgSchDlHqProcCb *proc;
26926 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26929 RgSchDlRbAlloc *allocInfo;
26932 TRC2(rgSCHCmnDlAllocRetxRb1Tb1Cw);
26935 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
26938 /* 5GTF: RETX DCI format same as TX */
26939 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26940 &allocInfo->raType);
26943 /* Get the Allocation in terms of RBs that are required for
26944 * this retx of TB1 */
26945 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
26947 if (ret == RFAILED)
26949 /* Allocation couldn't be made for Retx */
26950 /* Fix : syed If TxRetx allocation failed then add the UE along with the proc
26951 * to the nonSchdTxRetxUeLst and let spfc scheduler take care of it during
26953 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
26956 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
26957 /* Fill UE alloc Info */
26958 allocInfo->rbsReq = numRb;
26959 allocInfo->dlSf = subFrm;
26961 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
26969 * @brief This function determines the RBs and Bytes required for BO
26970 * transmission for UEs configured with TM 2.
26974 * Function: rgSCHCmnDlAllocTxRbTM1
26977 * Reference Parameter effBo is filled with alloced bytes.
26978 * Returns RFAILED if BO not satisfied at all.
26980 * Invoked by: rgSCHCmnDlAllocTxRb
26982 * @param[in] RgSchCellCb *cell
26983 * @param[in] RgSchDlSf *subFrm
26984 * @param[in] RgSchUeCb *ue
26985 * @param[in] U32 bo
26986 * @param[out] U32 *effBo
26987 * @param[in] RgSchDlHqProcCb *proc
26988 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM1 fresh-transmission RB allocation: thin wrapper that delegates directly
 * to the shared single-TB/single-CW allocator rgSCHCmnDlAllocTxRb1Tb1Cw(). */
26993 PRIVATE Void rgSCHCmnDlAllocTxRbTM1
27000 RgSchDlHqProcCb *proc,
27001 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27004 PRIVATE Void rgSCHCmnDlAllocTxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27010 RgSchDlHqProcCb *proc;
27011 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27014 TRC2(rgSCHCmnDlAllocTxRbTM1);
27015 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27021 * @brief This function determines the RBs and Bytes required for BO
27022 * retransmission for UEs configured with TM 1.
27026 * Function: rgSCHCmnDlAllocRetxRbTM1
27029 * Reference Parameter effBo is filled with alloced bytes.
27030 * Returns RFAILED if BO not satisfied at all.
27032 * Invoked by: rgSCHCmnDlAllocRetxRb
27034 * @param[in] RgSchCellCb *cell
27035 * @param[in] RgSchDlSf *subFrm
27036 * @param[in] RgSchUeCb *ue
27037 * @param[in] U32 bo
27038 * @param[out] U32 *effBo
27039 * @param[in] RgSchDlHqProcCb *proc
27040 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM1 retransmission RB allocation: delegates directly to the single-TB,
 * single-CW retx allocator rgSCHCmnDlAllocRetxRb1Tb1Cw().
 * NOTE(review): excerpt is decimated - remaining parameter declarations,
 * braces and return are elided here. */
27045 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1
27052 RgSchDlHqProcCb *proc,
27053 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27056 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27062 RgSchDlHqProcCb *proc;
27063 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27066 TRC2(rgSCHCmnDlAllocRetxRbTM1);
27067 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27073 * @brief This function determines the RBs and Bytes required for BO
27074 * transmission for UEs configured with TM 2.
27078 * Function: rgSCHCmnDlAllocTxRbTM2
27081 * Reference Parameter effBo is filled with alloced bytes.
27082 * Returns RFAILED if BO not satisfied at all.
27084 * Invoked by: rgSCHCmnDlAllocTxRb
27086 * @param[in] RgSchCellCb *cell
27087 * @param[in] RgSchDlSf *subFrm
27088 * @param[in] RgSchUeCb *ue
27089 * @param[in] U32 bo
27090 * @param[out] U32 *effBo
27091 * @param[in] RgSchDlHqProcCb *proc
27092 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM2 fresh-TX RB allocation: delegates directly to the single-TB,
 * single-CW allocator rgSCHCmnDlAllocTxRb1Tb1Cw().
 * NOTE(review): excerpt is decimated - remaining parameter declarations,
 * braces and return are elided here. */
27097 PRIVATE Void rgSCHCmnDlAllocTxRbTM2
27104 RgSchDlHqProcCb *proc,
27105 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27108 PRIVATE Void rgSCHCmnDlAllocTxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27114 RgSchDlHqProcCb *proc;
27115 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27118 TRC2(rgSCHCmnDlAllocTxRbTM2);
27119 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27125 * @brief This function determines the RBs and Bytes required for BO
27126 * retransmission for UEs configured with TM 2.
27130 * Function: rgSCHCmnDlAllocRetxRbTM2
27133 * Reference Parameter effBo is filled with alloced bytes.
27134 * Returns RFAILED if BO not satisfied at all.
27136 * Invoked by: rgSCHCmnDlAllocRetxRb
27138 * @param[in] RgSchCellCb *cell
27139 * @param[in] RgSchDlSf *subFrm
27140 * @param[in] RgSchUeCb *ue
27141 * @param[in] U32 bo
27142 * @param[out] U32 *effBo
27143 * @param[in] RgSchDlHqProcCb *proc
27144 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM2 retransmission RB allocation: delegates directly to the single-TB,
 * single-CW retx allocator rgSCHCmnDlAllocRetxRb1Tb1Cw().
 * NOTE(review): excerpt is decimated - remaining parameter declarations,
 * braces and return are elided here. */
27149 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2
27156 RgSchDlHqProcCb *proc,
27157 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27160 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27166 RgSchDlHqProcCb *proc;
27167 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27170 TRC2(rgSCHCmnDlAllocRetxRbTM2);
27171 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27177 * @brief This function determines the RBs and Bytes required for BO
27178 * transmission for UEs configured with TM 3.
27182 * Function: rgSCHCmnDlAllocTxRbTM3
27185 * Reference Parameter effBo is filled with alloced bytes.
27186 * Returns RFAILED if BO not satisfied at all.
27188 * Invoked by: rgSCHCmnDlAllocTxRb
27190 * @param[in] RgSchCellCb *cell
27191 * @param[in] RgSchDlSf *subFrm
27192 * @param[in] RgSchUeCb *ue
27193 * @param[in] U32 bo
27194 * @param[out] U32 *effBo
27195 * @param[in] RgSchDlHqProcCb *proc
27196 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM3 fresh-TX RB allocation: both TBs are free for new transmission,
 * so delegate to the TM3 TX+TX allocator rgSCHCmnDlTM3TxTx().
 * NOTE(review): excerpt is decimated - remaining parameter declarations,
 * braces and return are elided here. */
27201 PRIVATE Void rgSCHCmnDlAllocTxRbTM3
27208 RgSchDlHqProcCb *proc,
27209 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27212 PRIVATE Void rgSCHCmnDlAllocTxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27218 RgSchDlHqProcCb *proc;
27219 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27223 TRC2(rgSCHCmnDlAllocTxRbTM3);
27225 /* Both TBs free for TX allocation */
27226 rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo,\
27227 proc, cellWdAllocInfo);
27234 * @brief This function determines the RBs and Bytes required for BO
27235 * retransmission for UEs configured with TM 3.
27239 * Function: rgSCHCmnDlAllocRetxRbTM3
27242 * Reference Parameter effBo is filled with alloced bytes.
27243 * Returns RFAILED if BO not satisfied at all.
27245 * Invoked by: rgSCHCmnDlAllocRetxRb
27247 * @param[in] RgSchCellCb *cell
27248 * @param[in] RgSchDlSf *subFrm
27249 * @param[in] RgSchUeCb *ue
27250 * @param[in] U32 bo
27251 * @param[out] U32 *effBo
27252 * @param[in] RgSchDlHqProcCb *proc
27253 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM3 retransmission RB allocation dispatcher: if both TBs of the HARQ
 * proc are NACKED, hand over to the dual-CW retx path
 * (rgSCHCmnDlTM3RetxRetx); otherwise to the mixed retx+new-TX path
 * (rgSCHCmnDlTM3TxRetx).
 * NOTE(review): excerpt is decimated - remaining parameter declarations,
 * braces, the else keyword and return are elided here. */
27258 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3
27265 RgSchDlHqProcCb *proc,
27266 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27269 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27275 RgSchDlHqProcCb *proc;
27276 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27280 TRC2(rgSCHCmnDlAllocRetxRbTM3);
27282 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
27283 (proc->tbInfo[1].state == HQ_TB_NACKED))
27286 printf ("RETX RB TM3 nack for both hqp %d cell %d \n", proc->procId, proc->hqE->cell->cellId);
27288 /* Both TBs require RETX allocation */
27289 rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo,\
27290 proc, cellWdAllocInfo);
27294 /* One of the TBs need RETX allocation. Other TB may/may not
27295 * be available for new TX allocation. */
27296 rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo,\
27297 proc, cellWdAllocInfo);
27305 * @brief This function performs the DCI format selection in case of
27306 * Transmit Diversity scheme where there can be more
27307 * than 1 option for DCI format selection.
27311 * Function: rgSCHCmnSlctPdcchFrmt
27312 * Purpose: 1. If DLFS is enabled, then choose TM specific
27313 * DCI format for Transmit diversity. All the
27314 * TM Specific DCI Formats support Type0 and/or
27315 * Type1 resource allocation scheme. DLFS
27316 * supports only Type-0&1 Resource allocation.
27317 * 2. If DLFS is not enabled, select a DCI format
27318 * which is of smaller size. Since Non-DLFS
27319 * scheduler supports all Resource allocation
27320 * schemes, selection is based on efficiency.
27322 * Invoked by: DL UE Allocation by Common Scheduler.
27324 * @param[in] RgSchCellCb *cell
27325 * @param[in] RgSchUeCb *ue
27326 * @param[out] U8 *raType
27327 * @return TfuDciFormat
/* Select the DCI format and RA type for transmit-diversity scheduling
 * from the per-transmission-mode option table rgSchCmnDciFrmtOptns:
 * the TM-specific entry when DLFS is enabled AND the UE's TX-mode
 * transition has completed, the preferred entry otherwise.
 * Returns the chosen TfuDciFormat; *raType is the matching RA type.
 * NOTE(review): excerpt is decimated - parameter declarations, braces
 * and the else keyword are elided here. */
27331 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt
27338 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt(cell, ue, raType)
27344 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
27346 TRC2(rgSCHCmnSlctPdcchFrmt);
27348 /* ccpu00140894- Selective DCI Format and RA type should be selected only
27349 * after TX Mode transition is completed*/
27350 if ((cellSch->dl.isDlFreqSel) && (ue->txModeTransCmplt))
27352 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciRAType;
27353 RETVALUE(rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciFrmt);
27357 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciRAType;
27358 RETVALUE(rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciFrmt);
27364 * @brief This function handles Retx allocation in case of TM3 UEs
27365 * where both the TBs were NACKED previously.
27369 * Function: rgSCHCmnDlTM3RetxRetx
27370 * Purpose: If forceTD flag enabled
27371 * TD for TB1 on CW1.
27373 * DCI Frmt 2A and RA Type 0
27374 * RI layered SM of both TBs on 2 CWs
27375 * Add UE to cell Alloc Info.
27376 * Fill UE alloc Info.
27379 * Successful allocation is indicated by non-zero effBo value.
27381 * Invoked by: rgSCHCmnDlAllocRbTM3
27383 * @param[in] RgSchCellCb *cell
27384 * @param[in] RgSchDlSf *subFrm
27385 * @param[in] RgSchUeCb *ue
27386 * @param[in] U32 bo
27387 * @param[out] U32 *effBo
27388 * @param[in] RgSchDlHqProcCb *proc
27389 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM3 retx allocation when BOTH TBs were NACKED: schedule both codewords
 * with DCI format 2A / RA type 0, allocate via rgSCHCmnDlAlloc2CwRetxRb();
 * on failure defer UE+proc to the non-scheduled retx list, on success add
 * the UE to the cell-wide retx list (skipped for SPS HARQ procs) and fill
 * its allocation scratch pad.
 * NOTE(review): excerpt is decimated - declarations of ret/numRb/noTxLyrs/
 * precInfo/precInfoAntIdx, braces, #endif lines and return are elided. */
27394 PRIVATE Void rgSCHCmnDlTM3RetxRetx
27401 RgSchDlHqProcCb *proc,
27402 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27405 PRIVATE Void rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27411 RgSchDlHqProcCb *proc;
27412 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27416 RgSchDlRbAlloc *allocInfo;
27423 TRC2(rgSCHCmnDlTM3RetxRetx);
27426 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27428 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27430 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
27431 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
27433 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27435 if (ret == RFAILED)
27437 /* Allocation couldn't be made for Retx */
27438 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
27441 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27442 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27443 #ifdef FOUR_TX_ANTENNA
27444 /*Chandra: For 4X4 MIM RETX with noTxLyrs=3, CW0 should be 1-LyrTB and CW1 should
27445 * have 2-LyrTB as per Table 6.3.3.2-1 of 36.211 */
27446 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27449 proc->cwSwpEnabled = TRUE;
/* getPrecInfoFunc row 0 appears to be the TM3 precoding-info table;
 * index by antenna-port count (2 -> 0, 4 -> 1) - TODO confirm */
27452 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27453 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
27457 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27460 /* Adding UE to allocInfo RETX Lst */
27461 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27463 /* Fill UE alloc Info scratch pad */
27464 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27465 precInfo, noTxLyrs, subFrm);
27472 * @brief This function handles Retx allocation in case of TM4 UEs
27473 * where both the TBs were NACKED previously.
27477 * Function: rgSCHCmnDlTM4RetxRetx
27478 * Purpose: If forceTD flag enabled
27479 * TD for TB1 on CW1.
27481 * DCI Frmt 2 and RA Type 0
27483 * 1 layer SM of TB1 on CW1.
27485 * RI layered SM of both TBs on 2 CWs
27486 * Add UE to cell Alloc Info.
27487 * Fill UE alloc Info.
27490 * Successful allocation is indicated by non-zero effBo value.
27492 * Invoked by: rgSCHCmnDlAllocRbTM4
27494 * @param[in] RgSchCellCb *cell
27495 * @param[in] RgSchDlSf *subFrm
27496 * @param[in] RgSchUeCb *ue
27497 * @param[in] U32 bo
27498 * @param[out] U32 *effBo
27499 * @param[in] RgSchDlHqProcCb *proc
27500 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM4 retx allocation when BOTH TBs were NACKED: schedule both codewords
 * with DCI format 2 / RA type 0, allocate via rgSCHCmnDlAlloc2CwRetxRb();
 * on failure defer UE+proc to the non-scheduled retx list, on success add
 * the UE to the cell-wide retx list (skipped for SPS HARQ procs) and fill
 * its allocation scratch pad. Mirrors rgSCHCmnDlTM3RetxRetx but uses the
 * TM4 row (index 1) of getPrecInfoFunc and DCI format 2.
 * NOTE(review): excerpt is decimated - declarations of ret/numRb/noTxLyrs/
 * precInfo/precInfoAntIdx, braces, #endif lines and return are elided. */
27505 PRIVATE Void rgSCHCmnDlTM4RetxRetx
27512 RgSchDlHqProcCb *proc,
27513 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27516 PRIVATE Void rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27522 RgSchDlHqProcCb *proc;
27523 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27527 RgSchDlRbAlloc *allocInfo;
27529 Bool swpFlg = FALSE;
27531 #ifdef FOUR_TX_ANTENNA
27536 TRC2(rgSCHCmnDlTM4RetxRetx);
27539 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27541 /* Irrespective of RI Schedule both CWs */
27542 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
27543 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
27545 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27547 if (ret == RFAILED)
27549 /* Allocation couldn't be made for Retx */
27550 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
/* Total layers = sum of layers previously used by the two TBs */
27553 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27555 #ifdef FOUR_TX_ANTENNA
27556 /*Chandra: For 4X4 MIM RETX with noTxLyrs=3, CW0 should be 1-LyrTB and CW1
27557 * should have 2-LyrTB as per Table 6.3.3.2-1 of 36.211 */
27558 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27561 proc->cwSwpEnabled = TRUE;
27563 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27564 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
27568 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27571 /* Adding UE to allocInfo RETX Lst */
27572 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27574 /* Fill UE alloc Info scratch pad */
27575 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27576 precInfo, noTxLyrs, subFrm);
27584 * @brief This function determines Transmission attributes
27585 * in case of Spatial multiplexing for TX and RETX TBs.
27589 * Function: rgSCHCmnDlSMGetAttrForTxRetx
27590 * Purpose: 1. Reached here for a TM3/4 UE's HqP whose one of the TBs is
27591 * NACKED and the other TB is either NACKED or WAITING.
27592 * 2. Select the NACKED TB for RETX allocation.
27593 * 3. Allocation preference for RETX TB by mapping it to a better
27594 * CW (better in terms of efficiency).
27595 * 4. Determine the state of the other TB.
27596 * Determine if swapFlag were to be set.
27597 * Swap flag would be set if Retx TB is cross
27599 * 5. If UE has new data available for TX and if the other TB's state
27600 * is ACKED then set furtherScope as TRUE.
27602 * Invoked by: rgSCHCmnDlTM3[4]TxRetx
27604 * @param[in] RgSchUeCb *ue
27605 * @param[in] RgSchDlHqProcCb *proc
27606 * @param[out] RgSchDlHqTbCb **retxTb
27607 * @param[out] RgSchDlHqTbCb **txTb
27608 * @param[out] Bool *frthrScp
27609 * @param[out] Bool *swpFlg
/* For a TM3/TM4 HARQ proc with one NACKED TB: pick the NACKED TB as
 * *retxTb and the other as *txTb; when the other TB is ACKED, report via
 * *frthrScp whether the UE has new TX data (so a fresh TB can ride the
 * second codeword). The codeword-swap flag path is present but
 * commented as disabled for TM3/TM4 (HqFeedback does not handle swapped
 * feedback); cwSwpEnabled is still set for TM4 when the better CW index
 * opposes the retx TB.
 * NOTE(review): excerpt is decimated - the frthrScp/swpFlg parameter
 * declarations, braces, else keyword and return are elided here. */
27614 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx
27617 RgSchDlHqProcCb *proc,
27618 RgSchDlHqTbCb **retxTb,
27619 RgSchDlHqTbCb **txTb,
27624 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, frthrScp,\
27627 RgSchDlHqProcCb *proc;
27628 RgSchDlHqTbCb **retxTb;
27629 RgSchDlHqTbCb **txTb;
27634 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,proc->hqE->cell);
27635 RgSchDlRbAlloc *allocInfo;
27637 TRC2(rgSCHCmnDlSMGetAttrForTxRetx);
/* TB0 NACKED: retransmit TB0, TB1 is the candidate for new TX */
27639 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27641 *retxTb = &proc->tbInfo[0];
27642 *txTb = &proc->tbInfo[1];
27643 /* TENB_BRDCM_TM4- Currently disabling swapflag for TM3/TM4, since
27644 * HqFeedback processing does not consider a swapped hq feedback */
27645 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 1))
27648 proc->cwSwpEnabled = TRUE;
27650 if (proc->tbInfo[1].state == HQ_TB_ACKED)
27652 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27653 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
/* Mirror case: TB1 NACKED, TB0 is the candidate for new TX */
27658 *retxTb = &proc->tbInfo[1];
27659 *txTb = &proc->tbInfo[0];
27660 /* TENB_BRDCM_TM4 - Currently disabling swapflag for TM3/TM4, since
27661 * HqFeedback processing does not consider a swapped hq feedback */
27662 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 0))
27665 proc->cwSwpEnabled = TRUE;
27667 if (proc->tbInfo[0].state == HQ_TB_ACKED)
27669 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27670 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
27678 * @brief Determine Precoding information for TM3 2 TX Antenna.
27682 * Function: rgSCHCmnDlTM3PrecInf2
27685 * Invoked by: rgSCHCmnDlGetAttrForTM3
27687 * @param[in] RgSchUeCb *ue
27688 * @param[in] U8 numTxLyrs
27689 * @param[in] Bool bothCwEnbld
/* Derive the DCI precoding-information field for TM3 with 2 TX antennas
 * (per the doxygen above, from 36.212 precoding tables).
 * NOTE(review): excerpt is decimated - the parameter declarations, body
 * and return value are entirely elided from this view. */
27694 PRIVATE U8 rgSCHCmnDlTM3PrecInf2
27702 PRIVATE U8 rgSCHCmnDlTM3PrecInf2(ue, numTxLyrs, bothCwEnbld)
27709 TRC2(rgSCHCmnDlTM3PrecInf2);
27716 * @brief Determine Precoding information for TM4 2 TX Antenna.
27720 * Function: rgSCHCmnDlTM4PrecInf2
27721 * Purpose: To determine a logic of deriving precoding index
27722 * information from 36.212 table 5.3.3.1.5-4
27724 * Invoked by: rgSCHCmnDlGetAttrForTM4
27726 * @param[in] RgSchUeCb *ue
27727 * @param[in] U8 numTxLyrs
27728 * @param[in] Bool bothCwEnbld
/* Derive the DCI precoding-information index for TM4 with 2 TX antennas
 * (36.212 table 5.3.3.1.5-4, per the doxygen above), branching on how the
 * reported RI compares with the number of layers being transmitted and on
 * whether PUSCH-based feedback is valid.
 * NOTE(review): excerpt is decimated - parameter declarations, braces,
 * several branch bodies and the return of precIdx are elided here. */
27733 PRIVATE U8 rgSCHCmnDlTM4PrecInf2
27741 PRIVATE U8 rgSCHCmnDlTM4PrecInf2(ue, numTxLyrs, bothCwEnbld)
27748 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27751 TRC2(rgSCHCmnDlTM4PrecInf2);
/* RI matches the scheduled layer count */
27753 if (ueDl->mimoInfo.ri == numTxLyrs)
27755 if (ueDl->mimoInfo.ri == 2)
27757 /* PrecInfo corresponding to 2 CW
27759 if (ue->mimoInfo.puschFdbkVld)
27765 precIdx = ueDl->mimoInfo.pmi - 1;
27770 /* PrecInfo corresponding to 1 CW
27772 if (ue->mimoInfo.puschFdbkVld)
27778 precIdx = ueDl->mimoInfo.pmi + 1;
27782 else if (ueDl->mimoInfo.ri > numTxLyrs)
27784 /* In case of choosing among the columns of a
27785 * precoding matrix, choose the column corresponding
27786 * to the MAX-CQI */
27787 if (ue->mimoInfo.puschFdbkVld)
27793 precIdx = (ueDl->mimoInfo.pmi- 1)* 2 + 1;
27796 else /* if RI < numTxLyrs */
27798 precIdx = (ueDl->mimoInfo.pmi < 2)? 0:1;
27805 * @brief Determine Precoding information for TM3 4 TX Antenna.
27809 * Function: rgSCHCmnDlTM3PrecInf4
27810 * Purpose: To determine a logic of deriving precoding index
27811 * information from 36.212 table 5.3.3.1.5A-2
27813 * Invoked by: rgSCHCmnDlGetAttrForTM3
27815 * @param[in] RgSchUeCb *ue
27816 * @param[in] U8 numTxLyrs
27817 * @param[in] Bool bothCwEnbld
/* Derive the DCI precoding-information index for TM3 with 4 TX antennas
 * (36.212 table 5.3.3.1.5A-2, per the doxygen above): for the visible
 * two-codeword branch the index is numTxLyrs - 2.
 * NOTE(review): excerpt is decimated - parameter declarations, the
 * two-CW condition, the one-CW branch body and the return are elided. */
27822 PRIVATE U8 rgSCHCmnDlTM3PrecInf4
27830 PRIVATE U8 rgSCHCmnDlTM3PrecInf4(ue, numTxLyrs, bothCwEnbld)
27839 TRC2(rgSCHCmnDlTM3PrecInf4);
27843 precIdx = numTxLyrs - 2;
27845 else /* one 1 CW transmission */
27854 * @brief Determine Precoding information for TM4 4 TX Antenna.
27858 * Function: rgSCHCmnDlTM4PrecInf4
27859 * Purpose: To determine a logic of deriving precoding index
27860 * information from 36.212 table 5.3.3.1.5-5
27862 * Invoked by: rgSCHCmnDlGetAttrForTM4
27864 * @param[in] RgSchUeCb *ue
27865 * @param[in] U8 numTxLyrs
27866 * @param[in] Bool bothCwEnbld
/* Derive the DCI precoding-information index for TM4 with 4 TX antennas
 * (36.212 table 5.3.3.1.5-5, per the doxygen above). Base index is 16
 * when PUSCH feedback is valid, else the reported PMI; the final index
 * adds a per-layer-count offset in steps of 17 (two-CW uses
 * (numTxLyrs-2)*17, one-CW bumps the base by 1 and uses (numTxLyrs-1)*17).
 * NOTE(review): excerpt is decimated - parameter declarations, braces,
 * the two-CW condition and the return of precIdx are elided here. */
27871 PRIVATE U8 rgSCHCmnDlTM4PrecInf4
27879 PRIVATE U8 rgSCHCmnDlTM4PrecInf4(cell, ue, numTxLyrs, bothCwEnbld)
27886 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27887 U8 precInfoBaseIdx, precIdx;
27889 TRC2(rgSCHCmnDlTM4PrecInf4);
27891 precInfoBaseIdx = (ue->mimoInfo.puschFdbkVld)? (16):
27892 (ueDl->mimoInfo.pmi);
27895 precIdx = precInfoBaseIdx + (numTxLyrs-2)*17;
27897 else /* one 1 CW transmission */
27899 precInfoBaseIdx += 1;
27900 precIdx = precInfoBaseIdx + (numTxLyrs-1)*17;
27907 * @brief This function determines Transmission attributes
27908 * in case of TM3 scheduling.
27912 * Function: rgSCHCmnDlGetAttrForTM3
27913 * Purpose: Determine retx TB and tx TB based on TB states.
27914 * If forceTD enabled
27915 * perform only retx TB allocation.
27916 * If retxTB == TB2 then DCI Frmt = 2A, RA Type = 0.
27917 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
27919 * perform retxTB allocation on CW1.
27921 * Determine further Scope and Swap Flag attributes
27922 * assuming a 2 CW transmission of RetxTB and new Tx TB.
27923 * If no further scope for new TX allocation
27924 * Allocate only retx TB using 2 layers if
27925 * this TB was previously transmitted using 2 layers AND
27926 * number of Tx antenna ports == 4.
27927 * otherwise do single layer precoding.
27929 * Invoked by: rgSCHCmnDlTM3TxRetx
27931 * @param[in] RgSchUeCb *ue
27932 * @param[in] RgSchDlHqProcCb *proc
27933 * @param[out] U8 *numTxLyrs
27934 * @param[out] Bool *isTraDiv
27935 * @param[out] U8 *prcdngInf
27936 * @param[out] U8 *raType
/* Determine TM3 TX attributes for a mixed retx/new-TX HARQ proc:
 * which TB to retransmit (*retxTb) and which may carry new data (*txTb),
 * the DCI format, RA type, layer count, precoding info, further-scope and
 * swap flags. Forced transmit diversity / SPS / LAA cells take the
 * single-TB TD path; otherwise the attributes come from
 * rgSCHCmnDlSMGetAttrForTxRetx() plus RI/layer/antenna-port rules.
 * NOTE(review): excerpt is decimated - many parameter declarations,
 * braces, else keywords, #ifdef guards and the return are elided here. */
27941 PRIVATE Void rgSCHCmnDlGetAttrForTM3
27945 RgSchDlHqProcCb *proc,
27947 TfuDciFormat *dciFrmt,
27949 RgSchDlHqTbCb **retxTb,
27950 RgSchDlHqTbCb **txTb,
27956 PRIVATE Void rgSCHCmnDlGetAttrForTM3(cell, ue, proc, numTxLyrs, dciFrmt,\
27957 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
27960 RgSchDlHqProcCb *proc;
27962 TfuDciFormat *dciFrmt;
27964 RgSchDlHqTbCb **retxTb;
27965 RgSchDlHqTbCb **txTb;
27971 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27974 TRC2(rgSCHCmnDlGetAttrForTM3);
27976 /* Avoiding Tx-Retx for LAA cell as firstSchedTime is associated with
27978 /* Integration_fix: SPS Proc shall always have only one Cw */
/* SPS / forced-TD / LAA: restrict to a single-TB allocation */
27980 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
27981 (ueDl->mimoInfo.forceTD))
27983 ||(TRUE == rgSCHLaaSCellEnabled(cell))
27987 if ((ueDl->mimoInfo.forceTD)
27989 || (TRUE == rgSCHLaaSCellEnabled(cell))
27994 /* Transmit Diversity. Format based on dlfsEnabled
27995 * No further scope */
27996 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27998 *retxTb = &proc->tbInfo[0];
27999 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
/* TB2 retx cannot use the TD formats: force 2A/Type0 */
28003 *retxTb = &proc->tbInfo[1];
28004 *dciFrmt = TFU_DCI_FORMAT_2A;
28005 *raType = RG_SCH_CMN_RA_TYPE0;
28013 /* Determine the 2 TB transmission attributes */
28014 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
28018 /* Prefer allocation of RETX TB over 2 layers rather than combining
28019 * it with a new TX. */
28020 if ((ueDl->mimoInfo.ri == 2)
28021 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
28023 /* Allocate TB on CW1, using 2 Lyrs,
28024 * Format 2, precoding accordingly */
/* Combined retx + new-TX: total layers = retx TB layers plus the
 * layers of the better codeword's counterpart */
28030 *numTxLyrs= ((*retxTb)->numLyrs + ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)].noLyr);
28032 if((*retxTb)->tbIdx == 0 && ((*retxTb)->numLyrs == 2 ) && *numTxLyrs ==3)
28035 proc->cwSwpEnabled = TRUE;
28037 else if((*retxTb)->tbIdx == 1 && ((*retxTb)->numLyrs == 1) && *numTxLyrs ==3)
28040 proc->cwSwpEnabled = TRUE;
28044 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28045 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])\
28046 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28047 *dciFrmt = TFU_DCI_FORMAT_2A;
28048 *raType = RG_SCH_CMN_RA_TYPE0;
28050 else /* frthrScp == FALSE */
28052 if (cell->numTxAntPorts == 2)
28054 /* Transmit Diversity */
28056 if ((*retxTb)->tbIdx == 0)
28058 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28062 /* If retxTB is TB2 then use format 2A */
28063 *dciFrmt = TFU_DCI_FORMAT_2A;
28064 *raType = RG_SCH_CMN_RA_TYPE0;
28069 else /* NumAntPorts == 4 */
28071 if ((*retxTb)->numLyrs == 2)
28073 /* Allocate TB on CW1, using 2 Lyrs,
28074 * Format 2A, precoding accordingly */
28076 *dciFrmt = TFU_DCI_FORMAT_2A;
28077 *raType = RG_SCH_CMN_RA_TYPE0;
28078 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28079 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, *numTxLyrs, *frthrScp);
28084 /* Transmit Diversity */
28086 if ((*retxTb)->tbIdx == 0)
28088 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28092 /* If retxTB is TB2 then use format 2A */
28093 *dciFrmt = TFU_DCI_FORMAT_2A;
28094 *raType = RG_SCH_CMN_RA_TYPE0;
28108 * @brief This function determines Transmission attributes
28109 * in case of TM4 scheduling.
28113 * Function: rgSCHCmnDlGetAttrForTM4
28114 * Purpose: Determine retx TB and tx TB based on TB states.
28115 * If forceTD enabled
28116 * perform only retx TB allocation.
28117 * If retxTB == TB2 then DCI Frmt = 2, RA Type = 0.
28118 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
28120 * perform retxTB allocation on CW1.
28122 * Determine further Scope and Swap Flag attributes
28123 * assuming a 2 CW transmission of RetxTB and new Tx TB.
28124 * If no further scope for new TX allocation
28125 * Allocate only retx TB using 2 layers if
28126 * this TB was previously transmitted using 2 layers AND
28127 * number of Tx antenna ports == 4.
28128 * otherwise do single layer precoding.
28130 * Invoked by: rgSCHCmnDlTM4TxRetx
28132 * @param[in] RgSchUeCb *ue
28133 * @param[in] RgSchDlHqProcCb *proc
28134 * @param[out] U8 *numTxLyrs
28135 * @param[out] Bool *isTraDiv
28136 * @param[out] U8 *prcdngInf
28137 * @param[out] U8 *raType
/* Determine TM4 TX attributes for a mixed retx/new-TX HARQ proc:
 * retx/new-TX TB selection, DCI format (format 2 outside the TD path),
 * RA type, layer count, precoding info, further-scope and swap flags.
 * SPS / forced-TD / LAA cells take the single-TB TD path; RI==1 forces
 * single-layer precoding (*prcdngInf = 0); otherwise attributes come
 * from rgSCHCmnDlSMGetAttrForTxRetx() plus RI/layer/antenna-port rules,
 * using the TM4 row (index 1) of getPrecInfoFunc.
 * NOTE(review): excerpt is decimated - many parameter declarations,
 * braces, else keywords, #ifdef guards and the return are elided here. */
28142 PRIVATE Void rgSCHCmnDlGetAttrForTM4
28146 RgSchDlHqProcCb *proc,
28148 TfuDciFormat *dciFrmt,
28150 RgSchDlHqTbCb **retxTb,
28151 RgSchDlHqTbCb **txTb,
28157 PRIVATE Void rgSCHCmnDlGetAttrForTM4(cell, ue, proc, numTxLyrs, dciFrmt,\
28158 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
28161 RgSchDlHqProcCb *proc;
28163 TfuDciFormat *dciFrmt;
28165 RgSchDlHqTbCb **retxTb;
28166 RgSchDlHqTbCb **txTb;
28172 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28175 TRC2(rgSCHCmnDlGetAttrForTM4);
28178 /* Integration_fix: SPS Proc shall always have only one Cw */
/* SPS / forced-TD / LAA: restrict to a single-TB allocation */
28180 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28181 (ueDl->mimoInfo.forceTD))
28183 ||(TRUE == rgSCHLaaSCellEnabled(cell))
28187 if ((ueDl->mimoInfo.forceTD)
28189 || (TRUE == rgSCHLaaSCellEnabled(cell))
28194 /* Transmit Diversity. Format based on dlfsEnabled
28195 * No further scope */
28196 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28198 *retxTb = &proc->tbInfo[0];
28199 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
/* TB2 retx cannot use the TD formats: force format 2/Type0 */
28203 *retxTb = &proc->tbInfo[1];
28204 *dciFrmt = TFU_DCI_FORMAT_2;
28205 *raType = RG_SCH_CMN_RA_TYPE0;
28213 if (ueDl->mimoInfo.ri == 1)
28215 /* single layer precoding. Format 2.
28216 * No further scope */
28217 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28219 *retxTb = &proc->tbInfo[0];
28223 *retxTb = &proc->tbInfo[1];
28226 *dciFrmt = TFU_DCI_FORMAT_2;
28227 *raType = RG_SCH_CMN_RA_TYPE0;
28229 *prcdngInf = 0; /*When RI= 1*/
28233 /* Determine the 2 TB transmission attributes */
28234 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
28236 *dciFrmt = TFU_DCI_FORMAT_2;
28237 *raType = RG_SCH_CMN_RA_TYPE0;
28240 /* Prefer allocation of RETX TB over 2 layers rather than combining
28241 * it with a new TX. */
28242 if ((ueDl->mimoInfo.ri == 2)
28243 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
28245 /* Allocate TB on CW1, using 2 Lyrs,
28246 * Format 2, precoding accordingly */
28250 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28251 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])
28252 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28254 else /* frthrScp == FALSE */
28256 if (cell->numTxAntPorts == 2)
28258 /* single layer precoding. Format 2. */
28260 *prcdngInf = (getPrecInfoFunc[1][cell->numTxAntPorts/2 - 1])\
28261 (cell, ue, *numTxLyrs, *frthrScp);
28264 else /* NumAntPorts == 4 */
28266 if ((*retxTb)->numLyrs == 2)
28268 /* Allocate TB on CW1, using 2 Lyrs,
28269 * Format 2, precoding accordingly */
28271 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28272 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28273 (cell, ue, *numTxLyrs, *frthrScp);
28278 /* Allocate TB with 1 lyr precoding,
28279 * Format 2, precoding info accordingly */
28281 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28282 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28283 (cell, ue, *numTxLyrs, *frthrScp);
28294 * @brief This function handles Retx allocation in case of TM3 UEs
28295 * where previously one of the TBs was NACKED and the other
28296 * TB is either ACKED/WAITING.
28300 * Function: rgSCHCmnDlTM3TxRetx
28301 * Purpose: Determine the TX attributes for TM3 TxRetx Allocation.
28302 * If further Scope for New Tx Allocation on other TB
28303 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28304 * Add UE to cell wide RetxTx List.
28306 * Perform only RETX alloc'n on CW1.
28307 * Add UE to cell wide Retx List.
28309 * effBo is set to a non-zero value if allocation is
28312 * Invoked by: rgSCHCmnDlAllocRbTM3
28314 * @param[in] RgSchCellCb *cell
28315 * @param[in] RgSchDlSf *subFrm
28316 * @param[in] RgSchUeCb *ue
28317 * @param[in] U32 bo
28318 * @param[out] U32 *effBo
28319 * @param[in] RgSchDlHqProcCb *proc
28320 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM3 mixed allocation (one TB NACKED, the other ACKED/WAITING):
 * determine TX attributes via rgSCHCmnDlGetAttrForTM3(); when there is
 * further scope, allocate retx + new TX on two codewords and add the UE
 * to the cell-wide RETX-TX list, otherwise allocate only the retx TB on
 * one codeword and add the UE to the RETX list (skipped for SPS HqP).
 * On any allocation failure the UE+proc go to the non-scheduled retx
 * list. Finally the per-UE allocation scratch pad is filled.
 * NOTE(review): excerpt is decimated - declarations of ret/numRb/
 * numTxLyrs/prcdngInf/frthrScp/swpFlg, braces, the frthrScp branch
 * keywords and the return are elided here. */
28325 PRIVATE Void rgSCHCmnDlTM3TxRetx
28332 RgSchDlHqProcCb *proc,
28333 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28336 PRIVATE Void rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28342 RgSchDlHqProcCb *proc;
28343 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28347 RgSchDlRbAlloc *allocInfo;
28349 RgSchDlHqTbCb *retxTb, *txTb;
28355 TRC2(rgSCHCmnDlTM3TxRetx);
28359 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28362 /* Determine the transmission attributes */
28363 rgSCHCmnDlGetAttrForTM3(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28364 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28365 &allocInfo->raType);
28370 printf ("TX RETX called from proc %d cell %d \n",proc->procId, cell->cellId);
/* Further scope: retx TB and a fresh TB share the two codewords */
28372 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28374 if (ret == RFAILED)
28376 /* Allocation couldn't be made for Retx */
28377 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28380 /* Adding UE to RbAllocInfo RETX-TX Lst */
28381 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
/* No further scope: retx-only allocation on a single codeword */
28385 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28386 numTxLyrs, &numRb, effBo);
28387 if (ret == RFAILED)
28389 /* Allocation couldn't be made for Retx */
28390 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28394 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28397 /* Adding UE to allocInfo RETX Lst */
28398 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
28401 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28402 prcdngInf, numTxLyrs, subFrm);
28409 * @brief This function handles Retx allocation in case of TM4 UEs
28410 * where previously one of the TBs was NACKED and the other
28411 * TB is either ACKED/WAITING.
28415 * Function: rgSCHCmnDlTM4TxRetx
28416 * Purpose: Determine the TX attributes for TM4 TxRetx Allocation.
28417 * If further Scope for New Tx Allocation on other TB
28418 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28419 * Add UE to cell wide RetxTx List.
28421 * Perform only RETX alloc'n on CW1.
28422 * Add UE to cell wide Retx List.
28424 * effBo is set to a non-zero value if allocation is
28427 * Invoked by: rgSCHCmnDlAllocRbTM4
28429 * @param[in] RgSchCellCb *cell
28430 * @param[in] RgSchDlSf *subFrm
28431 * @param[in] RgSchUeCb *ue
28432 * @param[in] U32 bo
28433 * @param[out] U32 *effBo
28434 * @param[in] RgSchDlHqProcCb *proc
28435 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM4 mixed allocation (one TB NACKED, the other ACKED/WAITING):
 * mirrors rgSCHCmnDlTM3TxRetx but gets its attributes from
 * rgSCHCmnDlGetAttrForTM4(). Two-codeword retx+TX when further scope
 * exists (UE added to RETX-TX list), otherwise single-codeword retx
 * (UE added to RETX list unless the proc is an SPS HqP); failures defer
 * the UE+proc to the non-scheduled retx list.
 * NOTE(review): excerpt is decimated - declarations of ret/numRb/
 * numTxLyrs/prcdngInf/frthrScp/swpFlg, braces, branch keywords and the
 * return are elided; the FILL_DL_TXINFO line's terminator is also not
 * visible in this view. */
28440 PRIVATE Void rgSCHCmnDlTM4TxRetx
28447 RgSchDlHqProcCb *proc,
28448 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28451 PRIVATE Void rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28457 RgSchDlHqProcCb *proc;
28458 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28462 RgSchDlRbAlloc *allocInfo;
28464 RgSchDlHqTbCb *retxTb, *txTb;
28470 TRC2(rgSCHCmnDlTM4TxRetx);
28473 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28476 /* Determine the transmission attributes */
28477 rgSCHCmnDlGetAttrForTM4(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28478 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28479 &allocInfo->raType);
/* Further scope: retx TB and a fresh TB share the two codewords */
28483 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28485 if (ret == RFAILED)
28487 /* Fix : syed If TxRetx allocation failed then add the UE along
28488 * with the proc to the nonSchdTxRetxUeLst and let spfc scheduler
28489 * take care of it during finalization. */
28490 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28493 /* Adding UE to RbAllocInfo RETX-TX Lst */
28494 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
/* No further scope: retx-only allocation on a single codeword */
28498 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28499 numTxLyrs, &numRb, effBo);
28500 if (ret == RFAILED)
28502 /* Allocation couldn't be made for Retx */
28503 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28507 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28510 /* Adding UE to allocInfo RETX Lst */
28511 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
28514 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28515 prcdngInf, numTxLyrs, subFrm)
28522 * @brief This function handles Retx allocation in case of TM4 UEs
28523 * where previously both the TBs were ACKED and ACKED
28528 * Function: rgSCHCmnDlTM3TxTx
28529 * Purpose: Reached here for a TM3 UE's HqP's fresh allocation
28530 * where both the TBs are free for TX scheduling.
28531 * If forceTD flag is set
28532 * perform TD on CW1 with TB1.
28537 * RI layered precoding 2 TB on 2 CW.
28538 * Set precoding info.
28539 * Add UE to cellAllocInfo.
28540 * Fill ueAllocInfo.
28542 * effBo is set to a non-zero value if allocation is
28545 * Invoked by: rgSCHCmnDlAllocRbTM3
28547 * @param[in] RgSchCellCb *cell
28548 * @param[in] RgSchDlSf *subFrm
28549 * @param[in] RgSchUeCb *ue
28550 * @param[in] U32 bo
28551 * @param[out] U32 *effBo
28552 * @param[in] RgSchDlHqProcCb *proc
28553 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* TM3 fresh allocation when both TBs are free for new TX:
 * forced-TD / SPS (and, under FOUR_TX_ANTENNA, DCCH-carrying) procs take
 * a single-CW path (TD with precInfo 0); otherwise spatially multiplex
 * two codewords with DCI format 2A / RA type 0 and RI layers. On success
 * the UE is added to the cell-wide TX list (skipped for SPS HqP) and its
 * allocation scratch pad filled; on allocation failure the function just
 * returns with *effBo untouched.
 * NOTE(review): excerpt is decimated - declarations of ret/numRb/precInfo/
 * noTxLyrs/precInfoAntIdx, braces, #else/#endif lines, the early returns
 * and the final return are elided here. */
28558 PRIVATE Void rgSCHCmnDlTM3TxTx
28565 RgSchDlHqProcCb *proc,
28566 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28569 PRIVATE Void rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28575 RgSchDlHqProcCb *proc;
28576 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28579 RgSchCmnDlUe *ueDl;
28580 RgSchDlRbAlloc *allocInfo;
28587 TRC2(rgSCHCmnDlTM3TxTx);
28590 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28591 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28593 /* Integration_fix: SPS Proc shall always have only one Cw */
28595 #ifdef FOUR_TX_ANTENNA
28596 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28597 (ueDl->mimoInfo.forceTD) || proc->hasDcch) /*Chandra Avoid DCCH to be SM */
28599 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28600 (ueDl->mimoInfo.forceTD))
28603 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
28606 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28607 &allocInfo->raType);
28608 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28609 bo, &numRb, effBo);
28610 if (ret == RFAILED)
28612 /* If allocation couldn't be made then return */
28616 precInfo = 0; /* TD */
28618 else /* Precoding */
28620 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
28621 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28623 /* Spatial Multiplexing using 2 CWs */
28624 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28625 if (ret == RFAILED)
28627 /* If allocation couldn't be made then return */
/* SM path transmits RI layers; pick precoding from the TM3 table */
28630 noTxLyrs = ueDl->mimoInfo.ri;
28631 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28632 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, getPrecInfoFunc[0], precInfoAntIdx);
28633 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
28637 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28640 /* Adding UE to RbAllocInfo TX Lst */
28641 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28643 /* Fill UE allocInfo scratch pad */
28644 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28645 precInfo, noTxLyrs, subFrm);
28652 * @brief This function handles fresh TX allocation in case of TM4 UEs
28653 * where both the TBs are free for new transmission
28658 * Function: rgSCHCmnDlTM4TxTx
28659 * Purpose: Reached here for a TM4 UE's HqP's fresh allocation
28660 * where both the TBs are free for TX scheduling.
28661 * If forceTD flag is set
28662 * perform TD on CW1 with TB1.
28668 * Single layer precoding of TB1 on CW1.
28669 * Set precoding info.
28671 * RI layered precoding 2 TB on 2 CW.
28672 * Set precoding info.
28673 * Add UE to cellAllocInfo.
28674 * Fill ueAllocInfo.
28676 * effBo is set to a non-zero value if allocation is
28679 * Invoked by: rgSCHCmnDlAllocRbTM4
28681 * @param[in] RgSchCellCb *cell
28682 * @param[in] RgSchDlSf *subFrm
28683 * @param[in] RgSchUeCb *ue
28684 * @param[in] U32 bo
28685 * @param[out] U32 *effBo
28686 * @param[in] RgSchDlHqProcCb *proc
28687 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28692 PRIVATE Void rgSCHCmnDlTM4TxTx
28699 RgSchDlHqProcCb *proc,
28700 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28703 PRIVATE Void rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28709 RgSchDlHqProcCb *proc;
28710 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28713 RgSchCmnDlUe *ueDl;
28714 RgSchDlRbAlloc *allocInfo;
28721 TRC2(rgSCHCmnDlTM4TxTx);
28724 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28725 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28727 /* Integration_fix: SPS Proc shall always have only one Cw */
28729 #ifdef FOUR_TX_ANTENNA
28730 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28731 (ueDl->mimoInfo.forceTD) || proc->hasDcch) /*Chandra Avoid DCCH to be SM */
28733 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28734 (ueDl->mimoInfo.forceTD))
28737 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
28740 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28741 &allocInfo->raType);
28743 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28744 bo, &numRb, effBo);
28745 if (ret == RFAILED)
28747 /* If allocation couldn't be made then return */
28751 precInfo = 0; /* TD */
28753 else /* Precoding */
28755 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
28756 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28758 if (ueDl->mimoInfo.ri == 1)
28760 /* Single Layer SM using FORMAT 2 */
28761 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28762 bo, &numRb, effBo);
28763 if (ret == RFAILED)
28765 /* If allocation couldn't be made then return */
28769 precInfo = 0; /* PrecInfo as 0 for RI=1*/
28773 /* Spatial Multiplexing using 2 CWs */
28774 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28775 if (ret == RFAILED)
28777 /* If allocation couldn't be made then return */
28780 noTxLyrs = ueDl->mimoInfo.ri;
28781 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28782 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
28788 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28791 /* Adding UE to RbAllocInfo TX Lst */
28792 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28795 /* Fill UE allocInfo scrath pad */
28796 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28797 precInfo, noTxLyrs, subFrm);
28804 * @brief This function determines the RBs and Bytes required for BO
28805 * transmission for UEs configured with TM 4.
28809 * Function: rgSCHCmnDlAllocTxRbTM4
28810 * Purpose: Invokes the functionality particular to the
28811 * current state of the TBs of the "proc".
28813 * Reference Parameter effBo is filled with alloced bytes.
28814 * Returns RFAILED if BO not satisfied at all.
28816 * Invoked by: rgSCHCmnDlAllocTxRb
28818 * @param[in] RgSchCellCb *cell
28819 * @param[in] RgSchDlSf *subFrm
28820 * @param[in] RgSchUeCb *ue
28821 * @param[in] U32 bo
28822 * @param[out] U32 *effBo
28823 * @param[in] RgSchDlHqProcCb *proc
28824 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28829 PRIVATE Void rgSCHCmnDlAllocTxRbTM4
28836 RgSchDlHqProcCb *proc,
28837 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28840 PRIVATE Void rgSCHCmnDlAllocTxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28846 RgSchDlHqProcCb *proc;
28847 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28850 TRC2(rgSCHCmnDlAllocTxRbTM4);
/* TM4 fresh-TX entry point: for new transmissions both TBs of "proc"
 * are free, so the TB-state dispatch is trivially the TxTx handler */
28852 /* Both TBs free for TX allocation */
28853 rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo,\
28854 proc, cellWdAllocInfo);
28861 * @brief This function determines the RBs and Bytes required for BO
28862 * retransmission for UEs configured with TM 4.
28866 * Function: rgSCHCmnDlAllocRetxRbTM4
28867 * Purpose: Invokes the functionality particular to the
28868 * current state of the TBs of the "proc".
28870 * Reference Parameter effBo is filled with alloced bytes.
28871 * Returns RFAILED if BO not satisfied at all.
28873 * Invoked by: rgSCHCmnDlAllocRetxRb
28875 * @param[in] RgSchCellCb *cell
28876 * @param[in] RgSchDlSf *subFrm
28877 * @param[in] RgSchUeCb *ue
28878 * @param[in] U32 bo
28879 * @param[out] U32 *effBo
28880 * @param[in] RgSchDlHqProcCb *proc
28881 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28886 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4
28893 RgSchDlHqProcCb *proc,
28894 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28897 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28903 RgSchDlHqProcCb *proc;
28904 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28907 TRC2(rgSCHCmnDlAllocRetxRbTM4);
/* Dispatch on HARQ TB states: both TBs NACKED -> RetxRetx handler;
 * otherwise exactly one TB needs RETX (the other may or may not take a
 * fresh TX) -> TxRetx handler */
28909 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
28910 (proc->tbInfo[1].state == HQ_TB_NACKED))
28912 /* Both TBs require RETX allocation */
28913 rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo,\
28914 proc, cellWdAllocInfo);
28918 /* One of the TBs need RETX allocation. Other TB may/maynot
28919 * be available for new TX allocation. */
28920 rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo,\
28921 proc, cellWdAllocInfo);
28930 * @brief This function determines the RBs and Bytes required for BO
28931 * transmission for UEs configured with TM 5.
28935 * Function: rgSCHCmnDlAllocTxRbTM5
28938 * Reference Parameter effBo is filled with alloced bytes.
28939 * Returns RFAILED if BO not satisfied at all.
28941 * Invoked by: rgSCHCmnDlAllocTxRb
28943 * @param[in] RgSchCellCb *cell
28944 * @param[in] RgSchDlSf *subFrm
28945 * @param[in] RgSchUeCb *ue
28946 * @param[in] U32 bo
28947 * @param[out] U32 *effBo
28948 * @param[in] RgSchDlHqProcCb *proc
28949 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28954 PRIVATE Void rgSCHCmnDlAllocTxRbTM5
28961 RgSchDlHqProcCb *proc,
28962 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28965 PRIVATE Void rgSCHCmnDlAllocTxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28971 RgSchDlHqProcCb *proc;
28972 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28975 TRC2(rgSCHCmnDlAllocTxRbTM5);
/* TM5 (MU-MIMO) is not supported by this scheduler: reaching here means
 * a misconfigured UE; logged only when debug error-class is enabled */
28976 #if (ERRCLASS & ERRCLS_DEBUG)
28977 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
28984 * @brief This function determines the RBs and Bytes required for BO
28985 * retransmission for UEs configured with TM 5.
28989 * Function: rgSCHCmnDlAllocRetxRbTM5
28992 * Reference Parameter effBo is filled with alloced bytes.
28993 * Returns RFAILED if BO not satisfied at all.
28995 * Invoked by: rgSCHCmnDlAllocRetxRb
28997 * @param[in] RgSchCellCb *cell
28998 * @param[in] RgSchDlSf *subFrm
28999 * @param[in] RgSchUeCb *ue
29000 * @param[in] U32 bo
29001 * @param[out] U32 *effBo
29002 * @param[in] RgSchDlHqProcCb *proc
29003 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29008 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5
29015 RgSchDlHqProcCb *proc,
29016 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29019 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29025 RgSchDlHqProcCb *proc;
29026 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29029 TRC2(rgSCHCmnDlAllocRetxRbTM5);
/* TM5 (MU-MIMO) is not supported by this scheduler: reaching here means
 * a misconfigured UE; logged only when debug error-class is enabled */
29030 #if (ERRCLASS & ERRCLS_DEBUG)
29031 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
29039 * @brief This function determines the RBs and Bytes required for BO
29040 * transmission for UEs configured with TM 6.
29044 * Function: rgSCHCmnDlAllocTxRbTM6
29047 * Reference Parameter effBo is filled with alloced bytes.
29048 * Returns RFAILED if BO not satisfied at all.
29050 * Invoked by: rgSCHCmnDlAllocTxRb
29052 * @param[in] RgSchCellCb *cell
29053 * @param[in] RgSchDlSf *subFrm
29054 * @param[in] RgSchUeCb *ue
29055 * @param[in] U32 bo
29056 * @param[out] U32 *effBo
29057 * @param[in] RgSchDlHqProcCb *proc
29058 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29063 PRIVATE Void rgSCHCmnDlAllocTxRbTM6
29070 RgSchDlHqProcCb *proc,
29071 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29074 PRIVATE Void rgSCHCmnDlAllocTxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29080 RgSchDlHqProcCb *proc;
29081 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29084 RgSchDlRbAlloc *allocInfo;
29085 RgSchCmnDlUe *ueDl;
29089 TRC2(rgSCHCmnDlAllocTxRbTM6);
29092 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29093 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
/* Forced TD falls back to DCI format 1A; otherwise TM6 single-layer
 * closed-loop precoding uses DCI format 1B. Both branches use RA type 2 */
29095 if (ueDl->mimoInfo.forceTD)
29097 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29098 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29102 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29103 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29104 /* Fill precoding information for FORMAT 1B */
29105 /* First 4 least significant bits to indicate PMI.
29106 * 4th most significant corresponds to pmi Confirmation.
29108 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29109 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
/* Single-codeword TX allocation on TB1 */
29111 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
29112 bo, &numRb, effBo);
29113 if (ret == RFAILED)
29115 /* If allocation couldn't be made then return */
/* Only non-SPS procs enter the cell-wide TX allocation list */
29120 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
29123 /* Adding UE to RbAllocInfo TX Lst */
29124 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
29126 /* Fill UE alloc Info */
29127 allocInfo->rbsReq = numRb;
29128 allocInfo->dlSf = subFrm;
29134 * @brief This function determines the RBs and Bytes required for BO
29135 * retransmission for UEs configured with TM 6.
29139 * Function: rgSCHCmnDlAllocRetxRbTM6
29142 * Reference Parameter effBo is filled with alloced bytes.
29143 * Returns RFAILED if BO not satisfied at all.
29145 * Invoked by: rgSCHCmnDlAllocRetxRb
29147 * @param[in] RgSchCellCb *cell
29148 * @param[in] RgSchDlSf *subFrm
29149 * @param[in] RgSchUeCb *ue
29150 * @param[in] U32 bo
29151 * @param[out] U32 *effBo
29152 * @param[in] RgSchDlHqProcCb *proc
29153 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29158 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6
29165 RgSchDlHqProcCb *proc,
29166 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29169 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29175 RgSchDlHqProcCb *proc;
29176 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29179 RgSchDlRbAlloc *allocInfo;
29180 RgSchCmnDlUe *ueDl;
29184 TRC2(rgSCHCmnDlAllocRetxRbTM6);
29187 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29188 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
/* Forced TD falls back to DCI format 1A; otherwise TM6 single-layer
 * closed-loop precoding uses DCI format 1B. Both branches use RA type 2 */
29190 if (ueDl->mimoInfo.forceTD)
29192 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29193 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29197 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29198 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29199 /* Fill precoding information for FORMAT 1B */
29200 /* First 4 least significant bits to indicate PMI.
29201 * 4th most significant corresponds to pmi Confirmation.
29203 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29204 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
29207 /* Get the Allocation in terms of RBs that are required for
29208 * this retx of TB1 */
29209 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
29211 if (ret == RFAILED)
29213 /* Allocation couldn't be made for Retx */
/* Park the proc on the non-scheduled RETX list for a later attempt */
29214 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
29217 /* Adding UE to allocInfo RETX Lst */
29218 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
29219 /* Fill UE alloc Info */
29220 allocInfo->rbsReq = numRb;
29221 allocInfo->dlSf = subFrm;
29227 * @brief This function determines the RBs and Bytes required for BO
29228 * transmission for UEs configured with TM 7.
29232 * Function: rgSCHCmnDlAllocTxRbTM7
29235 * Reference Parameter effBo is filled with alloced bytes.
29236 * Returns RFAILED if BO not satisfied at all.
29238 * Invoked by: rgSCHCmnDlAllocTxRb
29240 * @param[in] RgSchCellCb *cell
29241 * @param[in] RgSchDlSf *subFrm
29242 * @param[in] RgSchUeCb *ue
29243 * @param[in] U32 bo
29244 * @param[out] U32 *effBo
29245 * @param[in] RgSchDlHqProcCb *proc
29246 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29251 PRIVATE Void rgSCHCmnDlAllocTxRbTM7
29258 RgSchDlHqProcCb *proc,
29259 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29262 PRIVATE Void rgSCHCmnDlAllocTxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29268 RgSchDlHqProcCb *proc;
29269 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29272 TRC2(rgSCHCmnDlAllocTxRbTM7);
/* TM7 TX: delegate to the common single-TB / single-CW allocation path */
29273 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29279 * @brief This function determines the RBs and Bytes required for BO
29280 * retransmission for UEs configured with TM 7.
29284 * Function: rgSCHCmnDlAllocRetxRbTM7
29287 * Reference Parameter effBo is filled with alloced bytes.
29288 * Returns RFAILED if BO not satisfied at all.
29290 * Invoked by: rgSCHCmnDlAllocRetxRb
29292 * @param[in] RgSchCellCb *cell
29293 * @param[in] RgSchDlSf *subFrm
29294 * @param[in] RgSchUeCb *ue
29295 * @param[in] U32 bo
29296 * @param[out] U32 *effBo
29297 * @param[in] RgSchDlHqProcCb *proc
29298 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29303 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7
29310 RgSchDlHqProcCb *proc,
29311 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29314 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29320 RgSchDlHqProcCb *proc;
29321 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29324 TRC2(rgSCHCmnDlAllocRetxRbTM7);
/* TM7 RETX: delegate to the common single-TB / single-CW RETX path */
29325 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29331 * @brief This function invokes the TM specific DL TX RB Allocation routine.
29335 * Function: rgSCHCmnDlAllocTxRb
29336 * Purpose: This function invokes the TM specific
29337 * DL TX RB Allocation routine.
29339 * Invoked by: Specific Schedulers
29341 * @param[in] RgSchCellCb *cell
29342 * @param[in] RgSchDlSf *subFrm
29343 * @param[in] RgSchUeCb *ue
29344 * @param[in] U32 bo
29345 * @param[out] U32 *effBo
29346 * @param[in] RgSchDlHqProcCb *proc
29347 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29352 PUBLIC S16 rgSCHCmnDlAllocTxRb
29359 RgSchDlHqProcCb *proc,
29360 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29363 PUBLIC S16 rgSCHCmnDlAllocTxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29369 RgSchDlHqProcCb *proc;
29370 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29373 U32 newSchBits = 0;
29374 U32 prevSchBits = 0;
29375 RgSchDlRbAlloc *allocInfo;
29377 TRC2(rgSCHCmnDlAllocTxRb);
/* First scheduling touch of this UE in the current TTI: reset its
 * aggregate scheduled-bits counter */
29379 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29381 ue->dl.aggTbBits = 0;
29385 /* Calculate totals bits previously allocated */
29386 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29387 if (allocInfo->tbInfo[0].schdlngForTb)
29389 prevSchBits += allocInfo->tbInfo[0].bytesReq;
29391 if (allocInfo->tbInfo[1].schdlngForTb)
29393 prevSchBits += allocInfo->tbInfo[1].bytesReq;
29396 /* Call TM specific RB allocation routine */
/* txMode is 1-based; the dispatch table is 0-based */
29397 (dlAllocTxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29398 proc, cellWdAllocInfo);
29402 /* Calculate totals bits newly allocated */
29403 if (allocInfo->tbInfo[0].schdlngForTb)
29405 newSchBits += allocInfo->tbInfo[0].bytesReq;
29407 if (allocInfo->tbInfo[1].schdlngForTb)
29409 newSchBits += allocInfo->tbInfo[1].bytesReq;
/* NOTE(review): despite the names, prevSchBits/newSchBits accumulate
 * bytesReq (bytes); the "* 8" below converts the delta to bits */
29411 if (newSchBits > prevSchBits)
29413 ue->dl.aggTbBits += ((newSchBits - prevSchBits) * 8);
29414 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
29421 /* DwPTS Scheduling Changes Start */
29424 * @brief Retransmit decision for TDD. Retx is avoided in below cases
29425 * 1) DL Sf -> Spl Sf
29426 * 2) DL SF -> DL SF 0
29430 * Function: rgSCHCmnRetxAvoidTdd
29431 * Purpose: Avoid allocating RETX for cases 1, 2
29433 * Invoked by: rgSCHCmnRetxAvoidTdd
29435 * @param[in] RgSchDlSf *curSf
29436 * @param[in] RgSchCellCb *cell
29437 * @param[in] RgSchDlHqProcCb *proc
29442 PUBLIC Bool rgSCHCmnRetxAvoidTdd
29446 RgSchDlHqProcCb *proc
29449 PUBLIC Bool rgSCHCmnRetxAvoidTdd(curSf, cell, proc)
29452 RgSchDlHqProcCb *proc;
29455 RgSchTddSfType txSfType = 0;
29457 TRC2(rgSCHCmnRetxAvoidTdd);
/* Determine the subframe type used by the original transmission of the
 * NACKED TB(s); RETX is avoided when the current SF offers fewer REs */
29459 /* Get the RBs of TB that will be retransmitted */
29460 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29462 txSfType = proc->tbInfo[0].sfType;
29464 #ifdef XEON_SPECIFIC_CHANGES
29465 #ifndef XEON_TDD_SPCL
29466 /* Avoid re-transmission on Normal SF when the corresponding TB was transmitted on SPCL SF */
29467 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
29474 if (proc->tbInfo[1].state == HQ_TB_NACKED)
29476 /* Select the TxSf with the highest num of possible REs
29477 * In ascending order -> 1) SPL SF 2) DL_SF_0 3) DL_SF */
29478 txSfType = RGSCH_MAX(txSfType, proc->tbInfo[1].sfType);
29480 #ifdef XEON_SPECIFIC_CHANGES
29481 #ifndef XEON_TDD_SPCL
29482 /* Avoid re-transmission on Normal SF when the corresponding TB was transmitted on SPCL SF */
29483 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
/* Original TX used a richer SF type than curSf -> avoid RETX here */
29491 if (txSfType > curSf->sfType)
29505 * @brief Avoid allocating RETX incase of collision
29506 * with reserved resources for BCH/PSS/SSS occassions.
29510 * Function: rgSCHCmnRetxAllocAvoid
29511 * Purpose: Avoid allocating RETX incase of collision
29512 * with reserved resources for BCH/PSS/SSS occassions
29514 * Invoked by: rgSCHCmnDlAllocRetxRb
29516 * @param[in] RgSchDlSf *subFrm
29517 * @param[in] RgSchUeCb *ue
29518 * @param[in] RgSchDlHqProcCb *proc
29523 PUBLIC Bool rgSCHCmnRetxAllocAvoid
29527 RgSchDlHqProcCb *proc
29530 PUBLIC Bool rgSCHCmnRetxAllocAvoid(subFrm, cell, proc)
29533 RgSchDlHqProcCb *proc;
29538 TRC2(rgSCHCmnRetxAllocAvoid);
/* A RETX reuses the original grant size: take numRb from whichever TB is
 * NACKED (TB0 takes precedence) */
29540 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29542 reqRbs = proc->tbInfo[0].dlGrnt.numRb;
29546 reqRbs = proc->tbInfo[1].dlGrnt.numRb;
29548 /* Consider the dlGrnt.numRb of the Retransmitting proc->tbInfo
29549 * and current available RBs to determine if this RETX TB
29550 * will collide with the BCH/PSS/SSS occasion */
/* BCH/PSS/SSS occupy reserved RBs only in subframes 0 and 5 */
29551 if (subFrm->sfNum % 5 == 0)
29553 if ((subFrm->bwAssigned < cell->pbchRbEnd) &&
29554 (((subFrm->bwAssigned + reqRbs) - cell->pbchRbStart) > 0))
29566 * @brief This function invokes the TM specific DL RETX RB Allocation routine.
29570 * Function: rgSCHCmnDlAllocRetxRb
29571 * Purpose: This function invokes the TM specific
29572 * DL RETX RB Allocation routine.
29574 * Invoked by: Specific Schedulers
29576 * @param[in] RgSchCellCb *cell
29577 * @param[in] RgSchDlSf *subFrm
29578 * @param[in] RgSchUeCb *ue
29579 * @param[in] U32 bo
29580 * @param[out] U32 *effBo
29581 * @param[in] RgSchDlHqProcCb *proc
29582 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29587 PUBLIC S16 rgSCHCmnDlAllocRetxRb
29594 RgSchDlHqProcCb *proc,
29595 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29598 PUBLIC S16 rgSCHCmnDlAllocRetxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29604 RgSchDlHqProcCb *proc;
29605 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29608 U32 newSchBits = 0;
29609 RgSchDlRbAlloc *allocInfo;
29611 TRC2(rgSCHCmnDlAllocRetxRb);
/* First scheduling touch of this UE in the current TTI: reset its
 * aggregate scheduled-bits counter */
29613 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29615 ue->dl.aggTbBits = 0;
29619 /* Check for DL BW exhaustion */
29620 if (subFrm->bw <= subFrm->bwAssigned)
29624 /* Call TM specific RB allocation routine */
/* txMode is 1-based; the dispatch table is 0-based */
29625 (dlAllocRetxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29626 proc, cellWdAllocInfo);
29630 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29631 /* Calculate totals bits newly allocated */
29632 if (allocInfo->tbInfo[0].schdlngForTb)
29634 newSchBits += allocInfo->tbInfo[0].bytesReq;
29636 if (allocInfo->tbInfo[1].schdlngForTb)
29638 newSchBits += allocInfo->tbInfo[1].bytesReq;
/* NOTE(review): newSchBits accumulates bytesReq (bytes); the "* 8"
 * converts to bits before adding to the aggregate */
29640 ue->dl.aggTbBits += (newSchBits * 8);
29641 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
29649 * @brief This function determines the RBs and Bytes required for
29650 * Transmission on 1 CW.
29654 * Function: rgSCHCmnDlAlloc1CwTxRb
29655 * Purpose: This function determines the RBs and Bytes required
29656 * for Transmission of DL SVC BO on 1 CW.
29657 * Also, takes care of SVC by SVC allocation by tracking
29658 * previous SVCs allocations.
29659 * Returns RFAILED if BO not satisfied at all.
29661 * Invoked by: DL UE Allocation
29663 * @param[in] RgSchCellCb *cell
29664 * @param[in] RgSchDlSf *subFrm
29665 * @param[in] RgSchUeCb *ue
29666 * @param[in] RgSchDlHqTbCb *tbInfo
29667 * @param[in] U32 bo
29668 * @param[out] U8 *numRb
29669 * @param[out] U32 *effBo
29674 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb
29679 RgSchDlHqTbCb *tbInfo,
29685 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, tbInfo, bo, numRb, effBo)
29689 RgSchDlHqTbCb *tbInfo;
29698 RgSchCmnDlUe *ueDl;
29699 RgSchDlRbAlloc *allocInfo;
29702 /* Correcting wrap around issue.
29703 * This change has been done at multiple places in this function.*/
29705 TRC2(rgSCHCmnDlAlloc1CwTxRb);
29708 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29709 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
/* Snapshot bytes already outstanding so only the new delta is reported */
29710 oldReq = ueDl->outStndAlloc;
29713 //TODO_SID: Currently setting max Tb size wrt to 5GTF TM3
/* 5GTF path: MCS taken from UE config; TB-size and RB caps scale with
 * the configured rank */
29714 iTbs = ue->ue5gtfCb.mcs;
29715 ueDl->maxTbSz = MAX_5GTF_TB_SIZE * ue->ue5gtfCb.rank;
29716 ueDl->maxRb = MAX_5GTF_PRBS;
29718 ueDl->outStndAlloc += bo;
29719 /* consider Cumulative amount of this BO and bytes so far allocated */
29720 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbSz/8);
29721 /* Get the number of REs needed for this bo. */
29722 //noRes = ((bo * 8 * 1024) / eff);
29724 /* Get the number of RBs needed for this transmission */
29725 /* Number of RBs = No of REs / No of REs per RB */
29726 //tempNumRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
/* RE/RB computation is disabled for 5GTF: full PRB allotment is used and
 * the TB size is clipped to BO and the per-rank table limit */
29727 tempNumRb = MAX_5GTF_PRBS;
29728 tbSz = RGSCH_MIN(bo, (rgSch5gtfTbSzTbl[iTbs]/8) * ue->ue5gtfCb.rank);
29730 /* DwPts Scheduling Changes End */
/* Effective BO served this call = newly added bytes, capped by request */
29731 *effBo = RGSCH_MIN(tbSz - oldReq, reqBytes);
29734 //RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs);
29739 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbSz, \
29740 iTbs, imcs, tbInfo, ue->ue5gtfCb.rank);
29741 *numRb = (U8) tempNumRb;
29743 /* Update the subframe Allocated BW field */
29744 subFrm->bwAssigned = subFrm->bwAssigned + tempNumRb - allocInfo->rbsReq;
29751 * @brief This function is invoked in the event of any TB's allocation
29752 * being underutilized by the specific scheduler. Here we reduce iMcs
29753 * to increase redundancy and hence increase reception quality at UE.
29757 * Function: rgSCHCmnRdcImcsTxTb
29758 * Purpose: This function shall reduce the iMcs in accordance with
29759 * the total consumed bytes by the UE at allocation
29762 * Invoked by: UE DL Allocation finalization routine
29763 * of specific scheduler.
29765 * @param[in] RgSchDlRbAlloc *allocInfo
29766 * @param[in] U8 tbInfoIdx
29767 * @param[in] U32 cnsmdBytes
29772 PUBLIC Void rgSCHCmnRdcImcsTxTb
29774 RgSchDlRbAlloc *allocInfo,
29779 PUBLIC Void rgSCHCmnRdcImcsTxTb(allocInfo, tbInfoIdx, cnsmdBytes)
29780 RgSchDlRbAlloc *allocInfo;
29786 /*The below functionality is not needed.*/
29791 TRC2(rgSCHCmnRdcImcsTxTb);
29793 iTbs = allocInfo->tbInfo[tbInfoIdx].iTbs;
29794 noLyr = allocInfo->tbInfo[tbInfoIdx].noLyr;
29795 numRb = allocInfo->rbsAlloc;
/* TB size already exactly matches consumption: current iMcs is optimal */
29798 if ((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) == cnsmdBytes)
29803 /* Get iTbs as suitable for the consumed bytes */
/* Step iTbs down until the TB size no longer exceeds the consumed bytes,
 * increasing redundancy for the same RB count */
29804 while((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) > cnsmdBytes)
29808 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].\
29809 tbCb->dlGrnt.iMcs);
29815 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].tbCb->dlGrnt.iMcs);
29822 * @brief This function determines the RBs and Bytes required for
29823 * Transmission on 2 CWs.
29827 * Function: rgSCHCmnDlAlloc2CwTxRb
29828 * Purpose: This function determines the RBs and Bytes required
29829 * for Transmission of DL SVC BO on 2 CWs.
29830 * Also, takes care of SVC by SVC allocation by tracking
29831 * previous SVCs allocations.
29832 * Returns RFAILED if BO not satisfied at all.
29834 * Invoked by: TM3 and TM4 DL UE Allocation
29836 * @param[in] RgSchCellCb *cell
29837 * @param[in] RgSchDlSf *subFrm
29838 * @param[in] RgSchUeCb *ue
29839 * @param[in] RgSchDlHqProcCb *proc
29840 * @param[in] RgSchDlHqProcCb bo
29841 * @param[out] U8 *numRb
29842 * @param[out] U32 *effBo
29847 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb
29852 RgSchDlHqProcCb *proc,
29858 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, numRbRef, effBo)
29862 RgSchDlHqProcCb *proc;
29874 RgSchCmnDlCell *cellDl;
29875 RgSchCmnDlUe *ueDl;
29876 RgSchDlRbAlloc *allocInfo;
29879 /* Fix: MUE_PERTTI_DL */
29881 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
29882 U8 cfi = cellSch->dl.currCfi;
29889 TRC2(rgSCHCmnDlAlloc2CwTxRb);
29892 cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
29893 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29894 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
/* Snapshot bytes already outstanding so only the new delta is reported */
29895 oldReq = ueDl->outStndAlloc;
29898 if (ueDl->maxTbBits > ue->dl.aggTbBits)
29900 availBits = ueDl->maxTbBits - ue->dl.aggTbBits;
29902 /* check if we can further allocate to this UE */
29903 if ((ue->dl.aggTbBits >= ueDl->maxTbBits) ||
29904 (allocInfo->tbInfo[0].bytesReq >= ueDl->maxTbSz/8) ||
29905 (allocInfo->tbInfo[1].bytesReq >= ueDl->maxTbSz/8) ||
29906 (allocInfo->rbsReq >= ueDl->maxRb))
29908 RLOG_ARG0(L_DEBUG,DBG_CELLID,cell->cellId,
29909 "rgSCHCmnDlAllocRb(): UEs max allocation exceed");
/* Per-codeword layer counts and iTbs selection */
29913 noLyr1 = ueDl->mimoInfo.cwInfo[0].noLyr;
29914 noLyr2 = ueDl->mimoInfo.cwInfo[1].noLyr;
29916 /* If there is no CFI change, continue to use the BLER based
29918 if (ueDl->lastCfi == cfi)
29920 iTbs1 = ueDl->mimoInfo.cwInfo[0].iTbs[noLyr1 - 1];
29921 iTbs2 = ueDl->mimoInfo.cwInfo[1].iTbs[noLyr2 - 1];
/* CFI changed: re-derive iTbs per codeword from the reported CQI */
29925 U8 cqi = ueDl->mimoInfo.cwInfo[0].cqi;
29927 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 0, noLyr1);
29929 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 0, noLyr1);
29932 cqi = ueDl->mimoInfo.cwInfo[1].cqi;
29934 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 1, noLyr2);
29936 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 1, noLyr2);
29940 /*ccpu00131191 and ccpu00131317 - Fix for RRC Reconfig failure
29941 * issue for VoLTE call */
29942 //if ((proc->hasDcch) || (TRUE == rgSCHLaaSCellEnabled(cell)))
/* Non-frequency-selective TDD: back off iTbs in subframes whose control
 * overhead reduces data REs */
29962 else if(!cellSch->dl.isDlFreqSel)
29965 /* for Tdd reduce iTbs only for SF0. SF5 contains only
29966 * SSS and can be ignored */
29967 if (subFrm->sfNum == 0)
29969 (iTbs1 > 1)? (iTbs1 -= 1) : (iTbs1 = 0);
29970 (iTbs2 > 1)? (iTbs2 -= 1) : (iTbs2 = 0);
29972 /* For SF 3 and 8 CRC is getting failed in DL.
29973 Need to do proper fix after the replay from
29975 #ifdef CA_PHY_BRDCM_61765
29976 if ((subFrm->sfNum == 3) || (subFrm->sfNum == 8))
29978 (iTbs1 > 2)? (iTbs1 -= 2) : (iTbs1 = 0);
29979 (iTbs2 > 2)? (iTbs2 -= 2) : (iTbs2 = 0);
/* Special (DwPTS) data subframe uses its own CFI */
29987 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
29989 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
/* Spectral-efficiency lookups for each codeword at the chosen iTbs/CFI */
29993 eff1 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr1 - 1][cfi]))[iTbs1];
29994 eff2 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr2 - 1][cfi]))[iTbs2];
29997 bo = RGSCH_MIN(bo,availBits/8);
29998 ueDl->outStndAlloc += bo;
29999 /* consider Cumulative amount of this BO and bytes so far allocated */
30000 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbBits/8);
/* Split BO across the two codewords weighted by their efficiencies,
 * each share floored at the minimum grant and capped at max TB size */
30001 bo = RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff1)/(eff1+eff2)),
30003 RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff2)/(eff1+eff2)),
30004 (ueDl->maxTbSz)/8) +
30005 1; /* Add 1 to adjust the truncation at weighted averaging */
30006 /* Get the number of REs needed for this bo. */
30007 noRes = ((bo * 8 * 1024) / (eff1 + eff2));
30009 /* Get the number of RBs needed for this transmission */
30010 /* Number of RBs = No of REs / No of REs per RB */
30011 numRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
30012 /* Cannot exceed the maximum number of RBs per UE */
30013 if (numRb > ueDl->maxRb)
30015 numRb = ueDl->maxRb;
30020 if(RFAILED == rgSCHLaaCmn2CwAdjustPrb(allocInfo, boTmp, &numRb, ueDl, noLyr1, noLyr2, iTbs1, iTbs2))
/* Grow numRb while both TB sizes stay within limits and the combined
 * TB payload still fits the (byte) BO target */
30023 while ((numRb <= ueDl->maxRb) &&
30024 (rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1] <= ueDl->maxTbSz) &&
30025 (rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1] <= ueDl->maxTbSz) &&
30026 ((rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8 +
30027 rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8) <= bo))
30033 availBw = subFrm->bw - subFrm->bwAssigned;
30034 /* Cannot exceed the total number of RBs in the cell */
30035 if ((S16)(numRb - allocInfo->rbsReq) > availBw)
30037 numRb = availBw + allocInfo->rbsReq;
30039 tb1Sz = rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8;
30040 tb2Sz = rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8;
30041 /* DwPts Scheduling Changes Start */
30043 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
30045 /* Max Rb for Special Sf is approximated as 4/3 of maxRb */
30046 rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, (U8*)&numRb, ueDl->maxRb*4/3,
30047 &iTbs1, &iTbs2, noLyr1,
30048 noLyr2, &tb1Sz, &tb2Sz, cfi);
30049 /* Check for available Bw */
30050 if ((S16)numRb - allocInfo->rbsReq > availBw)
30052 numRb = availBw + allocInfo->rbsReq;
30053 tb1Sz = rgTbSzTbl[noLyr1-1][iTbs1][RGSCH_MAX(numRb*3/4,1)-1]/8;
30054 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs2][RGSCH_MAX(numRb*3/4,1)-1]/8;
30058 /* DwPts Scheduling Changes End */
30059 /* Update the subframe Allocated BW field */
30060 subFrm->bwAssigned = subFrm->bwAssigned + numRb - \
/* Effective BO served this call = newly added bytes, capped by request */
30063 *effBo = RGSCH_MIN((tb1Sz + tb2Sz) - oldReq, reqBytes);
30066 if (ROK != rgSCHLaaCmn2TBPrbCheck(allocInfo, tb1Sz, tb2Sz, boTmp, effBo, iTbs1, iTbs2, numRb, proc))
/* Publish per-TB results (size, iTbs, iMcs, layers) into the scratch pad */
30072 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs1, imcs1);
30073 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs2, imcs2);
30074 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tb1Sz, \
30075 iTbs1, imcs1, &proc->tbInfo[0], noLyr1);
30076 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30077 iTbs2, imcs2, &proc->tbInfo[1], noLyr2);
30078 *numRbRef = (U8)numRb;
30086 * @brief This function determines the RBs and Bytes required for
30087 * Transmission & Retransmission on 2 CWs.
30091 * Function: rgSCHCmnDlAlloc2CwTxRetxRb
30092 * Purpose: This function determines the RBs and Bytes required
30093 * for Transmission & Retransmission on 2 CWs. Allocate
30094 * RETX TB on a better CW and restrict new TX TB by
30096 * Returns RFAILED if BO not satisfied at all.
30098 * Invoked by: TM3 and TM4 DL UE Allocation
30100 * @param[in] RgSchCellCb *cell
30101 * @param[in] RgSchDlSf *subFrm
30102 * @param[in] RgSchUeCb *ue
30103 * @param[in] RgSchDlHqTbCb *reTxTb
30104 * @param[in] RgSchDlHqTbCb *txTb
30105 * @param[out] U8 *numRb
30106 * @param[out] U32 *effBo
/* Allocates RBs/bytes when one codeword carries a RETX TB and the other a
 * fresh TX TB. The RETX TB reuses its original grant size (no RB/TBS
 * recomputation, per fix ccpu00123919); the fresh TB's size is derived from
 * the other codeword's iTbs. NOTE(review): several lines of this function
 * (braces, failure returns) are not visible in this chunk — the error paths
 * after the RLOG/bandwidth checks are presumed to RETVALUE(RFAILED). */
30111 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb
30116 RgSchDlHqTbCb *reTxTb,
30117 RgSchDlHqTbCb *txTb,
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30122 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, reTxTb, txTb, numRb,\
30127 RgSchDlHqTbCb *reTxTb;
30128 RgSchDlHqTbCb *txTb;
30133 RgSchCmnDlUe *ueDl;
30134 RgSchDlRbAlloc *allocInfo;
30138 RgSchCmnDlUeCwInfo *otherCw;
30140 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
30141 U8 cfi = cellDl->currCfi;
30144 TRC2(rgSCHCmnDlAlloc2CwTxRetxRb);
30146 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
30147 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
/* The fresh TX TB rides on the codeword NOT marked as the better one;
 * the RETX TB is given the better codeword */
30148 otherCw = &ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)];
30151 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30152 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
30154 availBw = subFrm->bw - subFrm->bwAssigned;
/* RETX keeps the RB count of its previous grant */
30155 *numRb = reTxTb->dlGrnt.numRb;
30157 #ifdef XEON_TDD_SPCL
30158 *numRb = (reTxTb->initTxNumRbs);
/* Initial TX was in a DwPTS (special) subframe but RETX is in a normal one:
 * scale RBs by 3/4 to keep the TB size reachable with normal-SF REs */
30159 if(reTxTb->sfType == RG_SCH_SPL_SF_DATA && subFrm->sfType != RG_SCH_SPL_SF_DATA)
30161 *numRb = (reTxTb->initTxNumRbs*3/4);
30165 RLOG1(L_ERROR," Number of RBs [%d] are less than or equal to 3",*numRb);
/* Not enough free bandwidth left in this subframe for the RETX grant */
30171 if ((S16)*numRb > availBw)
30175 /* Update the subframe Allocated BW field */
30176 subFrm->bwAssigned += *numRb;
30177 noLyr2 = otherCw->noLyr;
30178 RG_SCH_CMN_GET_MCS_FOR_RETX(reTxTb, imcs1);
30180 /* If there is no CFI change, continue to use the BLER based
30182 if (ueDl->lastCfi == cfi)
30184 iTbs = otherCw->iTbs[noLyr2-1];
/* CFI changed since last report: re-derive iTbs from the other CW's CQI.
 * NOTE(review): two alternate rgSchCmnFetchItbs signatures appear here —
 * presumably selected by a preprocessor branch not visible in this chunk */
30189 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, otherCw->cqi, cfi,
30190 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
30192 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, otherCw->cqi, cfi,
30193 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
/* 36.213 TBS table lookup is in bits; /8 converts to bytes */
30196 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs][*numRb-1]/8;
30197 /* DwPts Scheduling Changes Start */
30200 /* DwPts Scheduling Changes End */
30201 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs2);
/* TB0 = RETX (iTbs field 0: irrelevant for RETX), TB1 = fresh TX */
30203 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], reTxTb->tbSz, \
30204 0, imcs1, reTxTb, reTxTb->numLyrs);
30206 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30207 iTbs, imcs2, txTb, noLyr2);
/* Effective BO served = RETX TB bytes + new TB bytes */
30209 *effBo = reTxTb->tbSz + tb2Sz;
30216 * @brief This function determines the RBs and Bytes required for BO
30217 * Retransmission on 2 CWs.
30221 * Function: rgSCHCmnDlAlloc2CwRetxRb
30222 * Purpose: This function determines the RBs and Bytes required
30223 * for BO Retransmission on 2 CWs. Allocate larger TB
30224 * on a better CW and check if the smaller TB can be
30225 * accommodated on the other CW.
30226 * Returns RFAILED if BO not satisfied at all.
30228 * Invoked by: Common Scheduler
30230 * @param[in] RgSchCellCb *cell
30231 * @param[in] RgSchDlSf *subFrm
30232 * @param[in] RgSchUeCb *ue
30233 * @param[in] RgSchDlHqProcCb *proc
30234 * @param[out] U8 *numRb
30235 * @param[out] Bool *swpFlg
30236 * @param[out] U32 *effBo
/* Allocates RBs/bytes when BOTH codewords carry RETX TBs. Both TBs reuse
 * their initial-TX parameters (fix ccpu00123919); only the subframe
 * bandwidth availability is re-checked. NOTE(review): intermediate lines
 * (braces, failure returns, swpFlg handling) are not visible in this chunk. */
30241 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb
30246 RgSchDlHqProcCb *proc,
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30252 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc,\
30253 numRb, swpFlg, effBo)
30257 RgSchDlHqProcCb *proc;
30263 RgSchDlRbAlloc *allocInfo;
/* lrgTbInfo = TB0 (treated as the larger TB), othrTbInfo = TB1 */
30266 RgSchDlHqTbCb *lrgTbInfo, *othrTbInfo;
30268 TRC2(rgSCHCmnDlAlloc2CwRetxRb);
30270 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30273 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30274 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
30276 lrgTbInfo = &proc->tbInfo[0];
30277 othrTbInfo = &proc->tbInfo[1];
30278 *numRb = lrgTbInfo->dlGrnt.numRb;
30279 #ifdef XEON_TDD_SPCL
/* TDD special-subframe handling: reuse the larger of the two initial-TX
 * RB counts, scaled by 3/4 when the RETX lands in a normal subframe */
30280 if((lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA || othrTbInfo->sfType == RG_SCH_SPL_SF_DATA))
30282 if(lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA)
30284 *numRb = (lrgTbInfo->initTxNumRbs);
30288 *numRb = (othrTbInfo->initTxNumRbs);
30291 if(subFrm->sfType != RG_SCH_SPL_SF_DATA)
30293 *numRb = (*numRb)*3/4;
30298 RLOG1(L_ERROR," Number of RBs [%d] are less than or equal to 3",*numRb);
/* Insufficient free bandwidth in this subframe for the combined RETX */
30303 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30307 /* Update the subframe Allocated BW field */
30308 subFrm->bwAssigned += *numRb;
30309 RG_SCH_CMN_GET_MCS_FOR_RETX(lrgTbInfo, imcs1);
30310 RG_SCH_CMN_GET_MCS_FOR_RETX(othrTbInfo, imcs2);
/* iTbs field is 0 for RETX TBs: it is irrelevant on retransmission */
30311 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], lrgTbInfo->tbSz, \
30312 0, imcs1, lrgTbInfo, lrgTbInfo->numLyrs);
30313 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], othrTbInfo->tbSz, \
30314 0, imcs2, othrTbInfo, othrTbInfo->numLyrs);
/* Effective BO served = sum of both RETX TB sizes */
30315 *effBo = lrgTbInfo->tbSz + othrTbInfo->tbSz;
30324 * @brief This function determines the RBs and Bytes required for BO
30325 * Retransmission on 1 CW.
30329 * Function: rgSCHCmnDlAlloc1CwRetxRb
30330 * Purpose: This function determines the RBs and Bytes required
30331 * for BO Retransmission on 1 CW, the first CW.
30332 * Returns RFAILED if BO not satisfied at all.
30334 * Invoked by: Common Scheduler
30336 * @param[in] RgSchCellCb *cell
30337 * @param[in] RgSchDlSf *subFrm
30338 * @param[in] RgSchUeCb *ue
30339 * @param[in] RgSchDlHqTbCb *tbInfo
30340 * @param[in] U8 noLyr
30341 * @param[out] U8 *numRb
30342 * @param[out] U32 *effBo
/* Allocates RBs/bytes for a single-codeword RETX (TB on CW0 only). The TB
 * reuses its previous grant (RBs, MCS, DCI format); only bandwidth
 * availability is re-checked. NOTE(review): intermediate lines (braces,
 * failure return) are not visible in this chunk. */
30347 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb
30352 RgSchDlHqTbCb *tbInfo,
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30358 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, tbInfo, noLyr,\
30363 RgSchDlHqTbCb *tbInfo;
30369 RgSchDlRbAlloc *allocInfo;
30372 TRC2(rgSCHCmnDlAlloc1CwRetxRb);
30374 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30377 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30378 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
/* Reuse the RB count of the previous grant for this TB */
30380 *numRb = tbInfo->dlGrnt.numRb;
/* Fail if the subframe has insufficient free bandwidth */
30381 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30385 /* Update the subframe Allocated BW field */
30386 subFrm->bwAssigned += *numRb;
30387 imcs = tbInfo->dlGrnt.iMcs;
30388 allocInfo->dciFormat = tbInfo->dlGrnt.dciFormat;
30389 /* Fix: For a RETX TB the iTbs is irrelevant, hence setting 0 */
30390 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbInfo->tbSz, \
30391 0, imcs, tbInfo, tbInfo->numLyrs);
/* Effective BO served equals the retransmitted TB size */
30392 *effBo = tbInfo->tbSz;
30400 * @brief This function is called to handle Release PDCCH feedback for SPS UE
30404 * Function: rgSCHCmnDlRelPdcchFbk
30405 * Purpose: Invokes SPS module to handle release PDCCH feedback
30409 * @param[in] RgSchCellCb *cell
30410 * @param[in] RgSchUeCb *ue
30411 * @param[in] Bool isAck
/* Thin wrapper: forwards SPS release-PDCCH HARQ feedback (ACK/NACK) for the
 * given UE straight to the DL SPS module. */
30416 PUBLIC Void rgSCHCmnDlRelPdcchFbk
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30423 PUBLIC Void rgSCHCmnDlRelPdcchFbk(cell, ue, isAck)
30430 TRC2(rgSCHCmnDlRelPdcchFbk);
30431 rgSCHCmnSpsDlRelPdcchFbk(cell, ue, isAck);
30438 * @brief This function is invoked to handle Ack processing for a HARQ proc.
30442 * Function: rgSCHCmnDlProcAck
30443 * Purpose: DTX processing for HARQ proc
30447 * @param[in] RgSchCellCb *cell
30448 * @param[in] RgSchDlHqProcCb *hqP
/* Handles ACK processing for a DL HARQ process: if the process carried an
 * SPS transmission, delegate to the DL SPS module; otherwise nothing SPS-
 * specific is done here. */
30453 PUBLIC Void rgSCHCmnDlProcAck
30456 RgSchDlHqProcCb *hqP
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30459 PUBLIC Void rgSCHCmnDlProcAck(cell, hqP)
30461 RgSchDlHqProcCb *hqP;
30465 TRC2(rgSCHCmnDlProcAck);
30467 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
30469 /* Invoke SPS module if SPS service was scheduled for this HARQ proc */
30470 rgSCHCmnSpsDlProcAck(cell, hqP);
30474 #ifdef RGSCH_SPS_STATS
30475 extern U32 rgSchStatCrntiCeRcvCnt;
30478 * @brief This function is invoked to handle CRNTI CE reception for an UE
30482 * Function: rgSCHCmnHdlCrntiCE
30483 * Purpose: Handle CRNTI CE reception
30487 * @param[in] RgSchCellCb *cell
30488 * @param[in] RgSchDlHqProcCb *hqP
/* Handles reception of a C-RNTI MAC CE from a UE: re-activates a UE parked
 * in the PDCCH-order inactivity list, then resets DL/UL SPS state exactly
 * as on a UE RESET. Optionally bumps a debug counter under RGSCH_SPS_STATS. */
30493 PUBLIC Void rgSCHCmnHdlCrntiCE
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30499 PUBLIC Void rgSCHCmnHdlCrntiCE(cell, ue)
30505 TRC2(rgSCHCmnHdlCrntiCE);
30506 #ifdef RGSCH_SPS_STATS
30507 rgSchStatCrntiCeRcvCnt++;
30510 /* When UL sync lost happened due to TA timer expiry UE is being moved to
30511 PDCCH order inactivity list.But when CRNTI CE received in msg3 from UE
30512 we are not moving UE into active state due to that RRC Reconfiguration is
30514 So here we are moving UE to active list whenever we receive the CRNTI CE and
30516 /* CR ccpu00144525 */
30517 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
30519 /* Activate this UE if it was inactive */
30520 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30521 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30524 /* Handling is same as reception of UE RESET for both DL and UL */
30525 if (ue->dl.dlSpsCfg.isDlSpsEnabled)
30527 rgSCHCmnSpsDlUeReset(cell, ue);
30529 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30531 rgSCHCmnSpsUlUeReset(cell, ue);
30539 * @brief This function is called to handle relInd from MAC for a UE
30543 * Function: rgSCHCmnUlSpsRelInd
30544 * Purpose: Invokes SPS module to handle UL SPS release for a UE
30546 * Invoked by: SCH_UTL
30548 * @param[in] RgSchCellCb *cell
30549 * @param[in] RgSchUeCb *ue
30550 * @param[in] Bool isExplRel
/* Thin wrapper: forwards an UL SPS release indication (from MAC, via
 * SCH_UTL) to the UL SPS module. isExplRel distinguishes explicit release. */
30555 PUBLIC Void rgSCHCmnUlSpsRelInd
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30562 PUBLIC Void rgSCHCmnUlSpsRelInd(cell, ue, isExplRel)
30569 TRC2(rgSCHCmnUlSpsRelInd);
30570 rgSCHCmnSpsUlProcRelInd(cell, ue, isExplRel);
30573 } /* end of rgSCHCmnUlSpsRelInd */
30576 * @brief This function is called to handle SPS Activate Ind from MAC for a UE
30580 * Function: rgSCHCmnUlSpsActInd
30581 * Purpose: Invokes SPS module to handle UL SPS activate for a UE
30583 * Invoked by: SCH_UTL
30585 * @param[in] RgSchCellCb *cell
30586 * @param[in] RgSchUeCb *ue
/* Forwards an UL SPS activation indication (with the SPS SDU size) to the
 * UL SPS module — only when UL SPS is enabled for this UE. */
30591 PUBLIC Void rgSCHCmnUlSpsActInd
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30598 PUBLIC Void rgSCHCmnUlSpsActInd(cell, ue,spsSduSize)
30605 TRC2(rgSCHCmnUlSpsActInd);
30607 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30609 rgSCHCmnSpsUlProcActInd(cell, ue,spsSduSize);
30613 } /* end of rgSCHCmnUlSpsActInd */
30616 * @brief This function is called to handle CRC in UL for UEs
30617 * undergoing SPS release
30621 * Function: rgSCHCmnUlCrcInd
30622 * Purpose: Invokes SPS module to handle CRC in UL for SPS UE
30624 * Invoked by: SCH_UTL
30626 * @param[in] RgSchCellCb *cell
30627 * @param[in] RgSchUeCb *ue
30628 * @param[in] CmLteTimingInfo crcTime
/* Forwards an UL CRC-pass indication (with its timing) to the UL SPS module
 * for UEs undergoing SPS release — only when UL SPS is enabled. */
30633 PUBLIC Void rgSCHCmnUlCrcInd
30637 CmLteTimingInfo crcTime
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30640 PUBLIC Void rgSCHCmnUlCrcInd(cell, ue, crcTime)
30643 CmLteTimingInfo crcTime;
30647 TRC2(rgSCHCmnUlCrcInd);
30648 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30650 rgSCHCmnSpsUlProcCrcInd(cell, ue, crcTime);
/* NOTE(review): trailing comment says CrcFailInd — likely a copy-paste slip */
30654 } /* end of rgSCHCmnUlCrcFailInd */
30657 * @brief This function is called to handle CRC failure in UL
30661 * Function: rgSCHCmnUlCrcFailInd
30662 * Purpose: Invokes SPS module to handle CRC failure in UL for SPS UE
30664 * Invoked by: SCH_UTL
30666 * @param[in] RgSchCellCb *cell
30667 * @param[in] RgSchUeCb *ue
30668 * @param[in] CmLteTimingInfo crcTime
/* Forwards an UL CRC-failure indication (treated as DTX by the SPS module)
 * with its timing — only when UL SPS is enabled for this UE. */
30673 PUBLIC Void rgSCHCmnUlCrcFailInd
30677 CmLteTimingInfo crcTime
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30680 PUBLIC Void rgSCHCmnUlCrcFailInd(cell, ue, crcTime)
30683 CmLteTimingInfo crcTime;
30687 TRC2(rgSCHCmnUlCrcFailInd);
30688 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30690 rgSCHCmnSpsUlProcDtxInd(cell, ue, crcTime);
30694 } /* end of rgSCHCmnUlCrcFailInd */
30696 #endif /* LTEMAC_SPS */
30699 * @brief BCH,BCCH,PCCH Downlink Scheduling Handler.
30703 * Function: rgSCHCmnDlBcchPcchAlloc
30704 * Purpose: This function calls common scheduler APIs to
30705 * schedule for BCCH/PCCH.
30706 * It then invokes Allocator for actual RB
30707 * allocations. It processes on the actual resources allocated
30708 * against requested to the allocator module.
30710 * Invoked by: Common Scheduler
30712 * @param[in] RgSchCellCb *cell
/* Per-TTI entry point for BCCH/PCCH downlink scheduling: resets the
 * interface allocation for the next subframe, updates/selects SI, schedules
 * BCCH/PCCH, runs RB allocation, and finalizes the result. The whole
 * MIB/SIB path can be compiled out with DISABLE_MIB_SIB. */
30716 PRIVATE Void rgSCHCmnDlBcchPcchAlloc
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30721 PRIVATE Void rgSCHCmnDlBcchPcchAlloc(cell)
/* Index of the subframe being prepared; three alternate computations exist
 * under different build flags (HDFDD adds the HARQ interval look-ahead) */
30726 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
30728 #ifdef LTEMAC_HDFDD
30729 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
30731 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
30734 RgInfSfAlloc *nextsfAlloc = &(cell->sfAllocArr[nextSfIdx]);
30735 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30736 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
30738 TRC2(rgSCHCmnDlBcchPcchAlloc);
30741 /*Reset the bitmask for BCCH/PCCH*/
30742 rgSCHUtlResetSfAlloc(nextsfAlloc,TRUE,FALSE);
30743 #ifndef DISABLE_MIB_SIB /* Not sending MIB and SIB to CL */
/* Apply pending SI config at modification-period boundary, then pick the
 * SI to transmit in the current SI window */
30745 rgSCHChkNUpdSiCfg(cell);
30746 rgSCHSelectSi(cell);
30749 /*Perform the scheduling for BCCH,PCCH*/
30750 rgSCHCmnDlBcchPcch(cell, allocInfo, nextsfAlloc);
30752 /* Call common allocator for RB Allocation */
30753 rgSCHBcchPcchDlRbAlloc(cell, allocInfo);
30755 /* Finalize the allocations: reconcile requested against allocated */
30756 rgSCHCmnDlBcchPcchFnlz(cell, allocInfo);
30757 #endif /* DISABLE_MIB_SIB */
30762 * @brief Handles RB allocation for BCCH/PCCH for downlink.
30766 * Function : rgSCHBcchPcchDlRbAlloc
30768 * Invoking Module Processing:
30769 * - This function is invoked for DL RB allocation of BCCH/PCCH
30771 * Processing Steps:
30772 * - If cell is frequency selecive,
30773 * - Call rgSCHDlfsBcchPcchAllocRb().
30775 * - Do the processing
30777 * @param[in] RgSchCellCb *cell
30778 * @param[in] RgSchDlRbAllocInfo *allocInfo
/* Dispatches BCCH/PCCH DL RB allocation: frequency-selective cells go
 * through the DLFS module hook, all others through the common
 * non-frequency-selective allocator. */
30783 PRIVATE Void rgSCHBcchPcchDlRbAlloc
30786 RgSchCmnDlRbAllocInfo *allocInfo
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30789 PRIVATE Void rgSCHBcchPcchDlRbAlloc(cell, allocInfo)
30791 RgSchCmnDlRbAllocInfo *allocInfo;
30794 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30796 TRC2(rgSCHBcchPcchDlRbAlloc);
30799 if (cellSch->dl.isDlFreqSel)
30801 cellSch->apisDlfs->rgSCHDlfsBcchPcchAllocRb(cell, allocInfo);
30805 rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo);
30812 * @brief Handles RB allocation for BCCH,PCCH for frequency
30813 * non-selective cell.
30817 * Function : rgSCHCmnNonDlfsBcchPcchRbAlloc
30819 * Invoking Module Processing:
30820 * - SCH shall invoke this if downlink frequency selective is disabled for
30821 * the cell for RB allocation.
30822 * - MAX C/I/PFS/RR shall provide the requiredBytes, required RBs
30823 * estimate and subframe for each allocation to be made to SCH.
30825 * Processing Steps:
30826 * - Allocate sequentially for BCCH,PCCH common channels.
30828 * @param[in] RgSchCellCb *cell
30829 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
/* Sequentially allocates RBs for the common channels in a non-frequency-
 * selective cell: PCCH first, then BCCH-on-DLSCH, each only if it actually
 * requested RBs this TTI. */
30834 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc
30837 RgSchCmnDlRbAllocInfo *allocInfo
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30840 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo)
30842 RgSchCmnDlRbAllocInfo *allocInfo;
30845 RgSchDlRbAlloc *reqAllocInfo;
30847 TRC2(rgSCHCmnNonDlfsBcchPcchRbAlloc);
30850 /* Allocate for PCCH */
30851 reqAllocInfo = &(allocInfo->pcchAlloc);
30852 if (reqAllocInfo->rbsReq)
30854 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30856 /* Allocate for BCCH on DLSCH */
30857 reqAllocInfo = &(allocInfo->bcchAlloc);
30858 if (reqAllocInfo->rbsReq)
30860 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30868 * @brief This function implements the handling to check and
30869 * update the SI cfg at the start of the modification period.
30873 * Function: rgSCHChkNUpdSiCfg
30874 * Purpose: This function implements handling for update of SI Cfg
30875 * at the start of modification period.
30877 * Invoked by: Scheduler
30879 * @param[in] RgSchCellCb* cell
/* Swaps staged (new) system-information buffers/config into the current set.
 * SIB1-with-PWS updates apply immediately at each SIB1 repetition; MIB,
 * SIB1, SI messages and SI config apply only at the start of a BCCH
 * modification period. NOTE(review): intermediate lines (braces, returns)
 * are not visible in this chunk. */
30885 PRIVATE Void rgSCHChkNUpdSiCfg
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
30890 PRIVATE Void rgSCHChkNUpdSiCfg(cell)
30894 CmLteTimingInfo pdSchTmInfo;
30896 TRC2(rgSCHChkNUpdSiCfg);
/* Work on the look-ahead time the scheduler is actually preparing for */
30899 pdSchTmInfo = cell->crntTime;
30900 #ifdef LTEMAC_HDFDD
30901 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
30902 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
30903 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
30905 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA);
30909 /* Updating the SIB1 for Warning SI message immediately after it is received
30910 * from application. No need to wait for next modification period.
30912 if((pdSchTmInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
30913 && (RGSCH_SIB1_TX_SF_NUM == (pdSchTmInfo.slot % RGSCH_NUM_SUB_FRAMES)))
30915 /*Check whether SIB1 with PWS has been updated*/
30916 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_PWS_UPD)
/* Swap in the PWS-carrying SIB1 buffer and its transport parameters */
30918 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30919 cell->siCb.newSiInfo.sib1Info.sib1);
30920 cell->siCb.crntSiInfo.sib1Info.mcs =
30921 cell->siCb.newSiInfo.sib1Info.mcs;
30922 cell->siCb.crntSiInfo.sib1Info.nPrb =
30923 cell->siCb.newSiInfo.sib1Info.nPrb;
30924 cell->siCb.crntSiInfo.sib1Info.msgLen =
30925 cell->siCb.newSiInfo.sib1Info.msgLen;
30926 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_PWS_UPD;
30930 /*Check if this SFN and SF No marks the start of next modification
30931 period. If current SFN,SF No doesn't marks the start of next
30932 modification period, then return. */
30933 if(!((pdSchTmInfo.sfn % cell->siCfg.modPrd == 0)
30934 && (0 == pdSchTmInfo.slot)))
30935 /*if(!((((pdSchTmInfo.hSfn * 1024) + pdSchTmInfo.sfn) % cell->siCfg.modPrd == 0)
30936 && (0 == pdSchTmInfo.slot)))*/
30941 /*Check whether MIB has been updated*/
30942 if(cell->siCb.siBitMask & RGSCH_SI_MIB_UPD)
30944 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.mib,
30945 cell->siCb.newSiInfo.mib);
30946 cell->siCb.siBitMask &= ~RGSCH_SI_MIB_UPD;
30949 /*Check whether SIB1 has been updated*/
30950 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_UPD)
30952 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30953 cell->siCb.newSiInfo.sib1Info.sib1);
30954 cell->siCb.crntSiInfo.sib1Info.mcs = cell->siCb.newSiInfo.sib1Info.mcs;
30955 cell->siCb.crntSiInfo.sib1Info.nPrb = cell->siCb.newSiInfo.sib1Info.nPrb;
30956 cell->siCb.crntSiInfo.sib1Info.msgLen =
30957 cell->siCb.newSiInfo.sib1Info.msgLen;
30958 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_UPD;
30961 /*Check whether SIs have been updated*/
30962 if(cell->siCb.siBitMask & RGSCH_SI_SI_UPD)
30966 /*Check if SI cfg have been modified And Check if numSi have
30967 been changed, if yes then we would need to update the
30968 pointers for all the SIs */
30969 if((cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD) &&
30970 (cell->siCfg.numSi != cell->siCb.newSiCfg.numSi))
30972 for(idx = 0;idx < cell->siCb.newSiCfg.numSi;idx++)
30974 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
30975 cell->siCb.newSiInfo.siInfo[idx].si);
30976 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
30977 cell->siCb.siArray[idx].isWarningSi = FALSE;
30979 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
30980 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
30981 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
30984 /*If numSi have been reduced then we need to free the
30985 pointers at the indexes in crntSiInfo which haven't
30986 been exercised. If numSi has increased then nothing
30987 additional is requires as above handling has taken
30989 if(cell->siCfg.numSi > cell->siCb.newSiCfg.numSi)
30991 for(idx = cell->siCb.newSiCfg.numSi;
30992 idx < cell->siCfg.numSi;idx++)
30994 RGSCH_FREE_MSG(cell->siCb.crntSiInfo.siInfo[idx].si);
30995 cell->siCb.siArray[idx].si = NULLP;
31001 /*numSi has not been updated, we just need to update the
31002 pointers for the SIs which are set to NON NULLP */
31003 /*ccpu00118260 - Correct Update of SIB2 */
31004 for(idx = 0;idx < cell->siCfg.numSi;idx++)
31006 if(NULLP != cell->siCb.newSiInfo.siInfo[idx].si)
31008 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
31009 cell->siCb.newSiInfo.siInfo[idx].si);
31011 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
31012 cell->siCb.siArray[idx].isWarningSi = FALSE;
31013 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
31014 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
31015 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
31019 cell->siCb.siBitMask &= ~RGSCH_SI_SI_UPD;
31022 /*Check whether SI cfg have been updated*/
31023 if(cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD)
31025 cell->siCfg = cell->siCb.newSiCfg;
31026 cell->siCb.siBitMask &= ~RGSCH_SI_SICFG_UPD;
31034 * @brief This function implements the selection of the SI
31035 * that is to be scheduled.
31039 * Function: rgSCHSelectSi
31040 * Purpose: This function implements the selection of SI
31041 * that is to be scheduled.
31043 * Invoked by: Scheduler
31045 * @param[in] RgSchCellCb* cell
/* Selects which SI message to transmit: tracks the SI-window counter, and
 * at each new window computes the SI-set id and window id, then arms the
 * SI context (siId, retx count, tx window) if a valid SI is mapped to that
 * window and its periodicity is due. siId==0 marks "no SI this window".
 * NOTE(review): intermediate lines (braces, returns) are not visible in
 * this chunk. */
31051 PRIVATE Void rgSCHSelectSi
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
31056 PRIVATE Void rgSCHSelectSi(cell)
31060 CmLteTimingInfo crntTmInfo;
31065 TRC2(rgSCHSelectSi);
/* Work on the look-ahead time the scheduler is preparing for */
31068 crntTmInfo = cell->crntTime;
31069 #ifdef LTEMAC_HDFDD
31070 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31071 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31072 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31074 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA);
31077 siWinSize = cell->siCfg.siWinSize;
31079 /* Select SI only once at the starting of the new window */
31080 if(cell->siCb.inWindow)
31082 if ((crntTmInfo.sfn % cell->siCfg.minPeriodicity) == 0 &&
31083 crntTmInfo.slot == 0)
31085 /* Reinit inWindow at the beginning of every SI window */
31086 cell->siCb.inWindow = siWinSize - 1;
31090 cell->siCb.inWindow--;
31094 else /* New window. Re-init the winSize counter with the window length */
/* A warning SI left with pending retransmissions is dropped at the window
 * boundary: free its PDU so the staged queue can advance */
31096 if((cell->siCb.siArray[cell->siCb.siCtx.siId - 1].isWarningSi == TRUE)&&
31097 (cell->siCb.siCtx.retxCntRem != 0))
31099 rgSCHUtlFreeWarningSiPdu(cell);
31100 cell->siCb.siCtx.warningSiFlag = FALSE;
31103 cell->siCb.inWindow = siWinSize - 1;
/* x = index of the SI set (minPeriodicity-sized chunk of SFN space) */
31106 x = rgSCHCmnGetSiSetId(crntTmInfo.sfn, crntTmInfo.slot,
31107 cell->siCfg.minPeriodicity);
31109 /* Window Id within a SI set. This window Id directly maps to a
31111 windowId = (((crntTmInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) +
31112 crntTmInfo.slot) - (x * (cell->siCfg.minPeriodicity * 10)))
31115 if(windowId >= RGR_MAX_NUM_SI)
31118 /* Update the siCtx if there is a valid SI and its periodicity
31120 if (NULLP != cell->siCb.siArray[windowId].si)
31122 /* Warning SI Periodicity is same as SIB2 Periodicity */
31123 if(((cell->siCb.siArray[windowId].isWarningSi == FALSE) &&
31124 (x % (cell->siCfg.siPeriodicity[windowId]
31125 /cell->siCfg.minPeriodicity) == 0)) ||
31126 ((cell->siCb.siArray[windowId].isWarningSi == TRUE) &&
31127 (x % (cell->siCfg.siPeriodicity[0]
31128 /cell->siCfg.minPeriodicity) == 0)))
/* Arm the SI context: which SI, how many retx, and its tx window span */
31130 cell->siCb.siCtx.siId = windowId+1;
31131 cell->siCb.siCtx.retxCntRem = cell->siCfg.retxCnt;
31132 cell->siCb.siCtx.warningSiFlag = cell->siCb.siArray[windowId].
31134 cell->siCb.siCtx.timeToTx.sfn = crntTmInfo.sfn;
31135 cell->siCb.siCtx.timeToTx.slot = crntTmInfo.slot;
31137 RG_SCH_ADD_TO_CRNT_TIME(cell->siCb.siCtx.timeToTx,
31138 cell->siCb.siCtx.maxTimeToTx, (siWinSize - 1))
31142 {/* Update the siCtx with invalid si Id */
31143 cell->siCb.siCtx.siId = 0;
31151 * @brief This function implements scheduler DL allocation for
31156 * Function: rgSCHDlSiSched
31157 * Purpose: This function implements scheduler for DL allocation
31160 * Invoked by: Scheduler
31162 * @param[in] RgSchCellCb* cell
/* DL scheduler for broadcast system information. Per TTI it: (1) builds the
 * MIB for PBCH at (SFN % MIB periodicity, sf 0) — patching the SFN bits
 * into the first two MIB octets; (2) schedules SIB1 at its repetition
 * occasions; (3) otherwise transmits the armed SI context, computing the
 * RB need from the TBS table or bitsPerRb, with DwPTS compensation and an
 * ABS-mute skip. Results are written into allocInfo->bcchAlloc and the
 * MAC interface structure subfrmAlloc. NOTE(review): intermediate lines
 * (braces, returns, some declarations) are not visible in this chunk. */
31168 PRIVATE Void rgSCHDlSiSched
31171 RgSchCmnDlRbAllocInfo *allocInfo,
31172 RgInfSfAlloc *subfrmAlloc
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
31175 PRIVATE Void rgSCHDlSiSched(cell, allocInfo, subfrmAlloc)
31177 RgSchCmnDlRbAllocInfo *allocInfo;
31178 RgInfSfAlloc *subfrmAlloc;
31181 CmLteTimingInfo crntTimInfo;
31187 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31188 /* DwPTS Scheduling Changes Start */
31191 U8 cfi = cellDl->currCfi;
31193 /* DwPTS Scheduling Changes End */
31195 TRC2(rgSCHDlSiSched);
31198 crntTimInfo = cell->crntTime;
31199 #ifdef LTEMAC_HDFDD
31200 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31201 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31202 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31204 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA);
31207 /* Compute the subframe for which allocation is being made.
31208 Essentially, we need pointer to the dl frame for this subframe */
31209 sf = rgSCHUtlSubFrmGet(cell, crntTimInfo);
31211 /*Check if scheduling of MIB is required */
31213 /* since we are adding the MIB repetition logic for EMTC UEs, checking if
31214 * emtcEnabled or not, If enabled MIB would be repeted at as part of EMTC
31215 * feature, otherwise scheduling at (n,0) */
31216 if(0 == cell->emtcEnable)
31219 if((crntTimInfo.sfn % RGSCH_MIB_PERIODICITY == 0)
31220 && (RGSCH_MIB_TX_SF_NUM == crntTimInfo.slot))
31223 U8 sfnOctet, mibOct2 = 0;
31225 /*If MIB has not been yet setup by Application, return*/
31226 if(NULLP == cell->siCb.crntSiInfo.mib)
31229 SFndLenMsg(cell->siCb.crntSiInfo.mib, &mibLen);
31230 sf->bch.tbSize = mibLen;
31231 /*Fill the interface information */
31232 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, NULLD, NULLD);
31234 /*Set the bits of MIB to reflect SFN */
31235 /*First get the Most signficant 8 bits of SFN */
31236 sfnOctet = (U8)(crntTimInfo.sfn >> 2);
31237 /*Get the first two octets of MIB, and then update them
31238 using the SFN octet value obtained above.*/
31239 if(ROK != SExamMsg((Data *)(&mibOct1),
31240 cell->siCb.crntSiInfo.mib, 0))
31243 if(ROK != SExamMsg((Data *)(&mibOct2),
31244 cell->siCb.crntSiInfo.mib, 1))
31247 /* ccpu00114572- Fix for improper way of MIB Octet setting for SFN */
/* MIB octet 0 keeps its top 6 bits; octet 1 keeps its bottom 2 bits; the
 * 8 MSBs of the SFN are split across the boundary */
31248 mibOct1 = (mibOct1 & 0xFC) | (sfnOctet >> 6);
31249 mibOct2 = (mibOct2 & 0x03) | (sfnOctet << 2);
31250 /* ccpu00114572- Fix ends*/
31252 /*Now, replace the two octets in MIB */
31253 if(ROK != SRepMsg((Data)(mibOct1),
31254 cell->siCb.crntSiInfo.mib, 0))
31257 if(ROK != SRepMsg((Data)(mibOct2),
31258 cell->siCb.crntSiInfo.mib, 1))
31261 /*Copy the MIB msg buff into interface buffer */
31262 SCpyMsgMsg(cell->siCb.crntSiInfo.mib,
31263 rgSchCb[cell->instIdx].rgSchInit.region,
31264 rgSchCb[cell->instIdx].rgSchInit.pool,
31265 &subfrmAlloc->cmnLcInfo.bchInfo.pdu);
31266 /* Added Dl TB count for MIB message transmission
31267 * This counter is incremented 4 times to consider
31268 * the retransmission at the PHY level on PBCH channel*/
31270 cell->dlUlTbCnt.tbTransDlTotalCnt += RG_SCH_MIB_CNT;
31277 allocInfo->bcchAlloc.schdFirst = FALSE;
31278 /*Check if scheduling of SIB1 is required.
31279 Check of (crntTimInfo.sfn % RGSCH_SIB1_PERIODICITY == 0)
31280 is not required here since the below check takes care
31281 of SFNs applicable for this one too.*/
31282 if((crntTimInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
31283 && (RGSCH_SIB1_TX_SF_NUM == crntTimInfo.slot))
31285 /*If SIB1 has not been yet setup by Application, return*/
31286 if(NULLP == (cell->siCb.crntSiInfo.sib1Info.sib1))
/* SIB1 takes precedence over SI messages in this subframe */
31291 allocInfo->bcchAlloc.schdFirst = TRUE;
31292 mcs = cell->siCb.crntSiInfo.sib1Info.mcs;
31293 nPrb = cell->siCb.crntSiInfo.sib1Info.nPrb;
31294 msgLen = cell->siCb.crntSiInfo.sib1Info.msgLen;
31298 /*Check if scheduling of SI can be performed.*/
31299 Bool invalid = FALSE;
/* siId 0 means no SI was armed for the current window */
31301 if(cell->siCb.siCtx.siId == 0)
31304 /*Check if the Si-Window for the current Si-Context is completed*/
31305 invalid = rgSCHCmnChkPastWin(crntTimInfo, cell->siCb.siCtx.maxTimeToTx);
31308 /* LTE_ADV_FLAG_REMOVED_START */
/* Window expired with retransmissions still pending: log it */
31309 if(cell->siCb.siCtx.retxCntRem)
31311 RGSCHLOGERROR(cell->instIdx,ERRCLS_INT_PAR,ERG011,(ErrVal)cell->siCb.siCtx.siId,
31312 "rgSCHDlSiSched(): SI not scheduled and window expired");
31314 /* LTE_ADV_FLAG_REMOVED_END */
31315 if(cell->siCb.siCtx.warningSiFlag == TRUE)
31317 rgSCHUtlFreeWarningSiPdu(cell);
31318 cell->siCb.siCtx.warningSiFlag = FALSE;
31323 /*Check the timinginfo of the current SI-Context to see if its
31324 transmission can be scheduled. */
31325 if(FALSE == (rgSCHCmnChkInWin(crntTimInfo,
31326 cell->siCb.siCtx.timeToTx,
31327 cell->siCb.siCtx.maxTimeToTx)))
31332 /*Check if retransmission count has become 0*/
31333 if(0 == cell->siCb.siCtx.retxCntRem)
31338 /* LTE_ADV_FLAG_REMOVED_START */
31339 /* Check if ABS is enabled/configured */
31340 if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
31342 /* The pattern type is RGR_ABS_MUTE, then eNB need to blank the subframe */
31343 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
31345 /* Determine next scheduling subframe is ABS or not */
31346 if(RG_SCH_ABS_ENABLED_ABS_SF == (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern
31347 [((crntTimInfo.sfn*RGSCH_NUM_SUB_FRAMES) + crntTimInfo.slot) % RGR_ABS_PATTERN_LEN]))
31349 /* Skip the SI scheduling to next tti */
31354 /* LTE_ADV_FLAG_REMOVED_END */
31356 /*Schedule the transmission of the current SI-Context */
31357 /*Find out the messg length for the SI message */
31358 /* warningSiFlag is to differentiate between Warning SI
31360 if((rgSCHUtlGetMcsAndNPrb(cell, &nPrb, &mcs, &msgLen)) != ROK)
/* i = subframe offset of this tx within the SI window (used in the
 * SI-RNTI PDCCH per 36.321) */
31365 cell->siCb.siCtx.i = RGSCH_CALC_SF_DIFF(crntTimInfo,
31366 cell->siCb.siCtx.timeToTx);
31370 /*Get the number of rb required */
31371 /*rgSCHCmnClcRbAllocForFxdTb(cell, msgLen, cellDl->ccchCqi, &rb);*/
/* bitsPerRb==0: walk the iTbs-0 TBS column until the payload fits;
 * otherwise derive RBs directly from the per-RB bit capacity */
31372 if(cellDl->bitsPerRb==0)
31374 while ((rgTbSzTbl[0][0][rb]) < (U32) (msgLen*8))
31382 rb = RGSCH_CEIL((msgLen*8), cellDl->bitsPerRb);
31384 /* DwPTS Scheduling Changes Start */
31386 if (sf->sfType == RG_SCH_SPL_SF_DATA)
31388 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
31390 /* Calculate the less RE's because of DwPTS */
31391 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
31393 /* Increase number of RBs in Spl SF to compensate for lost REs */
31394 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
31397 /* DwPTS Scheduling Changes End */
31398 /*ccpu00115595- end*/
31399 /* Additional check to see if required RBs
31400 * exceeds the available */
31401 if (rb > sf->bw - sf->bwAssigned)
31403 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHDlSiSched(): "
31404 "BW allocation failed CRNTI:%d",RGSCH_SI_RNTI);
31408 /* Update the subframe Allocated BW field */
31409 sf->bwAssigned = sf->bwAssigned + rb;
31411 /*Fill the parameters in allocInfo */
31412 allocInfo->bcchAlloc.rnti = RGSCH_SI_RNTI;
31413 allocInfo->bcchAlloc.dlSf = sf;
31414 allocInfo->bcchAlloc.rbsReq = rb;
31415 /*ccpu00116710- MCS is not getting assigned */
31416 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
31418 /* ccpu00117510 - ADD - Assignment of nPrb and other information */
31419 allocInfo->bcchAlloc.nPrb = nPrb;
31420 allocInfo->bcchAlloc.tbInfo[0].bytesReq = msgLen;
/* Broadcast is always single-layer */
31421 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
31424 #endif /*RGR_SI_SCH*/
31427 /* ccpu00117452 - MOD - Changed macro name from
31428 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
31429 #ifdef RGR_CQI_REPT
31431 * @brief This function Updates the DL CQI for the UE.
31435 * Function: rgSCHCmnUeDlPwrCtColltCqiRept
31436 * Purpose: Manages PUSH N CQI reporting
31437 * Step 1: Store the CQI in collation array
31438 * Step 2: Increment the tracking count
31439 * Step 3: Check if it is time to send the report
31440 * Step 4: if yes, Send StaInd to RRM
31441 * Step 4.1: Fill StaInd for sending collated N CQI reports
31442 * Step 4.2: Call utility function (rgSCHUtlRgrStaInd) to send rpts to RRM
31443 * Step 4.2.1: If sending was not successful, return RFAILED
31444 * Step 4.2.2: If sending was successful, return ROK
31445 * Step 5: If no, return
31446 * Invoked by: rgSCHCmnDlCqiInd
31448 * @param[in] RgSchCellCb *cell
31449 * @param[in] RgSchUeCb *ue
31450 * @param[in] RgrUeCqiRept *ueCqiRpt
/* Collates periodic DL CQI reports for a UE ("PUSH N" reporting): stores
 * each report, and once N reports are collected allocates and sends a
 * StaInd to RRM. Returns RFAILED on allocation/send failure (return paths
 * partially outside this view). */
31455 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept
31459 RgrUeCqiRept *ueCqiRpt
/* K&R-style definition retained alongside the ANSI prototype (legacy builds) */
31462 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, ueCqiRpt)
31465 RgrUeCqiRept *ueCqiRpt;
31468 U8 *cqiCount = NULLP;
31470 RgrStaIndInfo *staInfo = NULLP;
31472 TRC2(rgSCHCmnUeDlPwrCtColltCqiRept)
31474 /* Step 1: Store the CQI in collation array */
31475 /* Step 2: Increment the tracking count */
31476 cqiCount = &(ue->schCqiInfo.cqiCount);
31477 ue->schCqiInfo.cqiRept[(*cqiCount)++] =
31481 /* Step 3: Check if it is time to send the report */
31482 if(RG_SCH_CQIR_IS_TIMTOSEND_CQIREPT(ue))
31484 /* Step 4: if yes, Send StaInd to RRM */
31485 retVal = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&staInfo,
31486 sizeof(RgrStaIndInfo));
31489 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
31490 "allocate memory for sending StaInd CRNTI:%d",ue->ueId);
31494 /* Step 4.1: Fill StaInd for sending collated N CQI reports */
/* Debug counter of CQI reports pushed to the application layer */
31497 extern U32 gCqiReptToAppCount;
31498 gCqiReptToAppCount++;
31503 retVal = rgSCHUtlFillSndStaInd(cell, ue, staInfo,
31504 ue->cqiReptCfgInfo.numColltdCqiRept);
31510 } /* End of rgSCHCmnUeDlPwrCtColltCqiRept */
31512 #endif /* End of RGR_CQI_REPT */
31515 * @brief This function checks for the retransmission
31516 * for a DTX scenario.
31523 * @param[in] RgSchCellCb *cell
31524 * @param[in] RgSchUeCb *ue
 * @param[in] RgSchDlHqProcCb *proc
 * @param[out] Bool *reTxAllwd -- set to FALSE when TB0 feedback was DTX
31530 PUBLIC Void rgSCHCmnChkRetxAllowDtx
31534 RgSchDlHqProcCb *proc,
31538 PUBLIC Void rgSCHCmnChkRetxAllowDtx(cell, ueCb, proc, reTxAllwd)
31541 RgSchDlHqProcCb *proc;
31545 TRC3(rgSCHCmnChkRetxAllowDtx)
/* Retransmission is disallowed when the HARQ feedback for TB0 was DTX;
 * any additional conditions are among the elided lines. */
31550 if ((proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX))
31552 *reTxAllwd = FALSE;
31559 * @brief API for calculating the SI Set Id
31563 * Function: rgSCHCmnGetSiSetId
31565 * This API is used for calculating the SI Set Id, as shown below
31567 * siSetId = 0 siSetId = 1
31568 * |******************|******************|---------------->
31569 * (0,0) (8,0) (16,0) (SFN, SF)
31572 * @param[in] U16 sfn
 * @param[in] U16 sf
 * @param[in] U16 minPeriodicity -- presumably in radio frames (code
 *            multiplies by 10 subframes/frame) -- TODO confirm
31574 * @return U16 siSetId
31577 PUBLIC U16 rgSCHCmnGetSiSetId
31584 PUBLIC U16 rgSCHCmnGetSiSetId(sfn, sf, minPeriodicity)
31587 U16 minPeriodicity;
31590 /* 80 is the minimum SI periodicity in sf. Also
31591 * all other SI periodicities are multiples of 80 */
/* siSetId = absolute subframe number / SI-set length in subframes */
31592 RETVALUE (((sfn * RGSCH_NUM_SUB_FRAMES_5G) + sf) / (minPeriodicity * 10));
31596 * @brief API for calculating the DwPts Rb, Itbs and tbSz
31600 * Function: rgSCHCmnCalcDwPtsTbSz
31602 * @param[in] RgSchCellCb *cell
31603 * @param[in] U32 bo
31604 * @param[in/out] U8 *rb
31605 * @param[in/out] U8 *iTbs
31606 * @param[in] U8 lyr
31607 * @param[in] U8 cfi
31611 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz
31621 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz(cell, bo, rb, iTbs, lyr, cfi)
31631 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
/* Convert the requested normal-subframe RBs into the number of RBs
 * needed to carry the same REs in the (shorter) DwPTS region. */
31632 U32 numRE = *rb * cellDl->noResPerRb[cfi];
31633 U32 numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);
31635 TRC2(rgSCHCmnCalcDwPtsTbSz);
31637 /* DwPts Rb cannot exceed the cell Bw */
31638 numDwPtsRb = RGSCH_MIN(numDwPtsRb, cellDl->maxDlBwPerUe);
31640 /* Adjust the iTbs for optimum usage of the DwPts region.
31641 * Using the same iTbs adjustment will not work for all
31642 * special subframe configurations and iTbs levels. Hence use the
31643 * static iTbs Delta table for adjusting the iTbs */
31644 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs);
/* Grow the DwPTS RB count until the TB size (bits) covers the buffer
 * occupancy (bo presumably in bytes, hence bo*8) or the per-UE BW cap
 * is hit; the 3/4 scaling presumably reflects the reduced DwPTS
 * capacity, and the index is clamped to at least 1 RB -- TODO confirm. */
31648 while(rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1] < bo*8 &&
31649 numDwPtsRb < cellDl->maxDlBwPerUe)
31654 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
31658 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
31666 * @brief API for calculating the DwPts Rb, Itbs and tbSz
31670 * Function: rgSCHCmnCalcDwPtsTbSz2Cw
 *
 * Two-codeword variant of rgSCHCmnCalcDwPtsTbSz: sizes both transport
 * blocks against the combined buffer occupancy.
31672 * @param[in] RgSchCellCb *cell
31673 * @param[in] U32 bo
31674 * @param[in/out] U8 *rb
31675 * @param[in] U8 maxRb
31676 * @param[in/out] U8 *iTbs1
31677 * @param[in/out] U8 *iTbs2
31678 * @param[in] U8 lyr1
31679 * @param[in] U8 lyr2
31680 * @return[in/out] U32 *tb1Sz
31681 * @return[in/out] U32 *tb2Sz
31682 * @param[in] U8 cfi
31685 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw
31700 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, rb, maxRb, iTbs1, iTbs2,
31701 lyr1, lyr2, tb1Sz, tb2Sz, cfi)
31715 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
/* Convert requested normal-subframe RBs into DwPTS-equivalent RBs. */
31716 U32 numRE = *rb * cellDl->noResPerRb[cfi];
31717 U32 numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);
31719 TRC2(rgSCHCmnCalcDwPtsTbSz2Cw);
31721 /* DwPts Rb cannot exceed the cell Bw */
31722 numDwPtsRb = RGSCH_MIN(numDwPtsRb, maxRb);
31724 /* Adjust the iTbs for optimum usage of the DwPts region.
31725 * Using the same iTbs adjustment will not work for all
31726 * special subframe configurations and iTbs levels. Hence use the
31727 * static iTbs Delta table for adjusting the iTbs */
31728 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs1);
31729 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs2);
/* Grow the RB count until the summed two-codeword TB size (bits)
 * covers bo*8 or maxRb is reached; 3/4 scaling as in the 1-CW variant. */
31731 while((rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1] +
31732 rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1])< bo*8 &&
31733 numDwPtsRb < maxRb)
/* TB sizes are converted from bits to bytes (/8) on output. */
31738 *tb1Sz = rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
31739 *tb2Sz = rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
31749 * @brief Updates the GBR LCGs when datInd is received from MAC
31753 * Function: rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
31754 * Purpose: This function updates the GBR LCGs
31755 * when datInd is received from MAC.
31759 * @param[in] RgSchCellCb *cell
31760 * @param[in] RgSchUeCb *ue
31761 * @param[in] RgInfUeDatInd *datInd
31765 PUBLIC Void rgSCHCmnUpdUeDataIndLcg
31769 RgInfUeDatInd *datInd
31772 PUBLIC Void rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
31775 RgInfUeDatInd *datInd;
31779 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
31781 Inst inst = cell->instIdx;
31784 TRC2(rgSCHCmnUpdUeDataIndLcg);
/* NOTE(review): the loop bound skips the last LCG entry
 * (RGINF_MAX_LCG_PER_UE - 1); confirm this is intentional. */
31786 for (idx = 0; (idx < RGINF_MAX_LCG_PER_UE - 1); idx++)
31788 if (datInd->lcgInfo[idx].bytesRcvd != 0)
31790 U8 lcgId = datInd->lcgInfo[idx].lcgId;
31791 U32 bytesRcvd = datInd->lcgInfo[idx].bytesRcvd;
31793 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
31795 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
/* GBR bearer: drain the effective GBR budget first, then charge any
 * remainder against the effective delta-MBR budget (floored at 0). */
31796 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
31798 if(bytesRcvd > cmnLcg->effGbr)
31800 bytesRcvd -= cmnLcg->effGbr;
31801 cmnLcg->effDeltaMbr = (cmnLcg->effDeltaMbr > bytesRcvd) ? \
31802 (cmnLcg->effDeltaMbr - bytesRcvd) : (0);
31803 cmnLcg->effGbr = 0;
31807 cmnLcg->effGbr -= bytesRcvd;
31809 /* To keep BS updated with the amount of data received for the GBR */
31810 cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31811 (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31812 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr+cmnLcg->effDeltaMbr);
/* Non-GBR LCG other than LCG0: charge the received bytes against the
 * UE-wide effective AMBR and the non-GBR buffer-status counters, each
 * floored at zero. */
31814 else if(lcgId != 0)
31816 ue->ul.effAmbr = (ue->ul.effAmbr > datInd->lcgInfo[idx].bytesRcvd) ? \
31817 (ue->ul.effAmbr - datInd->lcgInfo[idx].bytesRcvd) : (0);
31818 cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31819 (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31820 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
31821 ue->ul.nonGbrLcgBs = (ue->ul.nonGbrLcgBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31822 (ue->ul.nonGbrLcgBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31824 ue->ul.nonLcg0Bs = (ue->ul.nonLcg0Bs > datInd->lcgInfo[idx].bytesRcvd) ? \
31825 (ue->ul.nonLcg0Bs - datInd->lcgInfo[idx].bytesRcvd) : (0);
/* Delegate scheduler-specific LCG bookkeeping: eMTC UEs use the eMTC UL
 * API table, all other UEs the default UL API table. */
31834 if(TRUE == ue->isEmtcUe)
31836 if (cellSch->apisEmtcUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
31838 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));
31845 if (cellSch->apisUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
31847 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));
31853 /** @brief This function initializes DL allocation lists and prepares
31858 * Function: rgSCHCmnInitRbAlloc
31860 * @param [in] RgSchCellCb *cell
31866 PRIVATE Void rgSCHCmnInitRbAlloc
31871 PRIVATE Void rgSCHCmnInitRbAlloc (cell)
31875 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
31876 CmLteTimingInfo frm;
31880 TRC2(rgSCHCmnInitRbAlloc);
31882 /* Initializing RgSchCmnDlRbAllocInfo structure.*/
31883 rgSCHCmnInitDlRbAllocInfo(&cellSch->allocInfo);
31885 frm = cellSch->dl.time;
31887 dlSf = rgSCHUtlSubFrmGet(cell, frm);
/* Seed 5GTF per-TTI UE grouping and reset per-beam VRBG accounting. */
31889 dlSf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
31890 dlSf->numUePerGrp = cell->cell5gtfCb.uePerGrpPerTti;
31891 for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
31893 dlSf->sfBeamInfo[idx].totVrbgAllocated = 0;
31894 dlSf->sfBeamInfo[idx].totVrbgRequired = 0;
31895 dlSf->sfBeamInfo[idx].vrbgStart = 0;
31898 dlSf->remUeCnt = cellSch->dl.maxUePerDlSf;
31899 /* Updating the Subframe information in RBAllocInfo */
31900 cellSch->allocInfo.dedAlloc.dedDlSf = dlSf;
31901 cellSch->allocInfo.msg4Alloc.msg4DlSf = dlSf;
31903 /* LTE_ADV_FLAG_REMOVED_START */
31904 /* Determine next scheduling subframe is ABS or not */
31905 if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
/* Index into the ABS pattern by absolute subframe number modulo the
 * pattern length. */
31907 cell->lteAdvCb.absPatternDlIdx =
31908 ((frm.sfn*RGSCH_NUM_SUB_FRAMES_5G) + frm.slot) % RGR_ABS_PATTERN_LEN;
31909 cell->lteAdvCb.absDlSfInfo = (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern[
31910 cell->lteAdvCb.absPatternDlIdx]);
31915 cell->lteAdvCb.absDlSfInfo = RG_SCH_ABS_DISABLED;
31917 /* LTE_ADV_FLAG_REMOVED_END */
31920 cellSch->allocInfo.ccchSduAlloc.ccchSduDlSf = dlSf;
31923 /* Update subframe-wide allocation information with SPS allocation */
31924 rgSCHCmnSpsDlUpdDlSfAllocWithSps(cell, frm, dlSf);
31933 * @brief Sends the TX mode change indication to RRM.
31938 * Function: rgSCHCmnSendTxModeInd(cell, ueUl, newTxMode)
31939 * Purpose: This function sends the TX mode Change
31940 * indication to RRM
31945 * @param[in] RgSchCellCb *cell
31946 * @param[in] RgSchUeCb *ue
31947 * @param[in] U8 newTxMode
31951 PRIVATE Void rgSCHCmnSendTxModeInd
31958 PRIVATE Void rgSCHCmnSendTxModeInd(cell, ue, newTxMode)
31964 RgmTransModeInd *txModeChgInd;
31965 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
31967 TRC2(rgSCHCmnSendTxModeInd);
/* Send only when a TX-mode reconfiguration is not already forcing
 * transmit diversity for this UE. */
31969 if(!(ueDl->mimoInfo.forceTD & RG_SCH_CMN_TD_TXMODE_RECFG))
/* Message buffer comes from the RGM SAP's pst region/pool; on failure
 * the indication is dropped (early-return lines elided). */
31972 if(SGetSBuf(cell->rgmSap->sapCfg.sapPst.region,
31973 cell->rgmSap->sapCfg.sapPst.pool, (Data**)&txModeChgInd,
31974 sizeof(RgmTransModeInd)) != ROK)
31978 RG_SCH_FILL_RGM_TRANSMODE_IND(ue->ueId, cell->cellId, newTxMode, txModeChgInd);
31979 RgUiRgmChangeTransModeInd(&(cell->rgmSap->sapCfg.sapPst),
31980 cell->rgmSap->sapCfg.suId, txModeChgInd);
/* Reset mode-change hysteresis counters after sending the indication. */
31983 ue->mimoInfo.txModUpChgFactor = 0;
31984 ue->mimoInfo.txModDownChgFactor = 0;
31985 ueDl->laCb[0].deltaiTbs = 0;
31991 * @brief Check & Updates the TM Mode change threshold based on cqiiTbs and
31996 * Function: rgSchCheckAndTriggerModeChange(cell, ueUl, iTbsNew)
31997 * Purpose: This function updates and checks the threshold for TM mode
32002 * @param[in] RgSchCellCb *cell
32003 * @param[in] RgSchUeCb *ue
32004 * @param[in] U8 iTbs
32008 PUBLIC Void rgSchCheckAndTriggerModeChange
32017 PUBLIC Void rgSchCheckAndTriggerModeChange(cell, ue, reportediTbs, previTbs, maxiTbs)
32025 RgrTxMode txMode; /*!< UE's Transmission Mode */
32026 RgrTxMode modTxMode; /*!< UE's Transmission Mode */
32028 TRC2(rgSchCheckAndTriggerModeChange);
32030 txMode = ue->mimoInfo.txMode;
32032 /* Check for Step down */
32033 /* Step down only when TM4 is configured. */
32034 if(RGR_UE_TM_4 == txMode)
/* Hysteresis: a reported iTbs exceeding the previous one by at least the
 * step-down check factor counts toward a TM4->TM3 switch; otherwise the
 * counter decays, floored at -RG_SCH_MODE_CHNG_STEPDOWN_THRSHD. */
32036 if((previTbs <= reportediTbs) && ((reportediTbs - previTbs) >= RG_SCH_MODE_CHNG_STEPDOWN_CHECK_FACTOR))
32038 ue->mimoInfo.txModDownChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
32042 ue->mimoInfo.txModDownChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
32045 ue->mimoInfo.txModDownChgFactor =
32046 RGSCH_MAX(ue->mimoInfo.txModDownChgFactor, -(RG_SCH_MODE_CHNG_STEPDOWN_THRSHD));
32048 if(ue->mimoInfo.txModDownChgFactor >= RG_SCH_MODE_CHNG_STEPDOWN_THRSHD)
32050 /* Trigger Mode step down */
32051 modTxMode = RGR_UE_TM_3;
32052 rgSCHCmnSendTxModeInd(cell, ue, modTxMode);
32056 /* Check for Step up */
32057 /* Step Up only when TM3 is configured, Max possible Mode is TM4*/
32058 if(RGR_UE_TM_3 == txMode)
/* Step-up evidence: reported iTbs dropped below the previous one, or the
 * previous iTbs already sits at the maximum. */
32060 if((previTbs > reportediTbs) || (maxiTbs == previTbs))
32062 ue->mimoInfo.txModUpChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
32066 ue->mimoInfo.txModUpChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
32069 ue->mimoInfo.txModUpChgFactor =
32070 RGSCH_MAX(ue->mimoInfo.txModUpChgFactor, -(RG_SCH_MODE_CHNG_STEPUP_THRSHD));
32072 /* Check if TM step up need to be triggered */
32073 if(ue->mimoInfo.txModUpChgFactor >= RG_SCH_MODE_CHNG_STEPUP_THRSHD)
32075 /* Trigger mode change */
32076 modTxMode = RGR_UE_TM_4;
32077 rgSCHCmnSendTxModeInd(cell, ue, modTxMode);
32086 * @brief Returns whether CSG UEs currently have DL priority.
32090 * Function: rgSCHCmnIsDlCsgPrio (cell)
32091 * Purpose: This function returns if csg UEs are
32092 * having priority at current time
32094 * Invoked by: Scheduler
32096 * @param[in] RgSchCellCb *cell
 * NOTE(review): the ue/datInd params below look copy-pasted from
 * rgSCHCmnUpdUeDataIndLcg; only cell is used by the visible code.
32097 * @param[in] RgSchUeCb *ue
32098 * @param[in] RgInfUeDatInd *datInd
32102 PUBLIC Bool rgSCHCmnIsDlCsgPrio
32107 PUBLIC Bool rgSCHCmnIsDlCsgPrio(cell)
32112 RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);
32114 TRC2(rgSCHCmnIsDlCsgPrio)
32115 /* Calculating the percentage resource allocated */
/* CSG prioritisation applies only to hybrid-access cells. */
32116 if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
/* Compare the non-CSG DL PRB share (percent) against the configured
 * minimum (minDlResNonCsg); the return branches are elided here. */
32122 if(((cmnDlCell->ncsgPrbCnt * 100) / cmnDlCell->totPrbCnt) < cell->minDlResNonCsg)
32134 * @brief Returns whether CSG UEs currently have UL priority.
32138 * Function: rgSCHCmnIsUlCsgPrio (cell)
32139 * Purpose: This function returns if csg UEs are
32140 * having priority at current time
32142 * Invoked by: Scheduler
32144 * @param[in] RgSchCellCb *cell
 * NOTE(review): the ue/datInd params below look copy-pasted from
 * rgSCHCmnUpdUeDataIndLcg; only cell is used by the visible code.
32145 * @param[in] RgSchUeCb *ue
32146 * @param[in] RgInfUeDatInd *datInd
32150 PUBLIC Bool rgSCHCmnIsUlCsgPrio
32155 PUBLIC Bool rgSCHCmnIsUlCsgPrio(cell)
32159 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
32161 TRC2(rgSCHCmnIsUlCsgPrio)
32163 /* Calculating the percentage resource allocated */
/* CSG prioritisation applies only to hybrid-access cells. */
32164 if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
/* Compare the non-CSG UL PRB share (percent) against the configured
 * minimum (minUlResNonCsg); the return branches are elided here. */
32170 if (((cmnUlCell->ncsgPrbCnt * 100) /cmnUlCell->totPrbCnt) < cell->minUlResNonCsg)
32181 /** @brief DL scheduler for SPS, and all other downlink data
32185 * Function: rgSchCmnPreDlSch
 *
 * Runs scheduler-specific DL pre-scheduling for the primary cell
 * (cell[0]) and fills cellLst with the input cells ordered by ascending
 * remUeCnt of their dedicated DL subframe (insertion sort below).
32187 * @param [in] Inst schInst;
 * @param [in] RgSchCellCb **cell     -- input cells, cell[0] is primary
 * @param [in] U8 nCell               -- cell count (<= CM_LTE_MAX_CELLS)
 * @param [out] RgSchCellCb **cellLst -- cells reordered for processing
32192 PUBLIC Void rgSchCmnPreDlSch
32194 RgSchCellCb **cell,
32196 RgSchCellCb **cellLst
32199 PUBLIC Void rgSchCmnPreDlSch(cell, nCell, cellLst)
32200 RgSchCellCb **cell;
32202 RgSchCellCb **cellLst;
32205 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell[0]);
32209 TRC2(rgSchCmnPreDlSch);
/* Guard against out-of-range cell count (return path elided). */
32211 if(nCell > CM_LTE_MAX_CELLS)
32216 if (cell[0]->isDlDataAllwd && (cell[0]->stopDlSch == FALSE))
32218 /* Specific DL scheduler to perform UE scheduling */
32219 cellSch->apisDl->rgSCHDlPreSched(cell[0]);
32221 /* Rearranging the cell entries based on their remueCnt in SF.
32222 * cells will be processed in the order of number of ue scheduled
32224 for (idx = 0; idx < nCell; idx++)
32227 cellSch = RG_SCH_CMN_GET_CELL(cell[idx]);
32228 sf = cellSch->allocInfo.dedAlloc.dedDlSf;
32232 cellLst[idx] = cell[idx];
/* Insertion sort: shift entries right until this cell's subframe
 * remUeCnt finds its ordered slot. */
32236 for(j = 0; j < idx; j++)
32238 RgSchCmnCell *cmnCell = RG_SCH_CMN_GET_CELL(cellLst[j]);
32239 RgSchDlSf *subfrm = cmnCell->allocInfo.dedAlloc.dedDlSf;
32241 if(sf->remUeCnt < subfrm->remUeCnt)
32244 for(k = idx; k > j; k--)
32246 cellLst[k] = cellLst[k-1];
32251 cellLst[j] = cell[idx];
/* Fallback path (its condition is elided): copy cells in input order. */
32256 for (idx = 0; idx < nCell; idx++)
32258 cellLst[idx] = cell[idx];
32264 /** @brief DL scheduler for SPS, and all other downlink data
32267 * Function: rgSchCmnPstDlSch
 *
 * Invokes the scheduler-specific DL post-scheduling hook when DL data
 * scheduling is allowed and not stopped for the cell.
32269 * @param [in] Inst schInst;
32274 PUBLIC Void rgSchCmnPstDlSch
32279 PUBLIC Void rgSchCmnPstDlSch(cell)
32283 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
32285 TRC2(rgSchCmnPstDlSch);
32287 if (cell->isDlDataAllwd && (cell->stopDlSch == FALSE))
/* NOTE(review): this hook takes the instance index, unlike
 * rgSCHDlPreSched above which takes the cell pointer. */
32289 cellSch->apisDl->rgSCHDlPstSched(cell->instIdx);
32294 PUBLIC U8 rgSCHCmnCalcPcqiBitSz
/* Computes the periodic CQI (PUCCH) report size in bits (pcqiSz) for the
 * UE's configured periodic reporting mode and TX antenna count. */
32300 PUBLIC U8 rgSCHCmnCalcPcqiBitSz(ueCb, numTxAnt)
32308 RgSchUePCqiCb *cqiCb = ueCb->nPCqiCb;
32310 TRC3(rgSCHCmnCalcPcqiBitSz);
32312 confRepMode = cqiCb->cqiCfg.cqiSetup.prdModeEnum;
/* The periodic RI value (perRiVal) is read only for the MIMO
 * transmission modes TM3/TM4; the non-MIMO branch is elided here. */
32313 if((ueCb->mimoInfo.txMode != RGR_UE_TM_3) &&
32314 (ueCb->mimoInfo.txMode != RGR_UE_TM_4))
32320 ri = cqiCb->perRiVal;
/* Report size depends on the configured periodic reporting mode. */
32322 switch(confRepMode)
32324 case RGR_PRD_CQI_MOD10:
32330 case RGR_PRD_CQI_MOD11:
32343 else if(numTxAnt == 4)
32356 /* This is number of antenna case 1.
32357 * This is not applicable for Mode 1-1.
32358 * So setting it to invalid value */
32364 case RGR_PRD_CQI_MOD20:
/* Subband report: 4-bit CQI plus the subband label bits. */
32372 pcqiSz = 4 + cqiCb->label;
32377 case RGR_PRD_CQI_MOD21:
32392 else if(numTxAnt == 4)
32405 /* This might be number of antenna case 1.
32406 * For mode 2-1 wideband case only antenna port 2 or 4 is supported.
32407 * So setting invalid value.*/
32415 pcqiSz = 4 + cqiCb->label;
32419 pcqiSz = 7 + cqiCb->label;
32432 /** @brief DL scheduler for SPS, and all other downlink data
32436 * Function: rgSCHCmnDlSch
 *
 * Top-level common DL scheduling entry: bails out when dynamic TDD has
 * already marked the DL control subframe as UL, otherwise runs UE
 * scheduling, RB allocation, allocation finalization, PDCCH-order
 * allocation and PUCCH group power control.
32438 * @param [in] RgSchCellCb *cell
32444 PUBLIC Void rgSCHCmnDlSch
32449 PUBLIC Void rgSCHCmnDlSch (cell)
32454 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
32456 RgSchDynTddCb *rgSchDynTddInfo = &(rgSchCb[cell->instIdx].rgSchDynTdd);
32460 TRC2(rgSCHCmnDlSch);
32462 dlSf = rgSCHUtlSubFrmGet(cell, cellSch->dl.time);
32464 if (rgSchDynTddInfo->isDynTddEnbld)
32466 RG_SCH_DYN_TDD_GET_SFIDX(dlCntrlSfIdx, rgSchDynTddInfo->crntDTddSfIdx,
32467 RG_SCH_CMN_DL_DELTA);
32468 if(RG_SCH_DYNTDD_DLC_ULD == rgSchDynTddInfo->sfInfo[dlCntrlSfIdx].sfType)
/* Debug counter/printf, emitted for cell 1 only, when the subframe was
 * already marked UL by dynamic TDD. */
32470 if(1 == cell->cellId)
32472 ul5gtfsidDlAlreadyMarkUl++;
32474 printf("ul5gtfsidDlAlreadyMarkUl: %d, [sfn:sf] [%04d:%02d]\n",
32475 ul5gtfsidDlAlreadyMarkUl, cellSch->dl.time.sfn,
32476 cellSch->dl.time.slot);
32484 /* Specific DL scheduler to perform UE scheduling */
32485 cellSch->apisDl->rgSCHDlNewSched(cell, &cellSch->allocInfo);
32486 /* LTE_ADV_FLAG_REMOVED_END */
32488 /* call common allocator for RB Allocation */
32489 rgSCHCmnDlRbAlloc(cell, &cellSch->allocInfo);
32491 /* Finalize the Allocations for requested Against alloced */
32492 rgSCHCmnDlAllocFnlz(cell);
32494 /* Perform Pdcch allocations for PDCCH Order Q.
32495 * As of now, giving this the least preference.
32496 * This func call could be moved above other allocations
32498 rgSCHCmnGenPdcchOrder(cell, dlSf);
32500 /* Do group power control for PUCCH */
32501 rgSCHCmnGrpPwrCntrlPucch(cell, dlSf);
32506 /**********************************************************************
32509 **********************************************************************/