1 /*******************************************************************************
2 ################################################################################
3 # Copyright (c) [2017-2019] [Radisys] #
5 # Licensed under the Apache License, Version 2.0 (the "License"); #
6 # you may not use this file except in compliance with the License. #
7 # You may obtain a copy of the License at #
9 # http://www.apache.org/licenses/LICENSE-2.0 #
11 # Unless required by applicable law or agreed to in writing, software #
12 # distributed under the License is distributed on an "AS IS" BASIS, #
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
14 # See the License for the specific language governing permissions and #
15 # limitations under the License. #
16 ################################################################################
17 *******************************************************************************/
19 /************************************************************************
Desc: C source code for Entry point functions
29 **********************************************************************/
31 /** @file rg_sch_cmn.c
32 @brief This file implements the schedulers main access to MAC layer code.
35 static const char* RLOG_MODULE_NAME="MAC";
36 static int RLOG_FILE_ID=187;
37 static int RLOG_MODULE_ID=4096;
39 /* header include files -- defines (.h) */
40 #include "envopt.h" /* environment options */
41 #include "envdep.h" /* environment dependent */
42 #include "envind.h" /* environment independent */
43 #include "gen.h" /* general layer */
44 #include "ssi.h" /* system service interface */
45 #include "cm_hash.h" /* common hash list */
46 #include "cm_llist.h" /* common linked list library */
47 #include "cm_err.h" /* common error */
48 #include "cm_lte.h" /* common LTE */
55 #include "rg_sch_err.h"
56 #include "rg_sch_inf.h"
58 #include "rg_sch_cmn.h"
59 #include "rl_interface.h"
60 #include "rl_common.h"
62 /* header/extern include files (.x) */
63 #include "gen.x" /* general layer typedefs */
64 #include "ssi.x" /* system services typedefs */
65 #include "cm5.x" /* common timers */
66 #include "cm_hash.x" /* common hash list */
67 #include "cm_lib.x" /* common library */
68 #include "cm_llist.x" /* common linked list */
69 #include "cm_mblk.x" /* memory management */
70 #include "cm_tkns.x" /* common tokens */
71 #include "cm_lte.x" /* common tokens */
72 #include "tfu.x" /* TFU types */
73 #include "lrg.x" /* layer management typedefs for MAC */
74 #include "rgr.x" /* layer management typedefs for MAC */
75 #include "rgm.x" /* layer management typedefs for MAC */
76 #include "rg_sch_inf.x" /* typedefs for Scheduler */
77 #include "rg_sch.x" /* typedefs for Scheduler */
78 #include "rg_sch_cmn.x" /* typedefs for Scheduler */
80 #include "lrg.x" /* Stats Structures */
81 #endif /* MAC_SCH_STATS */
84 #endif /* __cplusplus */
87 EXTERN U32 emtcStatsUlTomSrInd;
88 EXTERN U32 emtcStatsUlBsrTmrTxp;
/* Absolute difference of two iTBS values. Written so that unsigned
 * arguments never produce a negative intermediate. NOTE: each argument
 * is evaluated twice -- do not pass side-effecting expressions. */
#define RG_ITBS_DIFF(_x, _y) ((_x) > (_y) ? (_x) - (_y) : (_y) - (_x))
92 EXTERN Void rgSCHSc1UlInit ARGS((RgUlSchdApis *apis));
93 #ifdef RG_PHASE2_SCHED
94 EXTERN Void rgSCHRrUlInit ARGS((RgUlSchdApis *apis));
96 EXTERN Void rgSCHEmtcHqInfoFree ARGS((RgSchCellCb *cell, RgSchDlHqProcCb *hqP));
97 EXTERN Void rgSCHEmtcRrUlInit ARGS((RgUlSchdApis *apis));
98 EXTERN Void rgSCHEmtcCmnDlInit ARGS((Void));
99 EXTERN Void rgSCHEmtcCmnUlInit ARGS((Void));
100 EXTERN Void rgSCHEmtcCmnUeNbReset ARGS((RgSchUeCb *ueCb));
101 EXTERN RgSchCmnCqiToTbs *rgSchEmtcCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
103 EXTERN Void rgSCHMaxciUlInit ARGS((RgUlSchdApis *apis));
104 EXTERN Void rgSCHPfsUlInit ARGS((RgUlSchdApis *apis));
106 EXTERN Void rgSCHSc1DlInit ARGS((RgDlSchdApis *apis));
107 #ifdef RG_PHASE2_SCHED
108 EXTERN Void rgSCHRrDlInit ARGS((RgDlSchdApis *apis));
110 EXTERN Void rgSCHEmtcRrDlInit ARGS((RgDlEmtcSchdApis *apis));
112 EXTERN Void rgSCHMaxciDlInit ARGS((RgDlSchdApis *apis));
113 EXTERN Void rgSCHPfsDlInit ARGS((RgDlSchdApis *apis));
115 EXTERN Void rgSCHDlfsInit ARGS((RgDlfsSchdApis *apis));
119 EXTERN Void rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl ARGS((RgSchCellCb *cell));
120 EXTERN Void rgSCHCmnGetEmtcDciFrmtSizes ARGS((RgSchCellCb *cell));
121 EXTERN Void rgSCHEmtcRrUlProcRmvFrmRetx ARGS((RgSchCellCb *cell, RgSchUlHqProcCb *proc));
122 EXTERN S16 rgSCHCmnPrecompEmtcMsg3Vars
124 RgSchCmnUlCell *cellUl,
130 PUBLIC Void rgSCHEmtcCmnUeCcchSduDel
135 EXTERN Void rgSCHEmtcRmvFrmTaLst
137 RgSchCmnDlCell *cellDl,
140 EXTERN Void rgSCHEmtcInitTaLst
142 RgSchCmnDlCell *cellDl
144 EXTERN Void rgSCHEmtcAddToTaLst
146 RgSchCmnDlCell *cellDl,
153 PRIVATE Void rgSCHDlSiSched ARGS((RgSchCellCb *cell,
154 RgSchCmnDlRbAllocInfo *allocInfo,
155 RgInfSfAlloc *subfrmAlloc));
156 PRIVATE Void rgSCHChkNUpdSiCfg ARGS((RgSchCellCb *cell));
157 PRIVATE Void rgSCHSelectSi ARGS((RgSchCellCb *cell));
158 #endif /*RGR_SI_SCH*/
159 /* LTE_ADV_FLAG_REMOVED_START */
161 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
169 PRIVATE S16 rgSCHCmnBuildRntpInfo (
178 PUBLIC Void rgSCHCmnDlSpsSch
182 /* LTE_ADV_FLAG_REMOVED_END */
184 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc ARGS((
186 RgSchCmnDlRbAllocInfo *allocInfo
188 PRIVATE Void rgSCHBcchPcchDlRbAlloc ARGS((
190 RgSchCmnDlRbAllocInfo *allocInfo
192 PRIVATE Void rgSCHCmnDlBcchPcchAlloc ARGS((
196 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
199 TfuDlCqiPucch *pucchCqi,
200 RgrUeCqiRept *ueCqiRept,
202 Bool *is2ndCwCqiAvail
204 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
207 TfuDlCqiPusch *puschCqi,
208 RgrUeCqiRept *ueCqiRept,
210 Bool *is2ndCwCqiAvail
213 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
216 TfuDlCqiPucch *pucchCqi
218 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
221 TfuDlCqiPusch *puschCqi
224 /* ccpu00117452 - MOD - Changed macro name from
225 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
227 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept ARGS((
230 RgrUeCqiRept *ueCqiRept));
231 #endif /* End of RGR_CQI_REPT */
232 /* Fix: syed align multiple UEs to refresh at same time */
233 PRIVATE Void rgSCHCmnGetRefreshPer ARGS((
237 PRIVATE S16 rgSCHCmnApplyUeRefresh ARGS((
241 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa ARGS
246 PRIVATE Void rgSCHCheckAndSetTxScheme ARGS
254 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz ARGS
264 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw ARGS
280 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
284 RgSchDlRbAlloc *allocInfo,
287 PRIVATE Void rgSCHCmnInitRbAlloc ARGS
293 #endif /* __cplusplus */
/* Common scheduler API dispatch objects: rgSchCmnApis is the externally
 * visible API block; the tables below hold one UL/DL API set per
 * configured scheduler. Populated elsewhere -- no initializer here. */
PUBLIC RgSchdApis rgSchCmnApis;
PRIVATE RgUlSchdApis rgSchUlSchdTbl[RGSCH_NUM_SCHEDULERS];
PRIVATE RgDlSchdApis rgSchDlSchdTbl[RGSCH_NUM_SCHEDULERS];
/* eMTC-specific scheduler API tables, one entry per eMTC scheduler */
PRIVATE RgUlSchdApis rgSchEmtcUlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
PRIVATE RgDlEmtcSchdApis rgSchEmtcDlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
304 #ifdef RG_PHASE2_SCHED
305 PRIVATE RgDlfsSchdApis rgSchDlfsSchdTbl[RGSCH_NUM_DLFS_SCHEDULERS];
/* UL/DL scheduler initializer lists (macro-expanded tables of init
 * functions, one per scheduler) */
PRIVATE RgUlSchdInits rgSchUlSchdInits = RGSCH_ULSCHED_INITS;
PRIVATE RgDlSchdInits rgSchDlSchdInits = RGSCH_DLSCHED_INITS;
/* eMTC UL/DL scheduler initializer lists */
PRIVATE RgEmtcUlSchdInits rgSchEmtcUlSchdInits = RGSCH_EMTC_ULSCHED_INITS;
PRIVATE RgEmtcDlSchdInits rgSchEmtcDlSchdInits = RGSCH_EMTC_DLSCHED_INITS;
313 #if (defined (RG_PHASE2_SCHED) && defined (TFU_UPGRADE))
314 PRIVATE RgDlfsSchdInits rgSchDlfsSchdInits = RGSCH_DLFSSCHED_INITS;
317 typedef Void (*RgSchCmnDlAllocRbFunc) ARGS((RgSchCellCb *cell, RgSchDlSf *subFrm,
318 RgSchUeCb *ue, U32 bo, U32 *effBo, RgSchDlHqProcCb *proc,
319 RgSchCmnDlRbAllocInfo *cellWdAllocInfo));
320 typedef U8 (*RgSchCmnDlGetPrecInfFunc) ARGS((RgSchCellCb *cell, RgSchUeCb *ue,
321 U8 numLyrs, Bool bothCwEnbld));
323 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1 ARGS((
325 RgSchDlRbAlloc *rbAllocInfo,
326 RgSchDlHqProcCb *hqP,
330 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A ARGS((
332 RgSchDlRbAlloc *rbAllocInfo,
333 RgSchDlHqProcCb *hqP,
337 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B ARGS((
339 RgSchDlRbAlloc *rbAllocInfo,
340 RgSchDlHqProcCb *hqP,
344 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2 ARGS((
346 RgSchDlRbAlloc *rbAllocInfo,
347 RgSchDlHqProcCb *hqP,
351 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A ARGS((
353 RgSchDlRbAlloc *rbAllocInfo,
354 RgSchDlHqProcCb *hqP,
358 PRIVATE Void rgSCHCmnDlAllocTxRbTM1 ARGS((
364 RgSchDlHqProcCb *proc,
365 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
367 PRIVATE Void rgSCHCmnDlAllocTxRbTM2 ARGS((
373 RgSchDlHqProcCb *proc,
374 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
376 PRIVATE Void rgSCHCmnDlAllocTxRbTM3 ARGS((
382 RgSchDlHqProcCb *proc,
383 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
385 PRIVATE Void rgSCHCmnDlAllocTxRbTM4 ARGS((
391 RgSchDlHqProcCb *proc,
392 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
395 PRIVATE Void rgSCHCmnDlAllocTxRbTM5 ARGS((
401 RgSchDlHqProcCb *proc,
402 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
405 PRIVATE Void rgSCHCmnDlAllocTxRbTM6 ARGS((
411 RgSchDlHqProcCb *proc,
412 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
414 PRIVATE Void rgSCHCmnDlAllocTxRbTM7 ARGS((
420 RgSchDlHqProcCb *proc,
421 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
423 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1 ARGS((
429 RgSchDlHqProcCb *proc,
430 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
432 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2 ARGS((
438 RgSchDlHqProcCb *proc,
439 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
441 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3 ARGS((
447 RgSchDlHqProcCb *proc,
448 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
450 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4 ARGS((
456 RgSchDlHqProcCb *proc,
457 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
460 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5 ARGS((
466 RgSchDlHqProcCb *proc,
467 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
470 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6 ARGS((
476 RgSchDlHqProcCb *proc,
477 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
479 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7 ARGS((
485 RgSchDlHqProcCb *proc,
486 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
490 PRIVATE U8 rgSchGetN1ResCount ARGS ((
494 PUBLIC Bool rgSchCmnChkDataOnlyOnPcell
500 PUBLIC U8 rgSCHCmnCalcPcqiBitSz
/* Functions specific to each transmission mode for DL Tx RB Allocation.
 * Indexed by (TM - 1); the TM5 slot is NULLP (no allocator installed). */
RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[7] = {rgSCHCmnDlAllocTxRbTM1,
rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7};
/* Functions specific to each transmission mode for DL Retx RB Allocation.
 * Indexed by (TM - 1); the TM5 slot is NULLP (no allocator installed). */
RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[7] = {rgSCHCmnDlAllocRetxRbTM1,
rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7};
/* Functions specific to each transmission mode for DL Tx RB Allocation.
 * 9-entry build variant (preprocessor guard not visible in this chunk --
 * presumably the LTE-Adv/TM9 build). Indexed by (TM - 1); TM5, TM8 and
 * TM9 slots are NULLP (no allocator installed). */
RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[9] = {rgSCHCmnDlAllocTxRbTM1,
rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7, NULLP, NULLP};
/* Functions specific to each transmission mode for DL Retx RB Allocation.
 * 9-entry build variant (preprocessor guard not visible in this chunk).
 * Indexed by (TM - 1); TM5, TM8 and TM9 slots are NULLP. */
RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[9] = {rgSCHCmnDlAllocRetxRbTM1,
rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7, NULLP, NULLP};
530 PRIVATE U8 rgSCHCmnDlTM3PrecInf2 ARGS((
536 PRIVATE U8 rgSCHCmnDlTM3PrecInf4 ARGS((
542 PRIVATE U8 rgSCHCmnDlTM4PrecInf2 ARGS((
548 PRIVATE U8 rgSCHCmnDlTM4PrecInf4 ARGS((
554 /* Functions specific to each transmission mode for DL RB Allocation*/
555 RgSchCmnDlGetPrecInfFunc getPrecInfoFunc[2][2] = {
556 {rgSCHCmnDlTM3PrecInf2, rgSCHCmnDlTM3PrecInf4},
557 {rgSCHCmnDlTM4PrecInf2, rgSCHCmnDlTM4PrecInf4}
560 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb ARGS((
564 RgSchDlHqTbCb *tbInfo,
569 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb ARGS((
573 RgSchDlHqProcCb *proc,
578 PRIVATE Void rgSCHCmnDlTM3TxTx ARGS((
584 RgSchDlHqProcCb *proc,
585 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
587 PRIVATE Void rgSCHCmnDlTM3TxRetx ARGS((
593 RgSchDlHqProcCb *proc,
594 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
596 PRIVATE Void rgSCHCmnDlTM3RetxRetx ARGS((
602 RgSchDlHqProcCb *proc,
603 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
606 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc ARGS((
612 /* LTE_ADV_FLAG_REMOVED_START */
614 PRIVATE Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc ARGS((
621 /* LTE_ADV_FLAG_REMOVED_END */
622 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx ARGS((
624 RgSchCmnDlRbAllocInfo *allocInfo,
626 RgSchDlHqProcCb *proc
628 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx ARGS((
630 RgSchCmnDlRbAllocInfo *allocInfo,
634 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst ARGS((
635 RgSchCmnDlRbAllocInfo *allocInfo,
637 RgSchDlHqProcCb *proc
639 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb ARGS((
643 RgSchDlHqTbCb *reTxTb,
648 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb ARGS((
652 RgSchDlHqProcCb *proc,
657 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb ARGS((
661 RgSchDlHqTbCb *tbInfo,
667 PRIVATE Void rgSCHCmnFillHqPTb ARGS((
669 RgSchDlRbAlloc *rbAllocInfo,
675 PRIVATE Void rgSCHCmnDlGetBestFitHole ARGS((
684 #ifdef RGSCH_SPS_UNUSED
685 PRIVATE U32 rgSCHCmnGetRaType1Mask ARGS((
691 PRIVATE U32 rgSCHCmnGetRaType0Mask ARGS((
695 PRIVATE U32 rgSCHCmnGetRaType2Mask ARGS((
701 PUBLIC Bool rgSCHCmnRetxAllocAvoid ARGS((
704 RgSchDlHqProcCb *proc
707 PUBLIC U16 rgSCHCmnGetSiSetId ARGS((
714 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi ARGS((
/* 5GTF: transport block size per MCS index.
 * TODO_SID: Currently the table is only for 100 PRBs. Needs to be
 * modified w.r.t. VRBG table 8.1.5.2.1-1 of V5G_213. */
U32 rgSch5gtfTbSzTbl[MAX_5GTF_MCS] =
{1864, 5256, 8776, 13176, 17576, 21976, 26376, 31656, 35176, 39576, 43976, 47496, 52776, 59376, 66392};
/* 5GTF UL scheduling event counters (debug/statistics). Each name
 * describes the counted event; all start at zero.
 * NOTE(review): the increment sites are outside this chunk -- confirm
 * exact semantics at the point of use. */
U32 gUl5gtfSrRecv = 0;             /* scheduling requests received */
U32 gUl5gtfBsrRecv = 0;            /* buffer status reports received */
U32 gUl5gtfUeSchPick = 0;
U32 gUl5gtfPdcchSchd = 0;
U32 gUl5gtfAllocAllocated = 0;
U32 gUl5gtfUeRbAllocDone = 0;
U32 gUl5gtfUeRmvFnlzZeroBo = 0;
U32 gUl5gtfUeFnlzReAdd = 0;
U32 gUl5gtfPdcchSend = 0;
U32 gUl5gtfRbAllocFail = 0;
U32 ul5gtfsidUlMarkUl = 0;
U32 ul5gtfsidDlSchdPass = 0;
U32 ul5gtfsidDlAlreadyMarkUl = 0;
U32 ul5gtfTotSchdCnt = 0;
/* CQI offset index to beta-CQI offset value mapping, stored as parts
 * per 1000 (i.e. beta * 1000). Reserved indices are set to 0.
 * Refer 36.213 sec 8.6.3 Tbl 8.6.3-3. */
PUBLIC U32 rgSchCmnBetaCqiOffstTbl[16] = {0, 0, 1125,
1250, 1375, 1625, 1750, 2000, 2250, 2500, 2875,
3125, 3500, 4000, 5000, 6250};
/* HARQ-ACK offset index to beta-HARQ offset value, parts per 1000;
 * the last index is reserved and set to 0 (same convention as the CQI
 * table above). NOTE(review): presumably 36.213 Tbl 8.6.3-1 -- confirm. */
PUBLIC U32 rgSchCmnBetaHqOffstTbl[16] = {2000, 2500, 3125,
4000, 5000, 6250, 8000,10000, 12625, 15875, 20000,
31000, 50000,80000,126000,0};
756 PUBLIC U32 rgSchCmnBetaRiOffstTbl[16] = {1250, 1625, 2000,
757 2500, 3125, 4000, 5000, 6250, 8000, 10000,12625,
/* 3-bit differential CQI value -> signed CQI offset (values 4..7 map to
 * negative offsets, two's-complement style) */
PUBLIC S8 rgSchCmnDlCqiDiffOfst[8] = {0, 1, 2, 3, -4, -3, -2, -1};
/* Include CRS REs while calculating Efficiency */
/* Indexed by antenna-port count; entries for unused counts (0 and 3)
 * are 0. rgSchCmnAntIdx maps antenna-port count {1,2,4} -> {0,1,2}. */
CONSTANT PRIVATE U8 rgSchCmnAntIdx[5] = {0,0,1,0,2};
/* Resource elements consumed by CRS, indexed by antenna-port count.
 * NOTE(review): assumed to be REs per RB pair -- confirm at use site. */
CONSTANT PRIVATE U8 rgSchCmnNumResForCrs[5] = {0,6,12,0,16};
/* Aperiodic CQI differential offsets: UE-selected-subband deltas and
 * eNB-configured deltas respectively. NOTE(review): semantics inferred
 * from the names (ApUeSel/ApEnbConf) -- confirm at the use sites. */
PUBLIC S8 rgSchCmnApUeSelDiffCqi[4] = {1, 2, 3, 4};
PUBLIC S8 rgSchCmnApEnbConfDiffCqi[4] = {0, 1, 2, -1};
774 typedef struct rgSchCmnDlUeDciFrmtOptns
776 TfuDciFormat spfcDciFrmt; /* TM(Transmission Mode) specific DCI format.
777 * Search space : UE Specific by C-RNTI only. */
778 U8 spfcDciRAType; /* Resource Alloctn(RA) type for spfcDciFrmt */
779 TfuDciFormat prfrdDciFrmt; /* Preferred DCI format among the available
780 * options for TD (Transmit Diversity) */
781 U8 prfrdDciRAType; /* Resource Alloctn(RA) type for prfrdDciFrmt */
782 }RgSchCmnDlUeDciFrmtOptns;
785 /* DCI Format options for each Transmission Mode */
786 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[7] = {
787 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
788 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
789 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
790 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
791 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
792 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
793 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
797 /* DCI Format options for each Transmission Mode */
798 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[9] = {
799 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
800 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
801 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
802 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
803 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
804 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
805 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
810 typedef struct rgSchCmnDlImcsTbl
812 U8 modOdr; /* Modulation Order */
814 }RgSchCmnDlImcsTbl[29];
816 CONSTANT struct rgSchCmnMult235Info
818 U8 match; /* Closest number satisfying 2^a.3^b.5^c, with a bias
819 * towards the smaller number */
820 U8 prvMatch; /* Closest number not greater than array index
821 * satisfying 2^a.3^b.5^c */
822 } rgSchCmnMult235Tbl[110+1] = {
824 {1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {6, 6}, {6, 6}, {8, 8},
825 {9, 9}, {10, 10}, {10, 10}, {12, 12}, {12, 12}, {15, 12}, {15, 15},
826 {16, 16}, {16, 16}, {18, 18}, {18, 18}, {20, 20}, {20, 20}, {20, 20},
827 {24, 20}, {24, 24}, {25, 25}, {25, 25}, {27, 27}, {27, 27}, {30, 27},
828 {30, 30}, {30, 30}, {32, 32}, {32, 32}, {32, 32}, {36, 32}, {36, 36},
829 {36, 36}, {36, 36}, {40, 36}, {40, 40}, {40, 40}, {40, 40}, {45, 40},
830 {45, 40}, {45, 45}, {45, 45}, {48, 45}, {48, 48}, {48, 48}, {50, 50},
831 {50, 50}, {50, 50}, {54, 50}, {54, 54}, {54, 54}, {54, 54}, {54, 54},
832 {60, 54}, {60, 54}, {60, 60}, {60, 60}, {60, 60}, {64, 60}, {64, 64},
833 {64, 64}, {64, 64}, {64, 64}, {64, 64}, {72, 64}, {72, 64}, {72, 64},
834 {72, 72}, {72, 72}, {75, 72}, {75, 75}, {75, 75}, {75, 75}, {80, 75},
835 {80, 75}, {80, 80}, {81, 81}, {81, 81}, {81, 81}, {81, 81}, {81, 81},
836 {90, 81}, {90, 81}, {90, 81}, {90, 81}, {90, 90}, {90, 90}, {90, 90},
837 {90, 90}, {96, 90}, {96, 90}, {96, 96}, {96, 96}, {96, 96}, {100, 96},
838 {100, 100}, {100, 100}, {100, 100}, {100, 100}, {100, 100}, {108, 100},
839 {108, 100}, {108, 100}, {108, 108}, {108, 108}, {108, 108}
/* Backoff Indicator (BI) index -> backoff time in milliseconds,
 * from 36.321 Table 7.2-1 */
CONSTANT PRIVATE S16 rgSchCmnBiTbl[RG_SCH_CMN_NUM_BI_VAL] = {
0, 10, 20, 30,40,60,80,120,160,240,320,480,960};
846 PUBLIC RgSchCmnUlCqiInfo rgSchCmnUlCqiTbl[RG_SCH_CMN_UL_NUM_CQI] = {
848 {RGSCH_CMN_QM_CQI_1,RGSCH_CMN_UL_EFF_CQI_1 },
849 {RGSCH_CMN_QM_CQI_2,RGSCH_CMN_UL_EFF_CQI_2 },
850 {RGSCH_CMN_QM_CQI_3,RGSCH_CMN_UL_EFF_CQI_3 },
851 {RGSCH_CMN_QM_CQI_4,RGSCH_CMN_UL_EFF_CQI_4 },
852 {RGSCH_CMN_QM_CQI_5,RGSCH_CMN_UL_EFF_CQI_5 },
853 {RGSCH_CMN_QM_CQI_6,RGSCH_CMN_UL_EFF_CQI_6 },
854 {RGSCH_CMN_QM_CQI_7,RGSCH_CMN_UL_EFF_CQI_7 },
855 {RGSCH_CMN_QM_CQI_8,RGSCH_CMN_UL_EFF_CQI_8 },
856 {RGSCH_CMN_QM_CQI_9,RGSCH_CMN_UL_EFF_CQI_9 },
857 {RGSCH_CMN_QM_CQI_10,RGSCH_CMN_UL_EFF_CQI_10 },
858 {RGSCH_CMN_QM_CQI_11,RGSCH_CMN_UL_EFF_CQI_11 },
859 {RGSCH_CMN_QM_CQI_12,RGSCH_CMN_UL_EFF_CQI_12 },
860 {RGSCH_CMN_QM_CQI_13,RGSCH_CMN_UL_EFF_CQI_13 },
861 {RGSCH_CMN_QM_CQI_14,RGSCH_CMN_UL_EFF_CQI_14 },
862 {RGSCH_CMN_QM_CQI_15,RGSCH_CMN_UL_EFF_CQI_15 },
866 /* This table maps a (delta_offset * 2 + 2) to a (beta * 8)
867 * where beta is 10^-(delta_offset/10) rounded off to nearest 1/8
869 PRIVATE U16 rgSchCmnUlBeta8Tbl[29] = {
870 6, RG_SCH_CMN_UL_INVALID_BETA8, 8, 9, 10, 11, 13, 14, 16, 18, 20, 23,
871 25, 28, 32, RG_SCH_CMN_UL_INVALID_BETA8, 40, RG_SCH_CMN_UL_INVALID_BETA8,
872 50, RG_SCH_CMN_UL_INVALID_BETA8, 64, RG_SCH_CMN_UL_INVALID_BETA8, 80,
873 RG_SCH_CMN_UL_INVALID_BETA8, 101, RG_SCH_CMN_UL_INVALID_BETA8, 127,
874 RG_SCH_CMN_UL_INVALID_BETA8, 160
/* QCI to service priority mapping; the array index is the QCI
 * (0 .. RG_SCH_CMN_MAX_QCI-1), values come from RG_SCH_CMN_QCI_TO_PRIO */
PRIVATE U8 rgSchCmnDlQciPrio[RG_SCH_CMN_MAX_QCI] = RG_SCH_CMN_QCI_TO_PRIO;
/* The configuration is efficiency measured per 1024 REs. */
/* The first element stands for when the CQI is not known. */
/* This table is used to translate a CQI to its corresponding */
/* allocation parameters. These are currently from 36.213. */
/* Only this table needs to be edited to modify the */
/* resource allocation behaviour. */
/* ADD CQI to MCS mapping correction:
 * the single-dimensional array is replaced by 2 dimensions, one row per
 * CFI value. */
/* CQI (0..15) -> PDSCH efficiency (per 1024 REs), single-layer case;
 * element 0 is the "CQI unknown" entry. */
PRIVATE U16 rgSchCmnCqiPdschEff[4][16] = {RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI1,
RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI2,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI3};
/* Two-layer (spatial multiplexing) variant of the table above */
PRIVATE U16 rgSchCmn2LyrCqiPdschEff[4][16] = {RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI1,
RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI2, RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI3};
/* This configuration determines the translation of a UE's CQI to its */
/* PDCCH coding efficiency. This may be edited based on the installation */
/* DL redundancy-version transmission order */
PRIVATE U8 rgSchCmnDlRvTbl[4] = {0, 2, 3, 1}; /* RVIdx sequence is corrected */
900 /* Indexed by [DciFrmt].
901 * Considering the following definition in determining the dciFrmt index.
/* Size of each DCI format, indexed by DCI format (see the index
 * definition note above). Not initialized here -- values are computed
 * at runtime elsewhere. NOTE(review): confirm units (bits) at use site. */
PRIVATE U16 rgSchCmnDciFrmtSizes[10];
/* CQI (0..15) -> PDCCH coding efficiency mapping */
PRIVATE U16 rgSchCmnCqiPdcchEff[16] = RG_SCH_CMN_CQI_TO_PDCCH_EFF;
923 PUBLIC RgSchTddUlDlSubfrmTbl rgSchTddUlDlSubfrmTbl = {
924 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME},
925 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
926 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
927 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
928 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
929 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
930 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME}
935 PUBLIC U8 rgSchTddSpsDlMaxRetxTbl[RGSCH_MAX_TDD_UL_DL_CFG] = {
947 /* Special Subframes in OFDM symbols */
948 /* ccpu00134197-MOD-Correct the number of symbols */
949 PUBLIC RgSchTddSplSubfrmInfoTbl rgSchTddSplSubfrmInfoTbl = {
953 {11, 1, 1, 10, 1, 1},
961 /* PHICH 'm' value Table */
962 PUBLIC RgSchTddPhichMValTbl rgSchTddPhichMValTbl = {
963 {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
964 {0, 1, 0, 0, 1, 0, 1, 0, 0, 1},
965 {0, 0, 0, 1, 0, 0, 0, 0, 1, 0},
966 {1, 0, 0, 0, 0, 0, 0, 0, 1, 1},
967 {0, 0, 0, 0, 0, 0, 0, 0, 1, 1},
968 {0, 0, 0, 0, 0, 0, 0, 0, 1, 0},
969 {1, 1, 0, 0, 0, 1, 1, 0, 0, 1}
972 /* PHICH 'K' value Table */
973 PUBLIC RgSchTddKPhichTbl rgSchTddKPhichTbl = {
974 {0, 0, 4, 7, 6, 0, 0, 4, 7, 6},
975 {0, 0, 4, 6, 0, 0, 0, 4, 6, 0},
976 {0, 0, 6, 0, 0, 0, 0, 6, 0, 0},
977 {0, 0, 6, 6, 6, 0, 0, 0, 0, 0},
978 {0, 0, 6, 6, 0, 0, 0, 0, 0, 0},
979 {0, 0, 6, 0, 0, 0, 0, 0, 0, 0},
980 {0, 0, 4, 6, 6, 0, 0, 4, 7, 0}
983 /* Uplink association index 'K' value Table */
984 PUBLIC RgSchTddUlAscIdxKDashTbl rgSchTddUlAscIdxKDashTbl = {
985 {0, 0, 6, 4, 0, 0, 0, 6, 4, 0},
986 {0, 0, 4, 0, 0, 0, 0, 4, 0, 0},
987 {0, 0, 4, 4, 4, 0, 0, 0, 0, 0},
988 {0, 0, 4, 4, 0, 0, 0, 0, 0, 0},
989 {0, 0, 4, 0, 0, 0, 0, 0, 0, 0},
990 {0, 0, 7, 7, 5, 0, 0, 7, 7, 0}
994 /* PUSCH 'K' value Table */
995 PUBLIC RgSchTddPuschTxKTbl rgSchTddPuschTxKTbl = {
996 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
997 {0, 6, 0, 0, 4, 0, 6, 0, 0, 4},
998 {0, 0, 0, 4, 0, 0, 0, 0, 4, 0},
999 {4, 0, 0, 0, 0, 0, 0, 0, 4, 4},
1000 {0, 0, 0, 0, 0, 0, 0, 0, 4, 4},
1001 {0, 0, 0, 0, 0, 0, 0, 0, 4, 0},
1002 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1005 /* PDSCH to PUCCH Table for DL Harq Feed back. Based on the
1006 Downlink association set index 'K' table */
1007 PUBLIC U8 rgSchTddPucchTxTbl[7][10] = {
1008 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1009 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1010 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1011 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1012 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1013 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1014 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1017 /* Table to fetch the next DL sf idx for applying the
1018 new CFI. The next Dl sf Idx at which the new CFI
1019 is applied is always the starting Sf of the next ACK/NACK
1022 Ex: In Cfg-2, sf4 and sf9 are the only subframes at which
1023 a new ACK/NACK bundle of DL subframes can start
1025 D S U D D D S U D D D S U D D D S U D D
1028 dlSf Array for Cfg-2:
1029 sfNum: 0 1 3 4 5 6 8 9 0 1 3 4 5 6 8 9
1030 sfIdx: 0 1 2 3 4 5 6 7 8 9 10 11 12 12 14 15
1032 If CFI changes at sf0, nearest DL SF bundle >= 4 TTI is sf4
1033 So at sf4 the new CFI can be applied. To arrive at sf4 from
1034 sf0, the sfIdx has to be increased by 3 */
1036 PUBLIC U8 rgSchTddPdcchSfIncTbl[7][10] = {
1037 /* A/N Bundl: 0,1,5,6*/ {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
1038 /* A/N Bundl: 0,4,5,9*/ {2, 2, 0, 0, 3, 2, 2, 0, 0, 3},
1039 /* A/N Bundl: 4,9*/ {3, 6, 0, 5, 4, 3, 6, 0, 5, 4},
1040 /* A/N Bundl: 1,7,9*/ {4, 3, 0, 0, 0, 4, 5, 4, 6, 5},
1041 /* A/N Bundl: 0,6*/ {4, 3, 0, 0, 6, 5, 4, 7, 6, 5},
1042 /* A/N Bundl: 9*/ {8, 7, 0, 6, 5, 4, 12, 11, 10, 9},
1043 /* A/N Bundl: 0,1,5,6,9*/ {2, 1, 0, 0, 0, 2, 2, 0, 0, 3}
1047 /* combine compilation fixes */
1049 /* subframe offset values to be used when twoIntervalsConfig is enabled in UL
1051 PUBLIC RgSchTddSfOffTbl rgSchTddSfOffTbl = {
1052 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1053 {0, 0, 1, -1, 0, 0, 0, 1, -1, 0},
1054 {0, 0, 5, 0, 0, 0, 0, -5, 0, 0},
1055 {0, 0, 1, 1, -2, 0, 0, 0, 0, 0},
1056 {0, 0, 1, -1, 0, 0, 0, 0, 0, 0},
1057 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1058 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
1062 /* Table to determine when uplink SPS configured grants should
 * explicitly be reserved in a subframe. When entries are same
1064 * as that of Msg3SubfrmTbl, indicates competition with msg3.
1065 * As of now, this is same as Msg3SubfrmTbl (leaving out uldlcfg 2),
1066 * except that all 255s are now zeros. */
1067 PUBLIC RgSchTddSpsUlRsrvTbl rgSchTddSpsUlRsrvTbl = {
1068 {0, 0, 0, 6, 8, 0, 0, 0, 6, 8},
1069 {0, 0, 6, 9, 0, 0, 0, 6, 9, 0},
1070 {0, 0, 10, 0, 0, 0, 0, 10, 0, 0},
1071 {0, 0, 0, 0, 8, 0, 7, 7, 14, 0},
1072 {0, 0, 0, 9, 0, 0, 7, 15, 0, 0},
1073 {0, 0, 10, 0, 0, 0, 16, 0, 0, 0},
1074 {0, 0, 0, 0, 8, 0, 0, 0, 9, 0}
1077 /* Inverse DL Assoc Set index Table */
1078 PUBLIC RgSchTddInvDlAscSetIdxTbl rgSchTddInvDlAscSetIdxTbl = {
1079 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1080 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1081 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1082 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1083 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1084 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1085 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1088 #endif /* (LTEMAC_SPS ) */
/* Number of uplink subframes per radio frame, indexed by TDD UL/DL
 * configuration 0..6 */
PRIVATE U8 rgSchTddNumUlSf[] = {6, 4, 2, 3, 2, 1, 5};
/* Uplink HARQ processes table: number of UL HARQ processes per TDD
 * UL/DL configuration 0..6. (The comment previously said "Downlink";
 * the values and the type/variable name are the UL table.) */
PUBLIC RgSchTddUlNumHarqProcTbl rgSchTddUlNumHarqProcTbl = { 7, 4, 2, 3, 2, 1, 6};
/* Downlink HARQ processes table: number of DL HARQ processes per TDD
 * UL/DL configuration 0..6. (The comment previously said "Uplink";
 * the values and the type/variable name are the DL table.) */
PUBLIC RgSchTddDlNumHarqProcTbl rgSchTddDlNumHarqProcTbl = { 4, 7, 10, 9, 12, 15, 6};
1099 /* Downlink association index set 'K' value Table */
1100 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlAscSetIdxKTbl = {
1101 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1103 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1105 { {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}} },
1107 { {0, {0}}, {0, {0}}, {3, {7, 6, 11}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1109 { {0, {0}}, {0, {0}}, {4, {12, 8, 7, 11}}, {4, {6, 5, 4, 7}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1111 { {0, {0}}, {0, {0}}, {9, {13, 12, 9, 8, 7, 5, 4, 11, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1113 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
1116 /* ccpu132282-ADD-the table rgSchTddDlAscSetIdxKTbl is rearranged in
1117 * decreasing order of Km, this is used to calculate the NCE used for
1118 * calculating N1Pucch Resource for Harq*/
1119 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlHqPucchResCalTbl = {
1120 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1122 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1124 { {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}} },
1126 { {0, {0}}, {0, {0}}, {3, {11, 7, 6}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1128 { {0, {0}}, {0, {0}}, {4, {12, 11, 8, 7}}, {4, {7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1130 { {0, {0}}, {0, {0}}, {9, {13, 12, 11, 9, 8, 7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1132 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
/* Minimum number of ACK/NACK feedback entries to be stored, indexed by
 * TDD UL-DL configuration 0..6 */
PUBLIC RgSchTddANFdbkMapTbl rgSchTddANFdbkMapTbl = {4, 4, 2, 3, 2, 1, 5};
1139 /* Uplink switch points and number of UL subframes Table */
1140 PUBLIC RgSchTddMaxUlSubfrmTbl rgSchTddMaxUlSubfrmTbl = {
1141 {2,3,3}, {2,2,2}, {2,1,1}, {1,3,0}, {1,2,0}, {1,1,0}, {2,3,2}
1144 /* Uplink switch points and number of DL subframes Table */
1145 PUBLIC RgSchTddMaxDlSubfrmTbl rgSchTddMaxDlSubfrmTbl = {
1146 {2,2,2}, {2,3,3}, {2,4,4}, {1,7,0}, {1,8,0}, {1,9,0}, {2,2,3}
1149 /* Number of UL subframes present before a particular subframe */
1150 PUBLIC RgSchTddNumUlSubfrmTbl rgSchTddNumUlSubfrmTbl = {
1151 {0, 0, 1, 2, 3, 3, 3, 4, 5, 6},
1152 {0, 0, 1, 2, 2, 2, 2, 3, 4, 4},
1153 {0, 0, 1, 1, 1, 1, 1, 2, 2, 2},
1154 {0, 0, 1, 2, 3, 3, 3, 3, 3, 3},
1155 {0, 0, 1, 2, 2, 2, 2, 2, 2, 2},
1156 {0, 0, 1, 1, 1, 1, 1, 1, 1, 1},
1157 {0, 0, 1, 2, 3, 3, 3, 4, 5, 5}
1160 /* Number of DL subframes present till a particular subframe */
1161 PUBLIC RgSchTddNumDlSubfrmTbl rgSchTddNumDlSubfrmTbl = {
1162 {1, 2, 2, 2, 2, 3, 4, 4, 4, 4},
1163 {1, 2, 2, 2, 3, 4, 5, 5, 5, 6},
1164 {1, 2, 2, 3, 4, 5, 6, 6, 7, 8},
1165 {1, 2, 2, 2, 2, 3, 4, 5, 6, 7},
1166 {1, 2, 2, 2, 3, 4, 5, 6, 7, 8},
1167 {1, 2, 2, 3, 4, 5, 6, 7, 8, 9},
1168 {1, 2, 2, 2, 2, 3, 4, 4, 4, 5}
1172 /* Nearest possible UL subframe Index from UL subframe
1173 * DL Index < UL Index */
1174 PUBLIC RgSchTddLowDlSubfrmIdxTbl rgSchTddLowDlSubfrmIdxTbl = {
1175 {0, 1, 1, 1, 1, 5, 6, 6, 6, 6},
1176 {0, 1, 1, 1, 4, 5, 6, 6, 6, 9},
1177 {0, 1, 1, 3, 4, 5, 6, 6, 8, 9},
1178 {0, 1, 1, 1, 1, 5, 6, 7, 8, 9},
1179 {0, 1, 1, 1, 4, 5, 6, 7, 8, 9},
1180 {0, 1, 1, 3, 4, 5, 6, 7, 8, 9},
1181 {0, 1, 1, 1, 1, 5, 6, 6, 6, 9}
1184 /* Nearest possible DL subframe Index from UL subframe
1185 * DL Index > UL Index
1186 * 10 represents Next SFN low DL Idx */
/* Indexed as [ulDlCfg 0..6][subframe 0..9]; value 10 means the nearest
 * DL subframe wraps into the next radio frame (see note above). */
1187 PUBLIC RgSchTddHighDlSubfrmIdxTbl rgSchTddHighDlSubfrmIdxTbl = {
1188 {0, 1, 5, 5, 5, 5, 6, 10, 10, 10},
1189 {0, 1, 4, 4, 4, 5, 6, 9, 9, 9},
1190 {0, 1, 3, 3, 4, 5, 6, 8, 8, 9},
1191 {0, 1, 5, 5, 5, 5, 6, 7, 8, 9},
1192 {0, 1, 4, 4, 4, 5, 6, 7, 8, 9},
1193 {0, 1, 3, 3, 4, 5, 6, 7, 8, 9},
1194 {0, 1, 5, 5, 5, 5, 6, 9, 9, 9}
1197 /* RACH Message3 related information */
/* Indexed as [ulDlCfg 0..6][subframe 0..9]: offset/index used to place
 * the RACH Msg3 transmission. NOTE(review): 255 appears to mark an
 * entry with no valid Msg3 subframe (likely an invalid-value sentinel,
 * e.g. RGSCH_INVALID_INFO) -- confirm against the lookup sites. */
1198 PUBLIC RgSchTddMsg3SubfrmTbl rgSchTddMsg3SubfrmTbl = {
1199 {7, 6, 255, 255, 255, 7, 6, 255, 255, 255},
1200 {7, 6, 255, 255, 8, 7, 6, 255, 255, 8},
1201 {7, 6, 255, 9, 8, 7, 6, 255, 9, 8},
1202 {12, 11, 255, 255, 255, 7, 6, 6, 6, 13},
1203 {12, 11, 255, 255, 8, 7, 6, 6, 14, 13},
1204 {12, 11, 255, 9, 8, 7, 6, 15, 14, 13},
1205 {7, 6, 255, 255, 255, 7, 6, 255, 255, 8}
1208 /* ccpu00132341-DEL Removed rgSchTddRlsDlSubfrmTbl and used Kset table for
1209 * releasing DL HARQs */
1211 /* DwPTS Scheduling Changes Start */
1212 /* Provides the number of Cell Reference Signals in DwPTS
/* Cell-specific Reference Signal (CRS) counts within DwPTS, grouped by
 * special subframe configuration (see row comments) and antenna-port
 * dimension -- NOTE(review): the values double across each row (4/8/16,
 * 6/12/20-ish), suggesting columns are 1/2/4 antenna ports; confirm. */
1214 PRIVATE U8 rgSchCmnDwptsCrs[2][3] = {/* [Spl Sf cfg][Ant Port] */
1215 {4, 8, 16}, /* Spl Sf cfg 1,2,3,6,7,8 */
1216 {6, 12, 20}, /* Spl Sf cfg 4 */
/* Signed ITBS adjustment applied when scheduling in DwPTS, one entry per
 * special subframe configuration 0..8; values supplied by the
 * RG_SCH_DWPTS_ITBS_ADJ macro (defined elsewhere in this module). */
1219 PRIVATE S8 rgSchCmnSplSfDeltaItbs[9] = RG_SCH_DWPTS_ITBS_ADJ;
1220 /* DwPTS Scheduling Changes End */
/* Buffer Status Report index (0..63) -> buffer size in bytes.
 * NOTE(review): values appear to follow 3GPP TS 36.321 Table 6.1.3.1-1;
 * the final entry (220000) looks like an implementation cap for the
 * spec's ">150000" bucket -- confirm against the spec revision in use. */
1224 PRIVATE U32 rgSchCmnBsrTbl[64] = {
1225 0, 10, 12, 14, 17, 19, 22, 26,
1226 31, 36, 42, 49, 57, 67, 78, 91,
1227 107, 125, 146, 171, 200, 234, 274, 321,
1228 376, 440, 515, 603, 706, 826, 967, 1132,
1229 1326, 1552, 1817, 2127, 2490, 2915, 3413, 3995,
1230 4677, 5476, 6411, 7505, 8787, 10287, 12043, 14099,
1231 16507, 19325, 22624, 26487, 31009, 36304, 42502, 49759,
1232 58255, 68201, 79846, 93479, 109439, 128125, 150000, 220000
/* Extended Buffer Status Report index (0..63) -> buffer size in bytes.
 * NOTE(review): values appear to follow 3GPP TS 36.321 Table 6.1.3.1-2
 * (extended BSR); the final entry (3100000) looks like an implementation
 * cap for the spec's ">3000000" bucket -- confirm. */
1235 PRIVATE U32 rgSchCmnExtBsrTbl[64] = {
1236 0, 10, 13, 16, 19, 23, 29, 35,
1237 43, 53, 65, 80, 98, 120, 147, 181,
1238 223, 274, 337, 414, 509, 625, 769, 945,
1239 1162, 1429, 1757, 2161, 2657, 3267, 4017, 4940,
1240 6074, 7469, 9185, 11294, 13888, 17077, 20999, 25822,
1241 31752, 39045, 48012, 59039, 72598, 89272, 109774, 134986,
1242 165989, 204111, 250990, 308634, 379519, 466683, 573866, 705666,
1243 867737, 1067031, 1312097, 1613447, 1984009, 2439678, 3000000, 3100000
/* UL redundancy-version index -> I_MCS to signal in the UL grant.
 * Entries 29/30/31 are the reserved UL MCS indices used to convey RV
 * (TS 36.213 sec 8.6.1); entry 0 (value 32) looks like an out-of-range
 * sentinel. NOTE(review): whether the index is the RV value itself or a
 * position in the 0,2,3,1 RV sequence is not determinable from this
 * chunk -- confirm at the lookup site before relying on the ordering. */
1247 PRIVATE U8 rgSchCmnUlRvIdxToIMcsTbl[4] = {32, 30, 31, 29};
/* UL CQI -> TBS index lookup, dimensioned [cyclic-prefix type][UL CQI].
 * Defined uninitialized here; presumably populated during scheduler
 * initialization (not visible in this chunk) -- TODO confirm. */
1249 PUBLIC U8 rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_MAX_CP][RG_SCH_CMN_UL_NUM_CQI];
1251 PUBLIC RgSchTbSzTbl rgTbSzTbl = {
1253 {16, 32, 56, 88, 120, 152, 176, 208, 224, 256, 288, 328, 344, 376, 392, 424, 456, 488, 504, 536, 568, 600, 616, 648, 680, 712, 744, 776, 776, 808, 840, 872, 904, 936, 968, 1000, 1032, 1032, 1064, 1096, 1128, 1160, 1192, 1224, 1256, 1256, 1288, 1320, 1352, 1384, 1416, 1416, 1480, 1480, 1544, 1544, 1608, 1608, 1608, 1672, 1672, 1736, 1736, 1800, 1800, 1800, 1864, 1864, 1928, 1928, 1992, 1992, 2024, 2088, 2088, 2088, 2152, 2152, 2216, 2216, 2280, 2280, 2280, 2344, 2344, 2408, 2408, 2472, 2472, 2536, 2536, 2536, 2600, 2600, 2664, 2664, 2728, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 2984, 2984, 3112},
1254 {24, 56, 88, 144, 176, 208, 224, 256, 328, 344, 376, 424, 456, 488, 520, 568, 600, 632, 680, 712, 744, 776, 808, 872, 904, 936, 968, 1000, 1032, 1064, 1128, 1160, 1192, 1224, 1256, 1288, 1352, 1384, 1416, 1416, 1480, 1544, 1544, 1608, 1608, 1672, 1736, 1736, 1800, 1800, 1864, 1864, 1928, 1992, 1992, 2024, 2088, 2088, 2152, 2152, 2216, 2280, 2280, 2344, 2344, 2408, 2472, 2472, 2536, 2536, 2600, 2600, 2664, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008},
1255 {32, 72, 144, 176, 208, 256, 296, 328, 376, 424, 472, 520, 568, 616, 648, 696, 744, 776, 840, 872, 936, 968, 1000, 1064, 1096, 1160, 1192, 1256, 1288, 1320, 1384, 1416, 1480, 1544, 1544, 1608, 1672, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2024, 2088, 2088, 2152, 2216, 2216, 2280, 2344, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2664, 2728, 2792, 2856, 2856, 2856, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968},
1256 {40, 104, 176, 208, 256, 328, 392, 440, 504, 568, 616, 680, 744, 808, 872, 904, 968, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1864, 1928, 1992, 2024, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6200, 6456, 6456},
1257 {56, 120, 208, 256, 328, 408, 488, 552, 632, 696, 776, 840, 904, 1000, 1064, 1128, 1192, 1288, 1352, 1416, 1480, 1544, 1608, 1736, 1800, 1864, 1928, 1992, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2600, 2664, 2728, 2792, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992},
1258 {72, 144, 224, 328, 424, 504, 600, 680, 776, 872, 968, 1032, 1128, 1224, 1320, 1384, 1480, 1544, 1672, 1736, 1864, 1928, 2024, 2088, 2216, 2280, 2344, 2472, 2536, 2664, 2728, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4264, 4392, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9528},
1259 {328, 176, 256, 392, 504, 600, 712, 808, 936, 1032, 1128, 1224, 1352, 1480, 1544, 1672, 1736, 1864, 1992, 2088, 2216, 2280, 2408, 2472, 2600, 2728, 2792, 2984, 2984, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3880, 4008, 4136, 4136, 4264, 4392, 4584, 4584, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448},
1260 {104, 224, 328, 472, 584, 712, 840, 968, 1096, 1224, 1320, 1480, 1608, 1672, 1800, 1928, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536},
1261 {120, 256, 392, 536, 680, 808, 968, 1096, 1256, 1384, 1544, 1672, 1800, 1928, 2088, 2216, 2344, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15264},
1262 {136, 296, 456, 616, 776, 936, 1096, 1256, 1416, 1544, 1736, 1864, 2024, 2216, 2344, 2536, 2664, 2856, 2984, 3112, 3368, 3496, 3624, 3752, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 5160, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568},
1263 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080},
1264 {176, 376, 584, 776, 1000, 1192, 1384, 1608, 1800, 2024, 2216, 2408, 2600, 2792, 2984, 3240, 3496, 3624, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152},
1265 {208, 440, 680, 904, 1128, 1352, 1608, 1800, 2024, 2280, 2472, 2728, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 23688, 24496, 24496, 24496, 24496, 25456},
1266 {224, 488, 744, 1000, 1256, 1544, 1800, 2024, 2280, 2536, 2856, 3112, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 26416, 27376, 27376, 27376, 27376, 28336, 28336},
1267 {256, 552, 840, 1128, 1416, 1736, 1992, 2280, 2600, 2856, 3112, 3496, 3752, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 5992, 6200, 6456, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704},
1268 {280, 600, 904, 1224, 1544, 1800, 2152, 2472, 2728, 3112, 3368, 3624, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 6200, 6456, 6712, 6968, 7224, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008},
1269 {328, 632, 968, 1288, 1608, 1928, 2280, 2600, 2984, 3240, 3624, 3880, 4264, 4584, 4968, 5160, 5544, 5992, 6200, 6456, 6712, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 35160},
1270 {336, 696, 1064, 1416, 1800, 2152, 2536, 2856, 3240, 3624, 4008, 4392, 4776, 5160, 5352, 5736, 6200, 6456, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 39232},
1271 {376, 776, 1160, 1544, 1992, 2344, 2792, 3112, 3624, 4008, 4392, 4776, 5160, 5544, 5992, 6200, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14112, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816},
1272 {408, 840, 1288, 1736, 2152, 2600, 2984, 3496, 3880, 4264, 4776, 5160, 5544, 5992, 6456, 6968, 7224, 7736, 8248, 8504, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19080, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888},
1273 {440, 904, 1384, 1864, 2344, 2792, 3240, 3752, 4136, 4584, 5160, 5544, 5992, 6456, 6968, 7480, 7992, 8248, 8760, 9144, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 20616, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 48936, 51024, 51024, 51024},
1274 {488, 1000, 1480, 1992, 2472, 2984, 3496, 4008, 4584, 4968, 5544, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 9912, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056},
1275 {520, 1064, 1608, 2152, 2664, 3240, 3752, 4264, 4776, 5352, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 10296, 10680, 11448, 11832, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 19080, 19080, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256},
1276 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776},
1277 {584, 1192, 1800, 2408, 2984, 3624, 4264, 4968, 5544, 5992, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 22920, 23688, 24496, 25456, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592},
1278 {616, 1256, 1864, 2536, 3112, 3752, 4392, 5160, 5736, 6200, 6968, 7480, 8248, 8760, 9528, 10296, 10680, 11448, 12216, 12576, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592, 68808, 68808, 68808, 71112},
1279 {712, 1480, 2216, 2984, 3752, 4392, 5160, 5992, 6712, 7480, 8248, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 14688, 15264, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376}
1282 {32, 88, 152, 208, 256, 328, 376, 424, 488, 536, 600, 648, 712, 776, 808, 872, 936, 1000, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2088, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 5992, 6200},
1283 {56, 144, 208, 256, 344, 424, 488, 568, 632, 712, 776, 872, 936, 1000, 1064, 1160, 1224, 1288, 1384, 1416, 1544, 1608, 1672, 1736, 1800, 1864, 1992, 2024, 2088, 2152, 2280, 2344, 2408, 2472, 2536, 2600, 2728, 2792, 2856, 2856, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5352, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992},
1284 {72, 176, 256, 328, 424, 520, 616, 696, 776, 872, 968, 1064, 1160, 1256, 1320, 1416, 1544, 1608, 1672, 1800, 1864, 1992, 2088, 2152, 2216, 2344, 2408, 2536, 2600, 2664, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3368, 3496, 3624, 3624, 3752, 3880, 4008, 4008, 4136, 4264, 4264, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912},
1285 {104, 208, 328, 440, 568, 680, 808, 904, 1032, 1160, 1256, 1384, 1480, 1608, 1736, 1864, 1992, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2856, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4392, 4584, 4776, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12576, 12960, 12960},
1286 {120, 256, 408, 552, 696, 840, 1000, 1128, 1288, 1416, 1544, 1736, 1864, 1992, 2152, 2280, 2408, 2600, 2728, 2856, 2984, 3112, 3240, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840},
1287 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19080},
1288 {176, 392, 600, 808, 1032, 1224, 1480, 1672, 1864, 2088, 2280, 2472, 2728, 2984, 3112, 3368, 3496, 3752, 4008, 4136, 4392, 4584, 4776, 4968, 5160, 5352, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7992, 8248, 8248, 8504, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920},
1289 {224, 472, 712, 968, 1224, 1480, 1672, 1928, 2216, 2472, 2664, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376},
1290 {256, 536, 808, 1096, 1384, 1672, 1928, 2216, 2536, 2792, 3112, 3368, 3624, 3880, 4264, 4584, 4776, 4968, 5352, 5544, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576},
1291 {296, 616, 936, 1256, 1544, 1864, 2216, 2536, 2856, 3112, 3496, 3752, 4136, 4392, 4776, 5160, 5352, 5736, 5992, 6200, 6712, 6968, 7224, 7480, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160},
1292 {328, 680, 1032, 1384, 1736, 2088, 2472, 2792, 3112, 3496, 3880, 4264, 4584, 4968, 5352, 5736, 5992, 6200, 6712, 6968, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 21384, 21384, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888},
1293 {376, 776, 1192, 1608, 2024, 2408, 2792, 3240, 3624, 4008, 4392, 4776, 5352, 5736, 5992, 6456, 6968, 7224, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816},
1294 {440, 904, 1352, 1800, 2280, 2728, 3240, 3624, 4136, 4584, 4968, 5544, 5992, 6456, 6712, 7224, 7736, 8248, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 17568, 17568, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024},
1295 {488, 1000, 1544, 2024, 2536, 3112, 3624, 4136, 4584, 5160, 5736, 6200, 6712, 7224, 7736, 8248, 8760, 9144, 9912, 10296, 10680, 11448, 11832, 12216, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336},
1296 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776},
1297 {600, 1224, 1800, 2472, 3112, 3624, 4264, 4968, 5544, 6200, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 23688, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808},
1298 {632, 1288, 1928, 2600, 3240, 3880, 4584, 5160, 5992, 6456, 7224, 7736, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 68808, 71112, 71112, 71112, 71112},
1299 {696, 1416, 2152, 2856, 3624, 4392, 5160, 5736, 6456, 7224, 7992, 8760, 9528, 10296, 10680, 11448, 12216, 12960, 13536, 14688, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 71112, 73712, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 78704},
1300 {776, 1544, 2344, 3112, 4008, 4776, 5544, 6200, 7224, 7992, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 27376, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936},
1301 {840, 1736, 2600, 3496, 4264, 5160, 5992, 6968, 7736, 8504, 9528, 10296, 11064, 12216, 12960, 13536, 14688, 15264, 16416, 16992, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800},
1302 {904, 1864, 2792, 3752, 4584, 5544, 6456, 7480, 8248, 9144, 10296, 11064, 12216, 12960, 14112, 14688, 15840, 16992, 17568, 18336, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 42368, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 97896, 101840, 101840, 101840},
1303 {1000, 1992, 2984, 4008, 4968, 5992, 6968, 7992, 9144, 9912, 11064, 12216, 12960, 14112, 15264, 15840, 16992, 18336, 19080, 19848, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 31704, 32856, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136},
1304 {1064, 2152, 3240, 4264, 5352, 6456, 7480, 8504, 9528, 10680, 11832, 12960, 14112, 15264, 16416, 16992, 18336, 19080, 20616, 21384, 22920, 23688, 24496, 25456, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816},
1305 {1128, 2280, 3496, 4584, 5736, 6968, 7992, 9144, 10296, 11448, 12576, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 45352, 45352, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840,101840,101840,101840,105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496},
1306 {1192, 2408, 3624, 4968, 5992, 7224, 8504, 9912, 11064, 12216, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 42368, 43816, 45352, 46888, 46888, 48936, 51024, 51024, 52752, 52752, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208},
1307 {1256, 2536, 3752, 5160, 6200, 7480, 8760, 10296, 11448, 12576, 14112, 15264, 16416, 17568, 19080, 20616, 21384, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 43816, 43816, 45352, 46888, 48936, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040,115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208, 137792, 137792, 137792, 142248},
1308 {1480, 2984, 4392, 5992, 7480, 8760, 10296, 11832, 13536, 14688, 16416, 17568, 19080, 20616, 22152, 23688, 25456, 26416, 28336, 29296, 30576, 32856, 34008, 35160, 36696, 37888, 40576, 40576, 42368, 43816, 45352, 46888, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 63776, 63776, 66592, 68808, 68808, 71112, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 81176, 84760, 84760, 87936, 87936, 90816, 90816, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 128496, 128496, 128496, 133208, 133208, 133208, 137792, 137792, 137792, 142248, 142248, 142248, 146856, 146856,149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776}
/* UL I-MCS table: maps an uplink MCS index (rows 0..28) to a pair of
 * {modulation order Qm, I-TBS index}. Qm 2/4/6 correspond to
 * QPSK/16QAM/64QAM.
 * NOTE(review): values appear to follow 3GPP TS 36.213 Table 8.6.1-1
 * -- confirm against the spec revision this build targets. */
1311 RgSchUlIMcsTbl rgUlIMcsTbl = {
1312 {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5},
1313 {2, 6}, {2, 7}, {2, 8}, {2, 9}, {2, 10},
1314 {4, 10}, {4, 11}, {4, 12}, {4, 13}, {4, 14},
1315 {4, 15}, {4, 16}, {4, 17}, {4, 18}, {4, 19},
1316 {6, 19}, {6, 20}, {6, 21}, {6, 22}, {6, 23},
1317 {6, 24}, {6, 25}, {6, 26}
/* UE capability table, one row per UE category (row 0 = category 1).
 * NOTE(review): values appear to track 3GPP TS 36.306 Table 4.1-1
 * (UE radio access capability parameters) -- confirm, especially the
 * two identical rows for categories 6 and 7. */
1319 RgSchUeCatTbl rgUeCatTbl = {
1320 /*Column1:Maximum number of bits of an UL-SCH
1321 transport block transmitted within a TTI
1323 Column2:Maximum number of bits of a DLSCH
1324 transport block received within a TTI
1326 Column3:Total number of soft channel bits
1328 Column4:Support for 64QAM in UL
1330 Column5:Maximum number of DL-SCH transport
1331 block bits received within a TTI
1333 Column6:Maximum number of supported layers for
1334 spatial multiplexing in DL
1336 {5160, {10296,0}, 250368, FALSE, 10296, 1},
1337 {25456, {51024,0}, 1237248, FALSE, 51024, 2},
1338 {51024, {75376,0}, 1237248, FALSE, 102048, 2},
1339 {51024, {75376,0}, 1827072, FALSE, 150752, 2},
1340 {75376, {149776,0}, 3667200, TRUE, 299552, 4},
1341 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1342 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1343 {149776,{299856,0}, 35982720,TRUE, 2998560, 8}
1346 /* [ccpu00138532]-ADD-The below table stores the min HARQ RTT time
1347 in Downlink for TDD and FDD. Indices 0 to 6 map to tdd UL DL config 0-6.
1348 Index 7 map to FDD */
/* NOTE(review): values are presumably expressed in subframes (ms) --
 * confirm the unit at the points of use before changing any entry. */
1349 U8 rgSchCmnHarqRtt[8] = {4,7,10,9,12,15,6,8};
1350 /* Number of CFI Switchover Index is equals to 7 TDD Indexes + 1 FDD index */
/* Per-config window length used when switching CFI; index 7 is FDD,
 * matching the indexing convention of rgSchCmnHarqRtt above. */
1351 U8 rgSchCfiSwitchOvrWinLen[] = {7, 4, 2, 3, 2, 1, 6, 8};
1353 /* EffTbl is calculated for single layer and two layers.
1354 * CqiToTbs is calculated for single layer and two layers */
/* Per-CFI efficiency tables for normal CP; the [RGSCH_MAX_NUM_LYR_PERCW]
 * dimension holds one entry per layer-count (index 0 = 1 layer,
 * index 1 = 2 layers), filled at init by rgSCHCmnCompEff /
 * rgSCHCmn2LyrCompEff. */
1355 RgSchCmnTbSzEff rgSchCmnNorCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1356 RgSchCmnTbSzEff rgSchCmnNorCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1357 /* New variable to store UL effiency values for normal and extended CP*/
1358 RgSchCmnTbSzEff rgSchCmnNorUlEff[1],rgSchCmnExtUlEff[1];
1359 RgSchCmnCqiToTbs rgSchCmnNorCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1360 RgSchCmnCqiToTbs rgSchCmnNorCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
/* Pointer lookup: indexed [layerIdx][cpType][cfiIdx]; populated in
 * rgSCHCmnDlInit to point at the concrete tables above. */
1361 RgSchCmnCqiToTbs *rgSchCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
1362 RgSchCmnTbSzEff rgSchCmnExtCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1363 RgSchCmnTbSzEff rgSchCmnExtCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1364 RgSchCmnCqiToTbs rgSchCmnExtCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1365 RgSchCmnCqiToTbs rgSchCmnExtCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1366 /* Include CRS REs while calculating Efficiency */
/* Efficiency pointer lookup: indexed [layerIdx][cpType][antCfgIdx][cfiIdx]. */
1367 RgSchCmnTbSzEff *rgSchCmnEffTbl[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_ANT_CONF][RG_SCH_CMN_MAX_CFI];
1368 RgSchCmnTbSzEff *rgSchCmnUlEffTbl[RG_SCH_CMN_MAX_CP];
/* NOTE(review): rgRaPrmblToRaFrmTbl is defined twice below with different
 * initializers (5 vs 4 entries). Presumably the two definitions sit under
 * mutually exclusive compile-time guards (e.g. TDD vs FDD builds) that are
 * elided from this view -- verify in the full source; as written here this
 * would be a duplicate-definition error. */
1370 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3, 1};
1372 /* Added matrix 'rgRaPrmblToRaFrmTbl'for computation of RA sub-frames from RA preamble */
1373 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3};
1376 EXTERN RgUlSchdInits rgSchUlSchdInits;
1377 EXTERN RgDlSchdInits rgSchDlSchdInits;
1378 EXTERN RgDlfsSchdInits rgSchDlfsSchdInits;
1380 EXTERN RgEmtcUlSchdInits rgSchEmtcUlSchdInits;
1381 EXTERN RgEmtcDlSchdInits rgSchEmtcDlSchdInits;
1385 PRIVATE S16 rgSCHCmnUeIdleExdThrsld ARGS((
1389 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe ARGS((
1393 PRIVATE Void rgSCHCmnDelDedPreamble ARGS((
1397 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe ARGS((
1400 CmLteTimingInfo timingInfo
1402 PRIVATE Void rgSCHCmnDelRachInfo ARGS((
1406 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe ARGS((
1412 PRIVATE Void rgSCHCmnHdlHoPo ARGS((
1414 CmLListCp *raRspLst,
1415 RgSchRaReqInfo *raReq
1417 PRIVATE Void rgSCHCmnAllocPoHoGrnt ARGS((
1419 CmLListCp *raRspLst,
1421 RgSchRaReqInfo *raReq
1423 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf ARGS((
1430 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ ARGS((
1434 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ ARGS((
1438 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx ARGS((
1441 PRIVATE Void rgSCHCmnUpdRachParam ARGS((
1444 PRIVATE S16 rgSCHCmnAllocPOParam ARGS((
1452 PRIVATE Void rgSCHCmnGenPdcchOrder ARGS((
1456 PRIVATE Void rgSCHCmnCfgRachDedPrm ARGS((
1461 PRIVATE Void rgSCHCmnHdlUlInactUes ARGS((
1464 PRIVATE Void rgSCHCmnHdlDlInactUes ARGS((
1467 PRIVATE Void rgSCHCmnUlInit ARGS((Void
1469 PRIVATE Void rgSCHCmnDlInit ARGS((Void
1471 PRIVATE Void rgSCHCmnInitDlRbAllocInfo ARGS((
1472 RgSchCmnDlRbAllocInfo *allocInfo
1474 PRIVATE Void rgSCHCmnUpdUlCompEffBsr ARGS((
1478 PRIVATE Void rgSCHCmnUlSetAllUnSched ARGS((
1479 RgSchCmnUlRbAllocInfo *allocInfo
1481 PRIVATE Void rgSCHCmnUlUpdSf ARGS((
1483 RgSchCmnUlRbAllocInfo *allocInfo,
1486 PRIVATE Void rgSCHCmnUlHndlAllocRetx ARGS((
1488 RgSchCmnUlRbAllocInfo *allocInfo,
1493 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch ARGS((
1497 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch ARGS((
1501 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ ARGS((
1505 PRIVATE S16 rgSCHCmnTmrExpiry ARGS((
1506 PTR cb, /* Pointer to timer control block */
1507 S16 tmrEvnt /* Timer Event */
1509 PRIVATE S16 rgSCHCmnTmrProc ARGS((
1512 PRIVATE Void rgSCHCmnAddUeToRefreshQ ARGS((
1517 PRIVATE Void rgSCHCmnDlCcchRetx ARGS((
1519 RgSchCmnDlRbAllocInfo *allocInfo
1521 PRIVATE Void rgSCHCmnUpdUeMimoInfo ARGS((
1525 RgSchCmnCell *cellSchd
1527 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo ARGS((
1531 RgSchCmnUe *ueSchCmn,
1532 RgSchCmnCell *cellSchd,
1536 PRIVATE Void rgSCHCmnDlCcchSduRetx ARGS((
1538 RgSchCmnDlRbAllocInfo *allocInfo
1540 PRIVATE Void rgSCHCmnDlCcchSduTx ARGS((
1542 RgSchCmnDlRbAllocInfo *allocInfo
1544 PRIVATE S16 rgSCHCmnCcchSduAlloc ARGS((
1547 RgSchCmnDlRbAllocInfo *allocInfo
1549 PRIVATE S16 rgSCHCmnCcchSduDedAlloc ARGS((
1553 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc ARGS((
1559 PRIVATE Void rgSCHCmnInitVars ARGS((
1563 /*ccpu00117180 - DEL - Moved rgSCHCmnUpdVars to .x as its access is now PUBLIC */
1564 PRIVATE Void rgSCHCmnUlRbAllocForLst ARGS((
1570 CmLListCp *nonSchdLst,
1573 PRIVATE S16 rgSCHCmnUlRbAllocForUe ARGS((
1580 PRIVATE Void rgSCHCmnMsg3GrntReq ARGS((
1584 RgSchUlHqProcCb *hqProc,
1585 RgSchUlAlloc **ulAllocRef,
1588 PRIVATE Void rgSCHCmnUlNonadapRetx ARGS((
1589 RgSchCmnUlCell *cellUl,
1590 RgSchUlAlloc *alloc,
1594 PRIVATE Void rgSCHCmnDlCcchRarAlloc ARGS((
1597 PRIVATE Void rgSCHCmnDlCcchTx ARGS((
1599 RgSchCmnDlRbAllocInfo *allocInfo
1601 PRIVATE Void rgSCHCmnDlBcchPcch ARGS((
1603 RgSchCmnDlRbAllocInfo *allocInfo,
1604 RgInfSfAlloc *subfrmAlloc
1606 PUBLIC Bool rgSCHCmnChkInWin ARGS((
1607 CmLteTimingInfo frm,
1608 CmLteTimingInfo start,
1611 PUBLIC Bool rgSCHCmnChkPastWin ARGS((
1612 CmLteTimingInfo frm,
1615 PRIVATE Void rgSCHCmnClcAlloc ARGS((
1618 RgSchClcDlLcCb *lch,
1620 RgSchCmnDlRbAllocInfo *allocInfo
1623 PRIVATE Void rgSCHCmnClcRbAlloc ARGS((
1634 PRIVATE S16 rgSCHCmnMsg4Alloc ARGS((
1637 RgSchCmnDlRbAllocInfo *allocInfo
1639 PRIVATE S16 rgSCHCmnMsg4DedAlloc ARGS((
1643 PRIVATE Void rgSCHCmnDlRaRsp ARGS((
1645 RgSchCmnDlRbAllocInfo *allocInfo
1647 PRIVATE S16 rgSCHCmnRaRspAlloc ARGS((
1653 RgSchCmnDlRbAllocInfo *allocInfo
1655 PRIVATE Void rgSCHCmnUlUeDelAllocs ARGS((
1659 PRIVATE Void rgSCHCmnDlSetUeAllocLmt ARGS((
1664 PRIVATE S16 rgSCHCmnDlRgrCellCfg ARGS((
1669 PRIVATE Void rgSCHCmnUlAdapRetx ARGS((
1670 RgSchUlAlloc *alloc,
1671 RgSchUlHqProcCb *proc
1673 PRIVATE Void rgSCHCmnUlUpdAllocRetx ARGS((
1677 PRIVATE Void rgSCHCmnUlSfReTxAllocs ARGS((
1681 /* Fix: syed Adaptive Msg3 Retx crash. */
1682 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs ARGS((
1688 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1692 RgrUeRecfg *ueRecfg,
1696 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1706 * DL RB allocation specific functions
1709 PRIVATE Void rgSCHCmnDlRbAlloc ARGS((
1711 RgSchCmnDlRbAllocInfo *allocInfo
1713 PRIVATE Void rgSCHCmnNonDlfsRbAlloc ARGS((
1715 RgSchCmnDlRbAllocInfo *allocInfo
1717 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc ARGS((
1719 RgSchDlRbAlloc *cmnAllocInfo));
1722 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj ARGS((
1724 RgSchDlRbAlloc *cmnAllocInfo,
1728 /* Added function to adjust TBSize*/
1729 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj ARGS((
1730 RgSchDlRbAlloc *allocInfo,
1731 U8 numOvrlapgPbchRb,
1737 /* Added function to find num of overlapping PBCH rb*/
1738 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs ARGS((
1741 RgSchDlRbAlloc *allocInfo,
1742 U8 *numOvrlapgPbchRb
1745 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl ARGS((
1748 RgSchDlRbAlloc *allocInfo
1751 PRIVATE Void rgSCHCmnFindCodeRate ARGS((
1754 RgSchDlRbAlloc *allocInfo,
1759 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc ARGS((
1761 RgSchCmnMsg4RbAlloc *msg4AllocInfo,
1764 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc ARGS((
1770 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc ARGS((
1777 PRIVATE U32 rgSCHCmnCalcRiv ARGS(( U8 bw,
1783 PRIVATE Void rgSCHCmnUpdHqAndDai ARGS((
1784 RgSchDlHqProcCb *hqP,
1786 RgSchDlHqTbCb *tbCb,
1789 PRIVATE S16 rgSCHCmnUlCalcAvailBw ARGS((
1791 RgrCellCfg *cellCfg,
1796 PRIVATE S16 rgSCHCmnDlKdashUlAscInit ARGS((
1799 PRIVATE S16 rgSCHCmnDlANFdbkInit ARGS((
1802 PRIVATE S16 rgSCHCmnDlNpValInit ARGS((
1805 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst ARGS((
1808 PRIVATE S16 rgSCHCmnDlCpyRachInfo ARGS((
1810 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
1813 PRIVATE S16 rgSCHCmnDlRachInfoInit ARGS((
1816 PRIVATE S16 rgSCHCmnDlPhichOffsetInit ARGS((
1821 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt ARGS
1827 PRIVATE RgSchCmnRank rgSCHCmnComputeRank ARGS
1834 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3 ARGS
1839 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3 ARGS
1844 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4 ARGS
1849 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4 ARGS
1854 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr ARGS
1861 /* comcodsepa : start */
1864 * @brief This function computes efficiency and stores in a table.
1868 * Function: rgSCHCmnCompEff
1869 * Purpose: this function computes the efficiency as number of
1870 * bytes per 1024 symbols. The CFI table is also filled
1871 * with the same information such that comparison is valid
1873 * Invoked by: Scheduler
1875 * @param[in] U8 noPdcchSym
1876 * @param[in] U8 cpType
1877 * @param[in] U8 txAntIdx
1878 * @param[in] RgSchCmnTbSzEff* effTbl
/* Fills effTbl with DL scheduling efficiency per TBS index: for each I-TBS,
 * the TBS bits per 1024 available REs, averaged over all RB allocation
 * sizes. PDCCH symbols (noPdcchSym) and CRS REs (per txAntIdx) are deducted
 * from the per-RB RE budget first.
 * NOTE(review): this view of the file is elided; the two function headers
 * below are presumably the ANSI-prototype / K&R-definition pair selected by
 * a compile-time guard -- confirm in the full source. */
1883 PRIVATE Void rgSCHCmnCompEff
1888 RgSchCmnTbSzEff *effTbl
1891 PRIVATE Void rgSCHCmnCompEff(noPdcchSym, cpType, txAntIdx, effTbl)
1895 RgSchCmnTbSzEff *effTbl;
1900 U8 resOfCrs; /* Effective REs occupied by CRS */
1903 TRC2(rgSCHCmnCompEff);
/* Select OFDM symbols per RB based on cyclic prefix type */
1907 case RG_SCH_CMN_NOR_CP:
1910 case RG_SCH_CMN_EXT_CP:
1914 /* Generate a log error. This case should never be executed */
1918 /* Depending on the Tx Antenna Index, deduct the
1919 * Resource elements for the CRS */
1923 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
1926 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
1929 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
1932 /* Generate a log error. This case should never be executed */
/* Usable REs per RB = (data symbols) * subcarriers - CRS overhead */
1935 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
1936 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
1939 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
1941 /* This line computes the coding efficiency per 1024 REs */
1942 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
/* Average the accumulated per-RB-count efficiencies */
1944 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
1949 * @brief This function computes efficiency and stores in a table.
1953 * Function: rgSCHCmnCompUlEff
1954 * Purpose: this function computes the efficiency as number of
1955 * bytes per 1024 symbols. The CFI table is also filled
1956 * with the same information such that comparison is valid
1958 * Invoked by: Scheduler
1960 * @param[in] U8 noUlRsSym
1961 * @param[in] U8 cpType
1962 * @param[in] U8 txAntIdx
1963 * @param[in] RgSchCmnTbSzEff* effTbl
/* Fills effTbl with UL scheduling efficiency per TBS index: TBS bits per
 * 1024 REs, averaged over all RB allocation sizes, after deducting the
 * UL reference-signal symbols (noUlRsSym) from the per-RB symbol budget.
 * Unlike the DL variant, no CRS deduction applies in UL.
 * NOTE(review): elided view; the dual headers below are presumably the
 * ANSI/K&R declaration pair under a compile-time guard -- confirm. */
1968 PRIVATE Void rgSCHCmnCompUlEff
1972 RgSchCmnTbSzEff *effTbl
1975 PRIVATE Void rgSCHCmnCompUlEff(noUlRsSym, cpType, effTbl)
1978 RgSchCmnTbSzEff *effTbl;
1985 TRC2(rgSCHCmnCompUlEff);
/* Select SC-FDMA symbols per RB based on cyclic prefix type */
1989 case RG_SCH_CMN_NOR_CP:
1992 case RG_SCH_CMN_EXT_CP:
1996 /* Generate a log error. This case should never be executed */
/* Usable REs per RB = (symbols minus UL RS symbols) * subcarriers */
2000 noResPerRb = ((noSymPerRb - noUlRsSym) * RB_SCH_CMN_NUM_SCS_PER_RB);
2001 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
2004 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
2006 /* This line computes the coding efficiency per 1024 REs */
2007 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
/* Average the accumulated per-RB-count efficiencies */
2009 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
2015 * @brief This function computes efficiency for 2 layers and stores in a table.
2019 * Function: rgSCHCmn2LyrCompEff
2020 * Purpose: this function computes the efficiency as number of
2021 * bytes per 1024 symbols. The CFI table is also filled
2022 * with the same information such that comparison is valid
2024 * Invoked by: Scheduler
2026 * @param[in] U8 noPdcchSym
2027 * @param[in] U8 cpType
2028 * @param[in] U8 txAntIdx
2029 * @param[in] RgSchCmnTbSzEff* effTbl2Lyr
/* Two-layer counterpart of rgSCHCmnCompEff: fills effTbl2Lyr with
 * efficiency (TBS bits per 1024 REs, averaged over RB counts) using the
 * two-layer TBS column rgTbSzTbl[1]. PDCCH symbols and CRS REs are
 * deducted from the per-RB RE budget as in the single-layer case.
 * NOTE(review): elided view; dual ANSI/K&R headers presumably under a
 * compile-time guard -- confirm. */
2034 PRIVATE Void rgSCHCmn2LyrCompEff
2039 RgSchCmnTbSzEff *effTbl2Lyr
2042 PRIVATE Void rgSCHCmn2LyrCompEff(noPdcchSym, cpType, txAntIdx, effTbl2Lyr)
2046 RgSchCmnTbSzEff *effTbl2Lyr;
2051 U8 resOfCrs; /* Effective REs occupied by CRS */
2054 TRC2(rgSCHCmn2LyrCompEff);
/* Select OFDM symbols per RB based on cyclic prefix type */
2058 case RG_SCH_CMN_NOR_CP:
2061 case RG_SCH_CMN_EXT_CP:
2065 /* Generate a log error. This case should never be executed */
2069 /* Depending on the Tx Antenna Index, deduct the
2070 * Resource elements for the CRS */
2074 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
2077 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
2080 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
2083 /* Generate a log error. This case should never be executed */
/* Usable REs per RB = (data symbols) * subcarriers - CRS overhead */
2087 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
2088 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
/* Explicit zero-init before accumulation */
2090 (*effTbl2Lyr)[i] = 0;
2091 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
2093 /* This line computes the coding efficiency per 1024 REs */
2094 (*effTbl2Lyr)[i] += (rgTbSzTbl[1][i][j] * 1024) / (noResPerRb * (j+1));
/* Average the accumulated per-RB-count efficiencies */
2096 (*effTbl2Lyr)[i] /= RG_SCH_CMN_NUM_RBS;
2103 * @brief This function initializes the rgSchCmnDciFrmtSizes table.
2107 * Function: rgSCHCmnGetDciFrmtSizes
2108 * Purpose: This function determines the sizes of all
2109 * the available DCI Formats. The order of
2110 * bits addition for each format is inaccordance
2112 * Invoked by: rgSCHCmnRgrCellCfg
/* Populates rgSchCmnDciFrmtSizes[0..9] with the payload bit-width of DCI
 * formats 0, 1, 1A, 1B, 1C, 1D, 2, 2A, 3, 3A for this cell's bandwidth,
 * RBG size and antenna-port count.
 * NOTE(review): elided view; several field-size terms (and the apparently
 * conflicting "3 +" / "4 +" HARQ-process-Id lines in the 1A block) are
 * presumably alternate FDD/TDD #ifdef arms removed from this view --
 * confirm in the full source. */
2118 PRIVATE Void rgSCHCmnGetDciFrmtSizes
2123 PRIVATE Void rgSCHCmnGetDciFrmtSizes(cell)
2128 TRC2(rgSCHCmnGetDciFrmtSizes);
2130 /* DCI Format 0 size determination */
2131 rgSchCmnDciFrmtSizes[0] = 1 +
2133 rgSCHUtlLog32bitNbase2((cell->bwCfg.ulTotalBw * \
2134 (cell->bwCfg.ulTotalBw + 1))/2) +
2144 /* DCI Format 1 size determination */
2145 rgSchCmnDciFrmtSizes[1] = 1 +
2146 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2151 4 + 2 + /* HqProc Id and DAI */
2157 /* DCI Format 1A size determination */
2158 rgSchCmnDciFrmtSizes[2] = 1 + /* Flag for format0/format1a differentiation */
2159 1 + /* Localized/distributed VRB assignment flag */
2162 3 + /* Harq process Id */
2164 4 + /* Harq process Id */
2165 2 + /* UL Index or DAI */
2167 1 + /* New Data Indicator */
2170 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2171 (cell->bwCfg.dlTotalBw + 1))/2);
2172 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
2173 Since VRB is local */
2175 /* DCI Format 1B size determination */
2176 rgSchCmnDciFrmtSizes[3] = 1 +
2177 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2178 (cell->bwCfg.dlTotalBw + 1))/2) +
2188 ((cell->numTxAntPorts == 4)? 4:2) +
2191 /* DCI Format 1C size determination */
2192 /* Approximation: NDLVrbGap1 ~= Nprb for DL */
/* NOTE(review): the conditional below is NOT parenthesized like the
 * "((...<10)?0:1)" terms used for formats 2/2A. Since '+' binds tighter
 * than '?:', "(bw < 50)? 0:1 + ..." parses as
 * "(bw < 50) ? 0 : (1 + ...)" -- i.e. the whole remainder becomes the
 * else-arm instead of being added to a 0/1 gap-flag bit. This looks like
 * a missing-parentheses bug; verify the intended Format 1C size and
 * parenthesize accordingly. */
2193 rgSchCmnDciFrmtSizes[4] = (cell->bwCfg.dlTotalBw < 50)? 0:1 +
2194 (cell->bwCfg.dlTotalBw < 50)?
2195 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/2 * \
2196 (cell->bwCfg.dlTotalBw/2 + 1))/2)) :
2197 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/4 * \
2198 (cell->bwCfg.dlTotalBw/4 + 1))/2)) +
2201 /* DCI Format 1D size determination */
2202 rgSchCmnDciFrmtSizes[5] = 1 +
2203 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2204 (cell->bwCfg.dlTotalBw + 1))/2) +
2213 ((cell->numTxAntPorts == 4)? 4:2) +
2216 /* DCI Format 2 size determination */
2217 rgSchCmnDciFrmtSizes[6] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2218 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2226 ((cell->numTxAntPorts == 4)? 6:3);
2228 /* DCI Format 2A size determination */
2229 rgSchCmnDciFrmtSizes[7] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2230 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2238 ((cell->numTxAntPorts == 4)? 2:0);
2240 /* DCI Format 3 size determination */
/* Format 3 carries TPC commands and is sized equal to format 0 */
2241 rgSchCmnDciFrmtSizes[8] = rgSchCmnDciFrmtSizes[0];
2243 /* DCI Format 3A size determination */
2244 rgSchCmnDciFrmtSizes[9] = rgSchCmnDciFrmtSizes[0];
2251 * @brief This function initializes the cmnCell->dciAggrLvl table.
2255 * Function: rgSCHCmnGetCqiDciFrmt2AggrLvl
2256 * Purpose: This function determines the Aggregation level
2257 * for each CQI level against each DCI format.
2258 * Invoked by: rgSCHCmnRgrCellCfg
/* Builds cellSch->dciAggrLvl[cqi][fmt]: for each CQI level and each of the
 * 10 DCI formats sized by rgSCHCmnGetDciFrmtSizes, picks the smallest PDCCH
 * aggregation level (2/4/8/16 CCE-equivalents) whose coded-bit budget fits
 * the format at that CQI's PDCCH efficiency.
 * NOTE(review): the 192/384/768 thresholds are presumably the coded-bit
 * capacities of the successive aggregation levels at the assumed coding
 * model -- confirm before changing. The literal 10 is the DCI format count
 * (indices 0..9 of rgSchCmnDciFrmtSizes). */
2264 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl
2269 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl(cell)
2273 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2277 TRC2(rgSCHCmnGetCqiDciFrmt2AggrLvl);
2279 for (i = 0; i < RG_SCH_CMN_MAX_CQI; i++)
2281 for (j = 0; j < 10; j++)
2283 U32 pdcchBits; /* Actual number of phy bits needed for a given DCI Format
2284 * for a given CQI Level */
/* Coded bits required = payload bits scaled by 1024/efficiency */
2285 pdcchBits = (rgSchCmnDciFrmtSizes[j] * 1024)/rgSchCmnCqiPdcchEff[i];
2287 if (pdcchBits < 192)
2289 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL2;
2292 if (pdcchBits < 384)
2294 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL4;
2297 if (pdcchBits < 768)
2299 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL8;
2302 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL16;
2309 * @brief This function initializes all the data for the scheduler.
2313 * Function: rgSCHCmnDlInit
2314 * Purpose: This function initializes the following information:
2315 * 1. Efficiency table
2316 * 2. CQI to table index - It is one row for upto 3 RBs
2317 * and another row for greater than 3 RBs
2318 * currently extended prefix is compiled out.
2319 * Invoked by: MAC intialization code..may be ActvInit
2325 PRIVATE Void rgSCHCmnDlInit
2329 PRIVATE Void rgSCHCmnDlInit()
2336 RgSchCmnTbSzEff *effTbl;
2337 RgSchCmnCqiToTbs *tbsTbl;
2339 TRC2(rgSCHCmnDlInit);
2341 /* 0 corresponds to Single layer case, 1 corresponds to 2 layers case*/
2342 /* Init Efficiency table for normal cyclic prefix */
2343 /*Initialize Efficiency table for Layer Index 0 */
2344 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2345 /*Initialize Efficiency table for each of the CFI indices. The
2346 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2347 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[0];
2348 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[0];
2349 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[0];
2350 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[0];
2351 /*Initialize Efficency table for Tx Antenna Port Index 1 */
2352 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[0];
2353 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[0];
2354 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[0];
2355 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[0];
2356 /*Initialize Efficency table for Tx Antenna Port Index 2 */
2357 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[0];
2358 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[0];
2359 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[0];
2360 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[0];
2362 /*Initialize CQI to TBS table for Layer Index 0 for Normal CP */
2363 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[0];
2364 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[0];
2365 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[0];
2366 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[0];
2368 /*Intialize Efficency table for Layer Index 1 */
2369 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2370 /*Initialize Efficiency table for each of the CFI indices. The
2371 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2372 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[1];
2373 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[1];
2374 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[1];
2375 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[1];
2376 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2377 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[1];
2378 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[1];
2379 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[1];
2380 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[1];
2381 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2382 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[1];
2383 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[1];
2384 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[1];
2385 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[1];
2387 /*Initialize CQI to TBS table for Layer Index 1 for Normal CP */
2388 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[1];
2389 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[1];
2390 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[1];
2391 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[1];
2393 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2395 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2397 /* EfficiencyTbl calculation incase of 2 layers for normal CP */
2398 rgSCHCmnCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx,\
2399 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i]);
2400 rgSCHCmn2LyrCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx, \
2401 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i]);
2405 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2407 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2409 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i];
2410 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][i];
2411 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2412 (j >= 0) && (k > 0); --j)
2414 /* ADD CQI to MCS mapping correction
2415 * single dimensional array is replaced by 2 dimensions for different CFI*/
2416 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2418 (*tbsTbl)[k--] = (U8)j;
2425 /* effTbl,tbsTbl calculation incase of 2 layers for normal CP */
2426 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i];
2427 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][i];
2428 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2429 (j >= 0) && (k > 0); --j)
2431 /* ADD CQI to MCS mapping correction
2432 * single dimensional array is replaced by 2 dimensions for different CFI*/
2433 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2435 (*tbsTbl)[k--] = (U8)j;
2445 /* Efficiency Table for Extended CP */
2446 /*Initialize Efficiency table for Layer Index 0 */
2447 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2448 /*Initialize Efficiency table for each of the CFI indices. The
2449 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2450 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[0];
2451 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[0];
2452 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[0];
2453 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[0];
2454 /*Initialize Efficency table for Tx Antenna Port Index 1 */
2455 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[0];
2456 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[0];
2457 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[0];
2458 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[0];
2459 /*Initialize Efficency table for Tx Antenna Port Index 2 */
2460 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[0];
2461 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[0];
2462 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[0];
2463 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[0];
2465 /*Initialize CQI to TBS table for Layer Index 0 for Extended CP */
2466 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[0];
2467 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[0];
2468 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[0];
2469 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[0];
2471 /*Initialize Efficiency table for Layer Index 1 */
2472 /*Initialize Efficiency table for each of the CFI indices. The
2473 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2474 /*Initialize Efficency table for Tx Antenna Port Index 0 */
2475 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[1];
2476 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[1];
2477 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[1];
2478 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[1];
2479 /*Initialize Efficency table for Tx Antenna Port Index 1 */
2480 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[1];
2481 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[1];
2482 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[1];
2483 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[1];
2484 /*Initialize Efficency table for Tx Antenna Port Index 2 */
2485 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[1];
2486 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[1];
2487 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[1];
2488 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[1];
2490 /*Initialize CQI to TBS table for Layer Index 1 for Extended CP */
2491 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[1];
2492 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[1];
2493 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[1];
2494 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[1];
2495 /* Activate this code when extended cp is supported */
2496 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2498 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2500 /* EfficiencyTbl calculation incase of 2 layers for extendedl CP */
2501 rgSCHCmnCompEff( (U8)(i + 1 ), (U8)RG_SCH_CMN_EXT_CP, idx,\
2502 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i]);
2503 rgSCHCmn2LyrCompEff((U8)(i + 1), (U8) RG_SCH_CMN_EXT_CP,idx, \
2504 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i]);
2508 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2510 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2512 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i];
2513 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][i];
2514 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2515 (j >= 0) && (k > 0); --j)
2517 /* ADD CQI to MCS mapping correction
2518 * single dimensional array is replaced by 2 dimensions for different CFI*/
2519 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2521 (*tbsTbl)[k--] = (U8)j;
2528 /* effTbl,tbsTbl calculation incase of 2 layers for extended CP */
2529 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i];
2530 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][i];
2531 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2532 (j >= 0) && (k > 0); --j)
2534 /* ADD CQI to MCS mapping correction
2535 * single dimensional array is replaced by 2 dimensions for different CFI*/
2536 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2538 (*tbsTbl)[k--] = (U8)j;
2551 * @brief This function initializes all the data for the scheduler.
2555 * Function: rgSCHCmnUlInit
2556 * Purpose: This function initializes the following information:
2557 * 1. Efficiency table
2558 * 2. CQI to table index - It is one row for upto 3 RBs
2559 * and another row for greater than 3 RBs
2560 * currently extended prefix is compiled out.
2561 * Invoked by: MAC initialization code..may be ActvInit
2567 PRIVATE Void rgSCHCmnUlInit
2571 PRIVATE Void rgSCHCmnUlInit()
2574 U8 *mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_NOR_CP][0];
2575 RgSchCmnTbSzEff *effTbl = &rgSchCmnNorUlEff[0];
2576 CONSTANT RgSchCmnUlCqiInfo *cqiTbl = &rgSchCmnUlCqiTbl[0];
2579 TRC2(rgSCHCmnUlInit);
2581 /* Initaializing new variable added for UL eff */
2582 rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP] = &rgSchCmnNorUlEff[0];
2583 /* Reason behind using 3 as the number of symbols to rule out for
2584 * efficiency table computation would be that we are using 2 symbols for
2585 * DMRS(1 in each slot) and 1 symbol for SRS*/
2586 rgSCHCmnCompUlEff(RGSCH_UL_SYM_DMRS_SRS,RG_SCH_CMN_NOR_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP]);
2588 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2589 i >= 0 && j > 0; --i)
2591 if ((*effTbl)[i] <= cqiTbl[j].eff)
2593 mapTbl[j--] = (U8)i;
2600 effTbl = &rgSchCmnExtUlEff[0];
2601 mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_EXT_CP][0];
2603 /* Initaializing new variable added for UL eff */
2604 rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP] = &rgSchCmnExtUlEff[0];
2605 /* Reason behind using 3 as the number of symbols to rule out for
2606 * efficiency table computation would be that we are using 2 symbols for
2607 * DMRS(1 in each slot) and 1 symbol for SRS*/
2608 rgSCHCmnCompUlEff(3,RG_SCH_CMN_EXT_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP]);
2610 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2611 i >= 0 && j > 0; --i)
2613 if ((*effTbl)[i] <= cqiTbl[j].eff)
2615 mapTbl[j--] = (U8)i;
2627 * @brief This function initializes all the data for the scheduler.
2631 * Function: rgSCHCmnInit
2632 * Purpose: This function initializes the following information:
2633 * 1. Efficiency table
2634 * 2. CQI to table index - It is one row for upto 3 RBs
2635 * and another row for greater than 3 RBs
2636 * currently extended prefix is compiled out.
2637 * Invoked by: MAC initialization code..may be ActvInit
/* Entry point for common scheduler initialization: initializes EMTC
 * sub-schedulers (when compiled in), populates the rgSchCmnApis function
 * pointer table that the MAC layer uses to invoke the scheduler, and runs
 * the per-scheduler init hooks. */
2643 PUBLIC Void rgSCHCmnInit
2647 PUBLIC Void rgSCHCmnInit()
/* EMTC DL/UL scheduler init (visible calls; presumably under an EMTC
 * compile flag in the full file — missing lines in this chunk) */
2656 rgSCHEmtcCmnDlInit();
2657 rgSCHEmtcCmnUlInit();
2663 /* Init the function pointers */
/* UE / cell / logical channel configuration and deletion hooks */
2664 rgSchCmnApis.rgSCHRgrUeCfg = rgSCHCmnRgrUeCfg;
2665 rgSchCmnApis.rgSCHRgrUeRecfg = rgSCHCmnRgrUeRecfg;
2666 rgSchCmnApis.rgSCHFreeUe = rgSCHCmnUeDel;
2667 rgSchCmnApis.rgSCHRgrCellCfg = rgSCHCmnRgrCellCfg;
2668 rgSchCmnApis.rgSCHRgrCellRecfg = rgSCHCmnRgrCellRecfg;
2669 rgSchCmnApis.rgSCHFreeCell = rgSCHCmnCellDel;
2670 rgSchCmnApis.rgSCHRgrLchCfg = rgSCHCmnRgrLchCfg;
2671 rgSchCmnApis.rgSCHRgrLcgCfg = rgSCHCmnRgrLcgCfg;
2672 rgSchCmnApis.rgSCHRgrLchRecfg = rgSCHCmnRgrLchRecfg;
2673 rgSchCmnApis.rgSCHRgrLcgRecfg = rgSCHCmnRgrLcgRecfg;
2674 rgSchCmnApis.rgSCHFreeDlLc = rgSCHCmnFreeDlLc;
2675 rgSchCmnApis.rgSCHFreeLcg = rgSCHCmnLcgDel;
2676 rgSchCmnApis.rgSCHRgrLchDel = rgSCHCmnRgrLchDel;
/* Runtime indication / scheduling-event hooks */
2677 rgSchCmnApis.rgSCHActvtUlUe = rgSCHCmnActvtUlUe;
2678 rgSchCmnApis.rgSCHActvtDlUe = rgSCHCmnActvtDlUe;
2679 rgSchCmnApis.rgSCHHdlUlTransInd = rgSCHCmnHdlUlTransInd;
2680 rgSchCmnApis.rgSCHDlDedBoUpd = rgSCHCmnDlDedBoUpd;
2681 rgSchCmnApis.rgSCHUlRecMsg3Alloc = rgSCHCmnUlRecMsg3Alloc;
2682 rgSchCmnApis.rgSCHUlCqiInd = rgSCHCmnUlCqiInd;
2683 rgSchCmnApis.rgSCHPucchDeltaPwrInd = rgSCHPwrPucchDeltaInd;
2684 rgSchCmnApis.rgSCHUlHqProcForUe = rgSCHCmnUlHqProcForUe;
2686 rgSchCmnApis.rgSCHUpdUlHqProc = rgSCHCmnUpdUlHqProc;
/* BSR / PHR (buffer status and power headroom) report handlers */
2688 rgSchCmnApis.rgSCHUpdBsrShort = rgSCHCmnUpdBsrShort;
2689 rgSchCmnApis.rgSCHUpdBsrTrunc = rgSCHCmnUpdBsrTrunc;
2690 rgSchCmnApis.rgSCHUpdBsrLong = rgSCHCmnUpdBsrLong;
2691 rgSchCmnApis.rgSCHUpdPhr = rgSCHCmnUpdPhr;
2692 rgSchCmnApis.rgSCHUpdExtPhr = rgSCHCmnUpdExtPhr;
2693 rgSchCmnApis.rgSCHContResUlGrant = rgSCHCmnContResUlGrant;
2694 rgSchCmnApis.rgSCHSrRcvd = rgSCHCmnSrRcvd;
2695 rgSchCmnApis.rgSCHFirstRcptnReq = rgSCHCmnFirstRcptnReq;
2696 rgSchCmnApis.rgSCHNextRcptnReq = rgSCHCmnNextRcptnReq;
2697 rgSchCmnApis.rgSCHFirstHqFdbkAlloc = rgSCHCmnFirstHqFdbkAlloc;
2698 rgSchCmnApis.rgSCHNextHqFdbkAlloc = rgSCHCmnNextHqFdbkAlloc;
2699 rgSchCmnApis.rgSCHDlProcAddToRetx = rgSCHCmnDlProcAddToRetx;
2700 rgSchCmnApis.rgSCHDlCqiInd = rgSCHCmnDlCqiInd;
/* NOTE(review): EMTC-specific hook; likely under an EMTC compile flag
 * in the full file — confirm against the complete source */
2702 rgSchCmnApis.rgSCHUlProcAddToRetx = rgSCHCmnEmtcUlProcAddToRetx;
2705 rgSchCmnApis.rgSCHSrsInd = rgSCHCmnSrsInd;
2707 rgSchCmnApis.rgSCHDlTARpt = rgSCHCmnDlTARpt;
2708 rgSchCmnApis.rgSCHDlRlsSubFrm = rgSCHCmnDlRlsSubFrm;
2709 rgSchCmnApis.rgSCHUeReset = rgSCHCmnUeReset;
2711 rgSchCmnApis.rgSCHHdlCrntiCE = rgSCHCmnHdlCrntiCE;
2712 rgSchCmnApis.rgSCHDlProcAck = rgSCHCmnDlProcAck;
2713 rgSchCmnApis.rgSCHDlRelPdcchFbk = rgSCHCmnDlRelPdcchFbk;
2714 rgSchCmnApis.rgSCHUlSpsRelInd = rgSCHCmnUlSpsRelInd;
2715 rgSchCmnApis.rgSCHUlSpsActInd = rgSCHCmnUlSpsActInd;
2716 rgSchCmnApis.rgSCHUlCrcFailInd = rgSCHCmnUlCrcFailInd;
2717 rgSchCmnApis.rgSCHUlCrcInd = rgSCHCmnUlCrcInd;
2719 rgSchCmnApis.rgSCHDrxStrtInActvTmrInUl = rgSCHCmnDrxStrtInActvTmrInUl;
2720 rgSchCmnApis.rgSCHUpdUeDataIndLcg = rgSCHCmnUpdUeDataIndLcg;
/* Run the registered UL/DL scheduler init functions to fill the
 * scheduler dispatch tables */
2722 for (idx = 0; idx < RGSCH_NUM_SCHEDULERS; ++idx)
2724 rgSchUlSchdInits[idx](&rgSchUlSchdTbl[idx]);
2725 rgSchDlSchdInits[idx](&rgSchDlSchdTbl[idx]);
/* Same for the EMTC scheduler variants */
2728 for (idx = 0; idx < RGSCH_NUM_EMTC_SCHEDULERS; ++idx)
2730 rgSchEmtcUlSchdInits[idx](&rgSchEmtcUlSchdTbl[idx]);
2731 rgSchEmtcDlSchdInits[idx](&rgSchEmtcDlSchdTbl[idx]);
/* DL frequency-selective schedulers only exist in phase-2 builds with
 * TFU upgrade support */
2734 #if (defined (RG_PHASE2_SCHED) && defined(TFU_UPGRADE))
2735 for (idx = 0; idx < RGSCH_NUM_DLFS_SCHEDULERS; ++idx)
2737 rgSchDlfsSchdInits[idx](&rgSchDlfsSchdTbl[idx]);
/* Carrier aggregation: secondary-cell UE configuration hooks */
2741 rgSchCmnApis.rgSCHRgrSCellUeCfg = rgSCHCmnRgrSCellUeCfg;
2742 rgSchCmnApis.rgSCHRgrSCellUeDel = rgSCHCmnRgrSCellUeDel;
2749 * @brief This function is a wrapper to call scheduler specific API.
2753 * Function: rgSCHCmnDlRlsSubFrm
2754 * Purpose: Releases scheduler Information from DL SubFrm.
2758 * @param[in] RgSchCellCb *cell
2759 * @param[in] CmLteTimingInfo frm
/* Wrapper that releases scheduler information held by the DL subframe for
 * the given timing (cell, frm): looks the subframe up, returns it to the
 * utility pool, and re-initializes its DLFS state via the DLFS API table. */
2764 PUBLIC Void rgSCHCmnDlRlsSubFrm
2770 PUBLIC Void rgSCHCmnDlRlsSubFrm(cell, frm)
2772 CmLteTimingInfo frm;
2775 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2778 TRC2(rgSCHCmnDlRlsSubFrm);
2780 /* Get the pointer to the subframe */
2781 sf = rgSCHUtlSubFrmGet(cell, frm);
/* Release the subframe back to the utility layer */
2783 rgSCHUtlSubFrmPut(cell, sf);
2786 /* Re-initialize DLFS specific information for the sub-frame */
/* NOTE(review): apisDlfs is dereferenced unconditionally here; presumably
 * guarded by a DLFS compile flag in the full file — confirm */
2787 cellSch->apisDlfs->rgSCHDlfsReinitSf(cell, sf);
2795 * @brief This function is the starting function for DL allocation.
2799 * Function: rgSCHCmnDlCcchRarAlloc
2800 * Purpose: Scheduling for downlink. It performs allocation in the order
2801 * of priority with BCCH/PCH first, CCCH, Random Access and TA.
2803 * Invoked by: Scheduler
2805 * @param[in] RgSchCellCb* cell
2806 * @param[out] RgSchCmnDlRbAllocInfo* allocInfo
/* Drives CCCH (Msg4 and CCCH SDU) and RAR scheduling for one cell:
 * retransmissions first, then new transmissions, then the RA response.
 * Each new-transmission step is skipped when the current subframe is an
 * ABS (almost-blank subframe) configured with RGR_ABS_MUTE. */
2811 PRIVATE Void rgSCHCmnDlCcchRarAlloc
2816 PRIVATE Void rgSCHCmnDlCcchRarAlloc(cell)
2820 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2822 TRC2(rgSCHCmnDlCcchRarAlloc);
/* Msg4 retransmissions are scheduled unconditionally (not muted by ABS) */
2824 rgSCHCmnDlCcchRetx(cell, &cellSch->allocInfo);
2825 /* LTE_ADV_FLAG_REMOVED_START */
2826 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2828 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2830 /*eNodeB need to blank the subframe */
/* Non-mute ABS pattern: new Msg4 transmissions still allowed */
2834 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
/* ABS not active for this subframe: normal Msg4 transmission */
2839 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
2841 /* LTE_ADV_FLAG_REMOVED_END */
2845 /*Added these function calls for processing CCCH SDU arriving
2846 * after guard timer expiry.Functions differ from above two functions
2847 * in using ueCb instead of raCb.*/
2848 rgSCHCmnDlCcchSduRetx(cell, &cellSch->allocInfo);
2849 /* LTE_ADV_FLAG_REMOVED_START */
2850 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2852 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2854 /*eNodeB need to blank the subframe */
2858 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2863 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2865 /* LTE_ADV_FLAG_REMOVED_END */
/* RAR (Msg3 grant) scheduling: only when a Msg3 scheduling index is
 * pending for this subframe */
2869 if(cellSch->ul.msg3SchdIdx != RGSCH_INVALID_INFO)
2871 /* Do not schedule msg3 if there is a CFI change ongoing */
2872 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2874 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2878 /* LTE_ADV_FLAG_REMOVED_START */
2879 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2881 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2883 /*eNodeB need to blank the subframe */
2887 /* Do not schedule msg3 if there is a CFI change ongoing */
2888 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2890 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2896 /* Do not schedule msg3 if there is a CFI change ongoing */
2897 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2899 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2902 /* LTE_ADV_FLAG_REMOVED_END */
2910 * @brief Scheduling for CCCH SDU.
2914 * Function: rgSCHCmnCcchSduAlloc
2915 * Purpose: Scheduling for CCCH SDU
2917 * Invoked by: Scheduler
2919 * @param[in] RgSchCellCb* cell
2920 * @param[in] RgSchUeCb* ueCb
2921 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
/* Attempts allocation for one UE's CCCH SDU: checks subframe bandwidth,
 * obtains a CCCH SDU HARQ process, performs the dedicated allocation, and
 * on success queues the HARQ process on the CCCH SDU transmission list.
 * Returns ROK on success; error paths log and (implicitly, in the missing
 * lines of this chunk) return a failure code. */
2926 PRIVATE S16 rgSCHCmnCcchSduAlloc
2930 RgSchCmnDlRbAllocInfo *allocInfo
2933 PRIVATE S16 rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)
2936 RgSchCmnDlRbAllocInfo *allocInfo;
2939 RgSchDlRbAlloc *rbAllocInfo;
2940 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2941 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
2943 TRC2(rgSCHCmnCcchSduAlloc);
2945 /* Return if subframe BW exhausted */
2946 if (allocInfo->ccchSduAlloc.ccchSduDlSf->bw <=
2947 allocInfo->ccchSduAlloc.ccchSduDlSf->bwAssigned)
2949 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2950 "bw<=bwAssigned for UEID:%d",ueCb->ueId);
/* Acquire a HARQ process for the CCCH SDU; bail out on failure */
2954 if (rgSCHDhmGetCcchSduHqProc(ueCb, cellSch->dl.time, &(ueDl->proc)) != ROK)
2956 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2957 "rgSCHDhmGetCcchSduHqProc failed UEID:%d",ueCb->ueId);
2961 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
2962 rbAllocInfo->dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
/* Dedicated RB allocation for the CCCH SDU */
2964 if (rgSCHCmnCcchSduDedAlloc(cell, ueCb) != ROK)
2966 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
2967 rgSCHDhmRlsHqpTb(ueDl->proc, 0, FALSE);
2968 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2969 "rgSCHCmnCcchSduDedAlloc failed UEID:%d",ueCb->ueId);
/* Success: enqueue the HARQ proc for transmission and count this UE
 * against the per-subframe CCCH limit */
2972 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduTxLst, &ueDl->proc->reqLnk);
2973 ueDl->proc->reqLnk.node = (PTR)ueDl->proc;
2974 allocInfo->ccchSduAlloc.ccchSduDlSf->schdCcchUe++;
2979 * @brief This function scheduler for downlink CCCH messages.
2983 * Function: rgSCHCmnDlCcchSduTx
2984 * Purpose: Scheduling for downlink CCCH
2986 * Invoked by: Scheduler
2988 * @param[in] RgSchCellCb *cell
2989 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
/* Walks the cell's CCCH SDU UE list and tries a fresh allocation for each
 * eligible UE, stopping when the per-subframe CCCH UE limit is reached. */
2994 PRIVATE Void rgSCHCmnDlCcchSduTx
2997 RgSchCmnDlRbAllocInfo *allocInfo
3000 PRIVATE Void rgSCHCmnDlCcchSduTx(cell, allocInfo)
3002 RgSchCmnDlRbAllocInfo *allocInfo;
3007 RgSchCmnDlUe *ueCmnDl;
3008 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3010 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
3012 TRC2(rgSCHCmnDlCcchSduTx);
3014 node = cell->ccchSduUeLst.first;
/* Stop once the configured max CCCH allocations for this DL subframe is
 * reached (0 means no limit) */
3017 if(cellSch->dl.maxCcchPerDlSf &&
3018 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3024 ueCb = (RgSchUeCb *)(node->node);
3025 ueCmnDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3027 /* Fix : syed postpone scheduling for this
3028 * until msg4 is done */
3029 /* Fix : syed RLC can erroneously send CCCH SDU BO
3030 * twice. Hence an extra guard to avoid if already
3031 * scheduled for RETX */
/* Only schedule UEs whose HARQ entity is active (second half of the
 * condition is in lines missing from this chunk) */
3032 if ((!(ueCb->dl.dlInactvMask & RG_HQENT_INACTIVE)) &&
3035 if ((rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)) != ROK)
3042 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"ERROR!! THIS SHOULD "
3043 "NEVER HAPPEN for UEID:%d", ueCb->ueId);
3053 * @brief This function scheduler for downlink CCCH messages.
3057 * Function: rgSCHCmnDlCcchTx
3058 * Purpose: Scheduling for downlink CCCH
3060 * Invoked by: Scheduler
3062 * @param[in] RgSchCellCb *cell
3063 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
/* Walks the RA control blocks pending Msg4 scheduling and attempts a Msg4
 * allocation for each, bounded by the per-subframe CCCH UE limit. */
3068 PRIVATE Void rgSCHCmnDlCcchTx
3071 RgSchCmnDlRbAllocInfo *allocInfo
3074 PRIVATE Void rgSCHCmnDlCcchTx(cell, allocInfo)
3076 RgSchCmnDlRbAllocInfo *allocInfo;
3081 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3082 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3084 TRC2(rgSCHCmnDlCcchTx);
/* List of raCbs awaiting Msg4 scheduling */
3086 node = cell->raInfo.toBeSchdLst.first;
/* Honor the per-subframe cap on CCCH allocations (0 disables the cap) */
3089 if(cellSch->dl.maxCcchPerDlSf &&
3090 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3097 raCb = (RgSchRaCb *)(node->node);
3099 /* Address allocation for this UE for MSG 4 */
3100 /* Allocation for Msg4 */
3101 if ((rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)) != ROK)
3112 * @brief This function scheduler for downlink CCCH messages.
3116 * Function: rgSCHCmnDlCcchSduRetx
3117 * Purpose: Scheduling for downlink CCCH
3119 * Invoked by: Scheduler
3121 * @param[in] RgSchCellCb *cell
3122 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
/* Schedules CCCH SDU retransmissions: walks the retransmission list, skips
 * HARQ procs that cannot fit (or must be avoided on TDD DwPTS subframes),
 * fills the UE's RB allocation block from the stored TB info, and queues
 * the proc on the CCCH SDU retx list. Accumulated RBs are committed to the
 * subframe's assigned bandwidth at the end. */
3127 PRIVATE Void rgSCHCmnDlCcchSduRetx
3130 RgSchCmnDlRbAllocInfo *allocInfo
3133 PRIVATE Void rgSCHCmnDlCcchSduRetx(cell, allocInfo)
3135 RgSchCmnDlRbAllocInfo *allocInfo;
3138 RgSchDlRbAlloc *rbAllocInfo;
3140 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3142 RgSchDlHqProcCb *hqP;
3145 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
3147 TRC2(rgSCHCmnDlCcchSduRetx);
3149 node = cellSch->dl.ccchSduRetxLst.first;
/* Per-subframe CCCH allocation cap (0 disables the cap) */
3152 if(cellSch->dl.maxCcchPerDlSf &&
3153 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3160 hqP = (RgSchDlHqProcCb *)(node->node);
3163 /* DwPts Scheduling Changes Start */
/* TDD special subframe: skip retx that must not land on DwPTS */
3165 if (rgSCHCmnRetxAvoidTdd(allocInfo->ccchSduAlloc.ccchSduDlSf,
3171 /* DwPts Scheduling Changes End */
/* Skip if the retx grant no longer fits in the remaining subframe BW */
3173 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3177 ueCb = (RgSchUeCb*)(hqP->hqE->ue);
3178 ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3180 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
3181 /* Fill RB Alloc Info */
3182 rbAllocInfo->dlSf = dlSf;
3183 rbAllocInfo->tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3184 rbAllocInfo->rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3185 /* Fix : syed iMcs setting did not correspond to RETX */
3186 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3187 rbAllocInfo->tbInfo[0].imcs);
3188 rbAllocInfo->rnti = ueCb->ueId;
3189 rbAllocInfo->tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3190 /* Fix : syed Copying info in entirety without depending on stale TX information */
3191 rbAllocInfo->tbInfo[0].tbCb = &hqP->tbInfo[0];
3192 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
3193 /* Fix : syed Assigning proc to scratchpad */
3196 retxBw += rbAllocInfo->rbsReq;
/* Queue this HARQ proc for CCCH SDU retransmission */
3198 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduRetxLst, \
3200 hqP->reqLnk.node = (PTR)hqP;
/* Commit total RBs consumed by all queued retransmissions */
3204 dlSf->bwAssigned += retxBw;
3210 * @brief This function scheduler for downlink CCCH messages.
3214 * Function: rgSCHCmnDlCcchRetx
3215 * Purpose: Scheduling for downlink CCCH
3217 * Invoked by: Scheduler
3219 * @param[in] RgSchCellCb *cell
3220 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
/* Schedules Msg4 retransmissions: mirrors rgSCHCmnDlCcchSduRetx but keys
 * off the raCb (temporary C-RNTI) instead of a ueCb, filling the raCb's
 * embedded RB allocation block and queuing on the msg4 retx list. */
3225 PRIVATE Void rgSCHCmnDlCcchRetx
3228 RgSchCmnDlRbAllocInfo *allocInfo
3231 PRIVATE Void rgSCHCmnDlCcchRetx(cell, allocInfo)
3233 RgSchCmnDlRbAllocInfo *allocInfo;
3237 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3239 RgSchDlHqProcCb *hqP;
3241 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3243 TRC2(rgSCHCmnDlCcchRetx);
3245 node = cellSch->dl.msg4RetxLst.first;
/* Per-subframe CCCH allocation cap (0 disables the cap) */
3248 if(cellSch->dl.maxCcchPerDlSf &&
3249 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3255 hqP = (RgSchDlHqProcCb *)(node->node);
3259 /* DwPts Scheduling Changes Start */
/* TDD special subframe: skip retx that must not land on DwPTS */
3261 if (rgSCHCmnRetxAvoidTdd(allocInfo->msg4Alloc.msg4DlSf,
3267 /* DwPts Scheduling Changes End */
/* Skip if the retx grant no longer fits in the remaining subframe BW */
3269 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3273 raCb = (RgSchRaCb*)(hqP->hqE->raCb);
3274 /* Fill RB Alloc Info */
3275 raCb->rbAllocInfo.dlSf = dlSf;
3276 raCb->rbAllocInfo.tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3277 raCb->rbAllocInfo.rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3278 /* Fix : syed iMcs setting did not correspond to RETX */
3279 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3280 raCb->rbAllocInfo.tbInfo[0].imcs);
/* Msg4 is addressed with the temporary C-RNTI assigned at RA */
3281 raCb->rbAllocInfo.rnti = raCb->tmpCrnti;
3282 raCb->rbAllocInfo.tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3283 /* Fix; syed Copying info in entirety without depending on stale TX information */
3284 raCb->rbAllocInfo.tbInfo[0].tbCb = &hqP->tbInfo[0];
3285 raCb->rbAllocInfo.tbInfo[0].schdlngForTb = TRUE;
3287 retxBw += raCb->rbAllocInfo.rbsReq;
/* Queue this HARQ proc for Msg4 retransmission */
3289 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4RetxLst, \
3291 hqP->reqLnk.node = (PTR)hqP;
/* Commit total RBs consumed by all queued retransmissions */
3295 dlSf->bwAssigned += retxBw;
3301 * @brief This function implements scheduler DL allocation for
3302 * for broadcast (on PDSCH) and paging.
3306 * Function: rgSCHCmnDlBcchPcch
3307 * Purpose: This function implements scheduler for DL allocation
3308 * for broadcast (on PDSCH) and paging.
3310 * Invoked by: Scheduler
3312 * @param[in] RgSchCellCb* cell
/* DL allocation for broadcast (BCCH on BCH and on DL-SCH) and paging
 * (PCCH): computes the target subframe a fixed delta ahead of current
 * time, services any pending BO reports whose transmission time matches,
 * and invokes rgSCHCmnClcAlloc for BCCH/PCCH DL-SCH allocations. */
3318 PRIVATE Void rgSCHCmnDlBcchPcch
3321 RgSchCmnDlRbAllocInfo *allocInfo,
3322 RgInfSfAlloc *subfrmAlloc
3325 PRIVATE Void rgSCHCmnDlBcchPcch(cell, allocInfo, subfrmAlloc)
3327 RgSchCmnDlRbAllocInfo *allocInfo;
3328 RgInfSfAlloc *subfrmAlloc;
3331 CmLteTimingInfo frm;
3333 RgSchClcDlLcCb *pcch;
3337 RgSchClcDlLcCb *bcch, *bch;
3338 #endif/*RGR_SI_SCH*/
3341 TRC2(rgSCHCmnDlBcchPcch);
3343 frm = cell->crntTime;
3345 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
3346 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
3347 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
3349 RGSCH_SUBFRAME_INDEX(frm);
3350 //RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
3353 /* Compute the subframe for which allocation is being made */
3354 /* essentially, we need pointer to the dl frame for this subframe */
3355 sf = rgSCHUtlSubFrmGet(cell, frm);
/* ---- BCCH carried on BCH (MIB) ---- */
3359 bch = rgSCHDbmGetBcchOnBch(cell);
3360 #if (ERRCLASS & ERRCLS_DEBUG)
3363 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on BCH is not configured");
3367 if (bch->boLst.first != NULLP)
3369 bo = (RgSchClcBoRpt *)(bch->boLst.first->node);
/* Serve the BO report only when its transmission time equals the target
 * subframe */
3370 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3372 sf->bch.tbSize = bo->bo;
3373 cmLListDelFrm(&bch->boLst, bch->boLst.first);
3374 /* ccpu00117052 - MOD - Passing double pointer
3375 for proper NULLP assignment*/
3376 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(*bo));
3377 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, bch->lcId,TRUE);
/* NOTE(review): presumably the MIB repetition check (every 4th frame,
 * subframe 0) in an ERRCLS_DEBUG branch — surrounding lines missing */
3382 if ((frm.sfn % 4 == 0) && (frm.subframe == 0))
/* ---- BCCH carried on DL-SCH (SIB1 / SI messages) ---- */
3387 allocInfo->bcchAlloc.schdFirst = FALSE;
3388 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
3389 #if (ERRCLASS & ERRCLS_DEBUG)
3392 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3396 if (bcch->boLst.first != NULLP)
3398 bo = (RgSchClcBoRpt *)(bcch->boLst.first->node);
3400 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3402 allocInfo->bcchAlloc.schdFirst = TRUE;
3403 /* Time to perform allocation for this BCCH transmission */
3404 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
/* Second BCCH-on-DLSCH logical channel: windowed transmission */
3408 if(!allocInfo->bcchAlloc.schdFirst)
3411 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
3412 #if (ERRCLASS & ERRCLS_DEBUG)
3415 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3419 lnk = bcch->boLst.first;
3420 while (lnk != NULLP)
3422 bo = (RgSchClcBoRpt *)(lnk->node);
/* Schedule if the target time falls inside the BO's [timeToTx,
 * maxTimeToTx] window */
3424 valid = rgSCHCmnChkInWin(frm, bo->timeToTx, bo->maxTimeToTx);
3428 bo->i = RGSCH_CALC_SF_DIFF(frm, bo->timeToTx);
3429 /* Time to perform allocation for this BCCH transmission */
3430 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
/* Window expired: drop the stale BO report */
3435 valid = rgSCHCmnChkPastWin(frm, bo->maxTimeToTx);
3438 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
3439 /* ccpu00117052 - MOD - Passing double pointer
3440 for proper NULLP assignment*/
3441 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo,
3442 sizeof(RgSchClcBoRpt));
/* With RGR_SI_SCH, SI scheduling is delegated entirely */
3448 rgSCHDlSiSched(cell, allocInfo, subfrmAlloc);
3449 #endif/*RGR_SI_SCH*/
/* ---- PCCH (paging) ---- */
3451 pcch = rgSCHDbmGetPcch(cell);
3455 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"PCCH on DLSCH is not configured");
3459 if (pcch->boLst.first != NULLP)
3461 bo = (RgSchClcBoRpt *)(pcch->boLst.first->node);
3463 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3465 /* Time to perform allocation for this PCCH transmission */
3466 rgSCHCmnClcAlloc(cell, sf, pcch, RGSCH_P_RNTI, allocInfo);
3474 * Fun: rgSCHCmnChkInWin
3476 * Desc: This function checks if frm occurs in window
3478 * Ret: TRUE - if in window
3483 * File: rg_sch_cmn.c
3487 PUBLIC Bool rgSCHCmnChkInWin
3489 CmLteTimingInfo frm,
3490 CmLteTimingInfo start,
3494 PUBLIC Bool rgSCHCmnChkInWin(frm, start, end)
3495 CmLteTimingInfo frm;
3496 CmLteTimingInfo start;
3497 CmLteTimingInfo end;
3502 TRC2(rgSCHCmnChkInWin);
3504 if (end.sfn > start.sfn)
3506 if (frm.sfn > start.sfn
3507 || (frm.sfn == start.sfn && frm.slot >= start.slot))
3509 if (frm.sfn < end.sfn
3511 || (frm.sfn == end.sfn && frm.slot <= end.slot))
3513 || (frm.sfn == end.sfn && frm.slot <= start.slot))
3520 /* Testing for wrap around, sfn wraparound check should be enough */
3521 else if (end.sfn < start.sfn)
3523 if (frm.sfn > start.sfn
3524 || (frm.sfn == start.sfn && frm.slot >= start.slot))
3530 if (frm.sfn < end.sfn
3531 || (frm.sfn == end.sfn && frm.slot <= end.slot))
3537 else /* start.sfn == end.sfn */
3539 if (frm.sfn == start.sfn
3540 && (frm.slot >= start.slot
3541 && frm.slot <= end.slot))
3548 } /* end of rgSCHCmnChkInWin*/
3552 * Fun: rgSCHCmnChkPastWin
3554 * Desc: This function checks if frm has gone past window edge
3556 * Ret: TRUE - if past window edge
3561 * File: rg_sch_cmn.c
/* Returns TRUE if 'frm' has gone past the window edge 'end'. Implemented
 * by testing membership in the window (end+1 subframe .. end+1 frame)
 * via rgSCHCmnChkInWin: anything in that span is "just past" the edge. */
3565 PUBLIC Bool rgSCHCmnChkPastWin
3567 CmLteTimingInfo frm,
3571 PUBLIC Bool rgSCHCmnChkPastWin(frm, end)
3572 CmLteTimingInfo frm;
3573 CmLteTimingInfo end;
3576 CmLteTimingInfo refFrm = end;
3579 TRC2(rgSCHCmnChkPastWin);
/* refFrm = end advanced by one full frame; end advanced by one subframe */
3581 RGSCH_INCR_FRAME(refFrm.sfn);
3582 RGSCH_INCR_SUB_FRAME(end, 1);
3583 pastWin = rgSCHCmnChkInWin(frm, end, refFrm);
3586 } /* end of rgSCHCmnChkPastWin*/
3589 * @brief This function implements allocation of the resources for common
3590 * channels BCCH, PCCH.
3594 * Function: rgSCHCmnClcAlloc
3595 * Purpose: This function implements selection of number of RBs based
3596 * the allowed grant for the service. It is also responsible
3597 * for selection of MCS for the transmission.
3599 * Invoked by: Scheduler
3601 * @param[in] RgSchCellCb *cell,
3602 * @param[in] RgSchDlSf *sf,
3603 * @param[in] RgSchClcDlLcCb *lch,
3604 * @param[in] U16 rnti,
3605 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
/* Allocates DL resources for a common logical channel (BCCH or PCCH):
 * derives the RB count from the pending BO, applies DwPTS compensation on
 * TDD special subframes, checks remaining subframe bandwidth, and fills
 * either the bcchAlloc (SI-RNTI) or pcchAlloc (P-RNTI) request. */
3610 PRIVATE Void rgSCHCmnClcAlloc
3614 RgSchClcDlLcCb *lch,
3616 RgSchCmnDlRbAllocInfo *allocInfo
3619 PRIVATE Void rgSCHCmnClcAlloc(cell, sf, lch, rnti, allocInfo)
3622 RgSchClcDlLcCb *lch;
3624 RgSchCmnDlRbAllocInfo *allocInfo;
3627 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
3634 U8 cfi = cellDl->currCfi;
3637 TRC2(rgSCHCmnClcAlloc);
/* Head-of-line BO report drives this allocation */
3639 bo = (RgSchClcBoRpt *)(lch->boLst.first->node);
3643 /* rgSCHCmnClcRbAllocForFxdTb(cell, bo->bo, cellDl->ccchCqi, &rb);*/
/* bitsPerRb == 0: fall back to scanning the TB size table for the
 * smallest RB count that carries tbs bytes */
3644 if(cellDl->bitsPerRb==0)
3646 while ((rgTbSzTbl[0][0][rb]) < (tbs*8))
/* Otherwise derive RBs directly from the per-RB bit capacity */
3654 rb = RGSCH_CEIL((tbs*8), cellDl->bitsPerRb);
3656 /* DwPTS Scheduling Changes Start */
3658 if(sf->sfType == RG_SCH_SPL_SF_DATA)
3660 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
3662 /* Calculate the less RE's because of DwPTS */
3663 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
3665 /* Increase number of RBs in Spl SF to compensate for lost REs */
3666 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
3669 /* DwPTS Scheduling Changes End */
3670 /*ccpu00115595- end*/
3671 /* additional check to see if required RBs
3672 * exceeds the available */
3673 if (rb > sf->bw - sf->bwAssigned)
3675 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"BW allocation "
3676 "failed for CRNTI:%d",rnti);
3680 /* Update the subframe Allocated BW field */
3681 sf->bwAssigned = sf->bwAssigned + rb;
3682 /* Fill in the BCCH/PCCH transmission info to the RBAllocInfo struct */
3683 if (rnti == RGSCH_SI_RNTI)
3685 allocInfo->bcchAlloc.rnti = rnti;
3686 allocInfo->bcchAlloc.dlSf = sf;
3687 allocInfo->bcchAlloc.tbInfo[0].bytesReq = tbs;
3688 allocInfo->bcchAlloc.rbsReq = rb;
3689 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
3690 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
3691 /* Nprb indication at PHY for common Ch */
3692 allocInfo->bcchAlloc.nPrb = bo->nPrb;
/* Non-SI RNTI here is paging (P-RNTI) */
3696 allocInfo->pcchAlloc.rnti = rnti;
3697 allocInfo->pcchAlloc.dlSf = sf;
3698 allocInfo->pcchAlloc.tbInfo[0].bytesReq = tbs;
3699 allocInfo->pcchAlloc.rbsReq = rb;
3700 allocInfo->pcchAlloc.tbInfo[0].imcs = mcs;
3701 allocInfo->pcchAlloc.tbInfo[0].noLyr = 1;
3702 allocInfo->pcchAlloc.nPrb = bo->nPrb;
3709 * @brief This function implements PDCCH allocation for common channels.
3713 * Function: rgSCHCmnCmnPdcchAlloc
3714 * Purpose: This function implements allocation of PDCCH for a UE.
3715 * 1. This uses index 0 of PDCCH table for efficiency.
3716 * 2. Uses he candidate PDCCH count for the aggr level.
3717 * 3. Look for availability for each candidate and choose
3718 * the first one available.
3720 * Invoked by: Scheduler
3722 * @param[in] RgSchCellCb *cell
3723 * @param[in] RgSchDlSf *sf
3724 * @return RgSchPdcch *
3725 * -# NULLP when unsuccessful
/* Allocates a PDCCH in the common search space at the cell's configured
 * common-channel aggregation level. Re-initializes the subframe's PDCCH
 * bookkeeping if the CCE count changed (e.g. after a CFI change), then
 * picks the first available candidate. Returns the PDCCH, or NULLP and
 * marks the subframe's cceFailure flag when none is available. */
3729 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc
3735 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc(cell, subFrm)
3740 CmLteAggrLvl aggrLvl;
3741 RgSchPdcchInfo *pdcchInfo;
3743 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3744 U8 numCce; /*store num CCEs based on
3745 aggregation level */
3746 TRC2(rgSCHCmnCmnPdcchAlloc);
3748 aggrLvl = cellSch->dl.cmnChAggrLvl;
3750 pdcchInfo = &(subFrm->pdcchInfo);
3752 /* Updating the no. of nCce in pdcchInfo, in case if CFI
3755 if(subFrm->nCce != pdcchInfo->nCce)
3757 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
/* NOTE(review): second variant compares against the cell-level nCce;
 * presumably the alternate branch of a compile-time switch — lines
 * missing from this chunk, confirm against the full file */
3760 if(cell->nCce != pdcchInfo->nCce)
3762 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
/* Map aggregation level to CCE count (switch; AGGR_LVL1/2 cases are in
 * lines missing from this chunk) */
3768 case CM_LTE_AGGR_LVL4:
3771 case CM_LTE_AGGR_LVL8:
3774 case CM_LTE_AGGR_LVL16:
3781 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
3784 pdcch->isSpsRnti = FALSE;
3786 /* Increment the CCE used counter in the current subframe */
3787 subFrm->cceCnt += numCce;
3788 pdcch->pdcchSearchSpace = RG_SCH_CMN_SEARCH_SPACE;
3793 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
3794 subFrm->isCceFailure = TRUE;
3796 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
3797 "PDCCH ERR: NO PDDCH AVAIL IN COMMON SEARCH SPACE aggr:%u",
3804 * @brief This function implements bandwidth allocation for common channels.
3808 * Function: rgSCHCmnClcRbAlloc
3809 * Purpose: This function implements bandwidth allocation logic
3810 * for common control channels.
3812 * Invoked by: Scheduler
3814 * @param[in] RgSchCellCb* cell
3818 * @param[in] U32 *tbs
3819 * @param[in] U8 *mcs
3820 * @param[in] RgSchDlSf *sf
3826 PUBLIC Void rgSCHCmnClcRbAlloc
3839 PUBLIC Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, iTbs, isSpsBo)
3852 PRIVATE Void rgSCHCmnClcRbAlloc
3863 PRIVATE Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, sf)
3872 #endif /* LTEMAC_SPS */
3875 RgSchCmnTbSzEff *effTbl;
3878 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3879 U8 cfi = cellSch->dl.currCfi;
3881 TRC2(rgSCHCmnClcRbAlloc);
3883 /* first get the CQI to MCS table and determine the number of RBs */
3884 effTbl = (RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]);
3885 iTbsVal = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[cqi];
3886 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
3888 /* Efficiency is number of bits per 1024 REs */
3889 eff = (*effTbl)[iTbsVal];
3891 /* Get the number of REs needed for this bo */
3892 noRes = ((bo * 8 * 1024) / eff );
3894 /* Get the number of RBs needed for this transmission */
3895 /* Number of RBs = No of REs / No of REs per RB */
3896 tmpRb = RGSCH_CEIL(noRes, cellSch->dl.noResPerRb[cfi]);
3897 /* KWORK_FIX: added check to see if rb has crossed maxRb*/
3898 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3899 if (tmpRb > cellSch->dl.maxDlBwPerUe)
3901 tmpRb = cellSch->dl.maxDlBwPerUe;
3903 while ((rgTbSzTbl[0][iTbsVal][tmpRb-1]/8) < bo &&
3904 (tmpRb < cellSch->dl.maxDlBwPerUe))
3907 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3909 *tbs = rgTbSzTbl[0][iTbsVal][tmpRb-1]/8;
3911 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
3919 * @brief Scheduling for MSG4.
3923 * Function: rgSCHCmnMsg4Alloc
3924 * Purpose: Scheduling for MSG4
3926 * Invoked by: Scheduler
3928 * @param[in] RgSchCellCb* cell
3929 * @param[in] RgSchRaCb* raCb
3930 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3935 PRIVATE S16 rgSCHCmnMsg4Alloc
3939 RgSchCmnDlRbAllocInfo *allocInfo
3942 PRIVATE S16 rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)
3945 RgSchCmnDlRbAllocInfo *allocInfo;
3948 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3950 TRC2(rgSCHCmnMsg4Alloc);
3952 /* SR_RACH_STATS : MSG4 TO BE TXED */
3954 /* Return if subframe BW exhausted */
3955 if (allocInfo->msg4Alloc.msg4DlSf->bw <=
3956 allocInfo->msg4Alloc.msg4DlSf->bwAssigned)
3958 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId ,
3963 if (rgSCHDhmGetMsg4HqProc(raCb, cellSch->dl.time) != ROK)
3965 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3966 "rgSCHDhmGetMsg4HqProc failed");
3970 raCb->rbAllocInfo.dlSf = allocInfo->msg4Alloc.msg4DlSf;
3972 if (rgSCHCmnMsg4DedAlloc(cell, raCb) != ROK)
3974 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
3975 rgSCHDhmRlsHqpTb(raCb->dlHqE->msg4Proc, 0, FALSE);
3976 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3977 "rgSCHCmnMsg4DedAlloc failed.");
3980 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4TxLst, &raCb->dlHqE->msg4Proc->reqLnk);
3981 raCb->dlHqE->msg4Proc->reqLnk.node = (PTR)raCb->dlHqE->msg4Proc;
3982 allocInfo->msg4Alloc.msg4DlSf->schdCcchUe++;
3989 * @brief This function implements PDCCH allocation for an UE.
3993 * Function: PdcchAlloc
3994 * Purpose: This function implements allocation of PDCCH for an UE.
3995 * 1. Get the aggregation level for the CQI of the UE.
3996 * 2. Get the candidate PDCCH count for the aggr level.
3997 * 3. Look for availability for each candidate and choose
3998 * the first one available.
4000 * Invoked by: Scheduler
4005 * @param[in] dciFrmt
4006 * @return RgSchPdcch *
4007 * -# NULLP when unsuccessful
4011 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc
4017 TfuDciFormat dciFrmt,
4021 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc(cell, subFrm, cqi, dciFrmt, isDtx)
4026 TfuDciFormat dciFrmt;
4030 CmLteAggrLvl aggrLvl;
4031 RgSchPdcchInfo *pdcchInfo;
4034 TRC2(rgSCHCmnPdcchAlloc);
4036 /* 3.1 consider the selected DCI format size in determining the
4037 * aggregation level */
4038 //TODO_SID Need to update. Currently using 4 aggregation level
4039 aggrLvl = CM_LTE_AGGR_LVL2;//cellSch->dciAggrLvl[cqi][dciFrmt];
4042 if((dciFrmt == TFU_DCI_FORMAT_1A) &&
4043 ((ue) && (ue->allocCmnUlPdcch)) )
4045 pdcch = rgSCHCmnCmnPdcchAlloc(cell, subFrm);
4046 /* Since CRNTI Scrambled */
4049 pdcch->dciNumOfBits = ue->dciSize.cmnSize[dciFrmt];
4050 // prc_trace_format_string(PRC_TRACE_GROUP_PS, PRC_TRACE_INFO_LOW,"Forcing alloc in CMN search spc size %d fmt %d \n",
4051 // pdcch->dciNumOfBits, dciFrmt);
4057 /* Incrementing aggrLvl by 1 if it not AGGR_LVL8(MAX SIZE)
4058 * inorder to increse the redudancy bits for better decoding of UE */
4061 if (aggrLvl != CM_LTE_AGGR_LVL16)
4065 case CM_LTE_AGGR_LVL2:
4066 aggrLvl = CM_LTE_AGGR_LVL4;
4068 case CM_LTE_AGGR_LVL4:
4069 aggrLvl = CM_LTE_AGGR_LVL8;
4071 case CM_LTE_AGGR_LVL8:
4072 aggrLvl = CM_LTE_AGGR_LVL16;
4081 pdcchInfo = &subFrm->pdcchInfo;
4083 /* Updating the no. of nCce in pdcchInfo, in case if CFI
4086 if(subFrm->nCce != pdcchInfo->nCce)
4088 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
4091 if(cell->nCce != pdcchInfo->nCce)
4093 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
4097 if (pdcchInfo->nCce < (1 << (aggrLvl - 1)))
4099 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4100 subFrm->isCceFailure = TRUE;
4101 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4102 "PDCCH ERR: NO PDDCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4108 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
4110 /* SR_RACH_STATS : Reset isTBMsg4 */
4111 pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4= FALSE;
4112 pdcch->dci.u.format0Info.isSrGrant = FALSE;
4114 pdcch->isSpsRnti = FALSE;
4116 /* Increment the CCE used counter in the current subframe */
4117 subFrm->cceCnt += aggrLvl;
4118 pdcch->pdcchSearchSpace = RG_SCH_UE_SPECIFIC_SEARCH_SPACE;
4122 if (ue->cell != cell)
4124 /* Secondary Cell */
4125 //pdcch->dciNumOfBits = ue->dciSize.noUlCcSize[dciFrmt];
4126 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4131 //pdcch->dciNumOfBits = ue->dciSize.dedSize[dciFrmt];
4132 //TODO_SID Need to update dci size.
4133 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4139 pdcch->dciNumOfBits = cell->dciSize.size[dciFrmt];
4144 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4145 subFrm->isCceFailure = TRUE;
4147 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4148 "PDCCH ERR: NO PDDCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4155 * @brief This function implements BW allocation for CCCH SDU
4159 * Function: rgSCHCmnCcchSduDedAlloc
4160 * Purpose: Downlink bandwidth Allocation for CCCH SDU.
4162 * Invoked by: Scheduler
4164 * @param[in] RgSchCellCb* cell
4165 * @param[out] RgSchUeCb *ueCb
4170 PRIVATE S16 rgSCHCmnCcchSduDedAlloc
4176 PRIVATE S16 rgSCHCmnCcchSduDedAlloc(cell, ueCb)
4181 RgSchDlHqEnt *hqE = NULLP;
4183 RgSchDlRbAlloc *rbAllocinfo = NULLP;
4184 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4188 U8 cfi = cellDl->currCfi;
4191 TRC2(rgSCHCmnCcchSduDedAlloc);
4193 rbAllocinfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
4195 effBo = ueCb->dlCcchInfo.bo + RGSCH_CCCH_SDU_HDRSIZE;
4198 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4199 &rbAllocinfo->tbInfo[0].bytesReq,
4200 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4201 #else /* LTEMAC_SPS */
4202 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4203 &rbAllocinfo->tbInfo[0].bytesReq,\
4204 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4206 #endif /* LTEMAC_SPS */
4209 /* Cannot exceed the total number of RBs in the cell */
4210 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4211 rbAllocinfo->dlSf->bwAssigned)))
4213 /* Check if atleast one allocation was possible.
4214 This may be the case where the Bw is very less and
4215 with the configured CCCH CQI, CCCH SDU exceeds the min Bw */
4216 if (rbAllocinfo->dlSf->bwAssigned == 0)
4218 numRb = rbAllocinfo->dlSf->bw;
4219 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4220 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4224 rbAllocinfo->rbsReq = numRb;
4225 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4226 /* DwPTS Scheduling Changes Start */
4228 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4230 rbAllocinfo->tbInfo[0].bytesReq =
4231 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1,cfi);
4234 /* DwPTS Scheduling Changes End */
4235 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4243 /* Update the subframe Allocated BW field */
4244 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4245 rbAllocinfo->rbsReq;
4246 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
4247 rbAllocinfo->tbInfo[0].tbCb = &hqE->ccchSduProc->tbInfo[0];
4248 rbAllocinfo->rnti = ueCb->ueId;
4249 rbAllocinfo->tbInfo[0].noLyr = 1;
4256 * @brief This function implements BW allocation for MSG4
4260 * Function: rgSCHCmnMsg4DedAlloc
4261 * Purpose: Downlink bandwidth Allocation for MSG4.
4263 * Invoked by: Scheduler
4265 * @param[in] RgSchCellCb* cell
4266 * @param[out] RgSchRaCb *raCb
4271 PRIVATE S16 rgSCHCmnMsg4DedAlloc
4277 PRIVATE S16 rgSCHCmnMsg4DedAlloc(cell, raCb)
4283 RgSchDlRbAlloc *rbAllocinfo = &raCb->rbAllocInfo;
4287 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4288 U8 cfi = cellDl->currCfi;
4291 TRC2(rgSCHCmnMsg4DedAlloc);
4293 effBo = raCb->dlCcchInfo.bo + RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE;
4296 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4297 &rbAllocinfo->tbInfo[0].bytesReq,\
4298 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4299 #else /* LTEMAC_SPS */
4300 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4301 &rbAllocinfo->tbInfo[0].bytesReq,\
4302 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4304 #endif /* LTEMAC_SPS */
4307 /* Cannot exceed the total number of RBs in the cell */
4308 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4309 rbAllocinfo->dlSf->bwAssigned)))
4311 /* Check if atleast one allocation was possible.
4312 This may be the case where the Bw is very less and
4313 with the configured CCCH CQI, CCCH SDU exceeds the min Bw */
4314 if (rbAllocinfo->dlSf->bwAssigned == 0)
4316 numRb = rbAllocinfo->dlSf->bw;
4317 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4318 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4322 rbAllocinfo->rbsReq = numRb;
4323 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4324 /* DwPTS Scheduling Changes Start */
4326 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4328 rbAllocinfo->tbInfo[0].bytesReq =
4329 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1, cfi);
4332 /* DwPTS Scheduling Changes End */
4333 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4341 /* Update the subframe Allocated BW field */
4342 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4343 rbAllocinfo->rbsReq;
4344 rbAllocinfo->rnti = raCb->tmpCrnti;
4345 rbAllocinfo->tbInfo[0].tbCb = &raCb->dlHqE->msg4Proc->tbInfo[0];
4346 rbAllocinfo->tbInfo[0].schdlngForTb = TRUE;
4347 rbAllocinfo->tbInfo[0].noLyr = 1;
4354 * @brief This function implements scheduling for RA Response.
4358 * Function: rgSCHCmnDlRaRsp
4359 * Purpose: Downlink scheduling for RA responses.
4361 * Invoked by: Scheduler
4363 * @param[in] RgSchCellCb* cell
4368 PRIVATE Void rgSCHCmnDlRaRsp
4371 RgSchCmnDlRbAllocInfo *allocInfo
4374 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4376 RgSchCmnDlRbAllocInfo *allocInfo;
4379 CmLteTimingInfo frm;
4380 CmLteTimingInfo schFrm;
4386 RgSchTddRachRspLst *rachRsp;
4387 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
4391 TRC2(rgSCHCmnDlRaRsp);
4393 frm = cell->crntTime;
4394 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4396 /* Compute the subframe for which allocation is being made */
4397 /* essentially, we need pointer to the dl frame for this subframe */
4398 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4400 /* Get the RACH Response scheduling related information
4401 * for the subframe with RA index */
4402 raIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][frm.subframe]-1;
4404 rachRsp = &cell->rachRspLst[raIdx];
4406 for(sfnIdx = 0; sfnIdx < rachRsp->numRadiofrms; sfnIdx++)
4408 /* For all scheduled RACH Responses in SFNs */
4410 RG_SCH_CMN_DECR_FRAME(schFrm.sfn, rachRsp->rachRsp[sfnIdx].sfnOffset);
4411 /* For all scheduled RACH Responses in subframes */
4413 subfrmIdx < rachRsp->rachRsp[sfnIdx].numSubfrms; subfrmIdx++)
4415 schFrm.subframe = rachRsp->rachRsp[sfnIdx].subframe[subfrmIdx];
4416 /* compute the last RA RNTI used in the previous subframe */
4417 raIdx = (((schFrm.sfn % cell->raInfo.maxRaSize) * \
4418 RGSCH_NUM_SUB_FRAMES * RGSCH_MAX_RA_RNTI_PER_SUBFRM) \
4421 /* For all RA RNTIs within a subframe */
4423 for(i=0; (i < RGSCH_MAX_RA_RNTI_PER_SUBFRM) && \
4424 (noRaRnti < RGSCH_MAX_TDD_RA_RSP_ALLOC); i++)
4426 rarnti = (schFrm.subframe + RGSCH_NUM_SUB_FRAMES*i + 1);
4427 rntiIdx = (raIdx + RGSCH_NUM_SUB_FRAMES*i);
4429 if (cell->raInfo.raReqLst[rntiIdx].first != NULLP)
4431 /* compute the next RA RNTI */
4432 if (rgSCHCmnRaRspAlloc(cell, subFrm, rntiIdx,
4433 rarnti, noRaRnti, allocInfo) != ROK)
4435 /* The resources are exhausted */
4449 * @brief This function implements scheduling for RA Response.
4453 * Function: rgSCHCmnDlRaRsp
4454 * Purpose: Downlink scheduling for RA responses.
4456 * Invoked by: Scheduler
4458 * @param[in] RgSchCellCb* cell
4459 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4464 PRIVATE Void rgSCHCmnDlRaRsp //FDD
4467 RgSchCmnDlRbAllocInfo *allocInfo
4470 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4472 RgSchCmnDlRbAllocInfo *allocInfo;
4475 CmLteTimingInfo frm;
4476 CmLteTimingInfo winStartFrm;
4482 RgSchCmnCell *sched;
4484 TRC2(rgSCHCmnDlRaRsp);
4486 frm = cell->crntTime;
4487 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4489 /* Compute the subframe for which allocation is being made */
4490 /* essentially, we need pointer to the dl frame for this subframe */
4491 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4492 sched = RG_SCH_CMN_GET_CELL(cell);
4494 /* ccpu00132523 - Window Start calculated by considering RAR window size,
4495 * RAR Wait period, Subframes occuppied for respective preamble format*/
4496 winGap = (sched->dl.numRaSubFrms-1) + (cell->rachCfg.raWinSize-1)
4497 +RGSCH_RARSP_WAIT_PERIOD;
4499 /* Window starting occassion is retrieved using the gap and tried to
4500 * fit to the size of raReqLst array*/
4501 RGSCHDECRFRMCRNTTIME(frm, winStartFrm, winGap);
4503 //5G_TODO TIMING update. Need to check
4504 winStartIdx = (winStartFrm.sfn & 1) * RGSCH_MAX_RA_RNTI+ winStartFrm.slot;
4506 for(i = 0; ((i < cell->rachCfg.raWinSize) && (noRaRnti < RG_SCH_CMN_MAX_CMN_PDCCH)); i++)
4508 raIdx = (winStartIdx + i) % RGSCH_RAREQ_ARRAY_SIZE;
4510 if (cell->raInfo.raReqLst[raIdx].first != NULLP)
4512 allocInfo->raRspAlloc[noRaRnti].biEstmt = \
4513 (!i * RGSCH_ONE_BIHDR_SIZE);
4514 rarnti = raIdx % RGSCH_MAX_RA_RNTI+ 1;
4515 if (rgSCHCmnRaRspAlloc(cell, subFrm, raIdx,
4516 rarnti, noRaRnti, allocInfo) != ROK)
4518 /* The resources are exhausted */
4521 /* ccpu00132523- If all the RAP ids are not scheduled then need not
4522 * proceed for next RA RNTIs*/
4523 if(allocInfo->raRspAlloc[noRaRnti].numRapids < cell->raInfo.raReqLst[raIdx].count)
4527 noRaRnti++; /* Max of RG_SCH_CMN_MAX_CMN_PDCCH RARNTIs
4528 for response allocation */
4537 * @brief This function allocates the resources for an RARNTI.
4541 * Function: rgSCHCmnRaRspAlloc
4542 * Purpose: Allocate resources to a RARNTI.
4543 * 0. Allocate PDCCH for sending the response.
4544 * 1. Locate the number of RA requests pending for the RARNTI.
4545 * 2. Compute the size of data to be built.
4546 * 3. Using common channel CQI, compute the number of RBs.
4548 * Invoked by: Scheduler
4550 * @param[in] RgSchCellCb *cell,
4551 * @param[in] RgSchDlSf *subFrm,
4552 * @param[in] U16 rarnti,
4553 * @param[in] U8 noRaRnti
4554 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4559 PRIVATE S16 rgSCHCmnRaRspAlloc
4566 RgSchCmnDlRbAllocInfo *allocInfo
4569 PRIVATE S16 rgSCHCmnRaRspAlloc(cell,subFrm,raIndex,rarnti,noRaRnti,allocInfo)
4575 RgSchCmnDlRbAllocInfo *allocInfo;
4578 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4579 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4583 /*ccpu00116700,ccpu00116708- Corrected the wrong type for mcs*/
4586 /* RACH handling related changes */
4587 Bool isAlloc = FALSE;
4588 static U8 schdNumRapid = 0;
4594 U8 cfi = cellDl->currCfi;
4597 TRC2(rgSCHCmnRaRspAlloc);
4602 /* ccpu00132523: Resetting the schdRap Id count in every scheduling subframe*/
4609 if (subFrm->bw == subFrm->bwAssigned)
4611 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4612 "bw == bwAssigned RARNTI:%d",rarnti);
4616 reqLst = &cell->raInfo.raReqLst[raIndex];
4617 if (reqLst->count == 0)
4619 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4620 "reqLst Count=0 RARNTI:%d",rarnti);
4623 remNumRapid = reqLst->count;
4626 /* Limit number of rach rsps to maxMsg3PerUlsf */
4627 if ( schdNumRapid+remNumRapid > cellUl->maxMsg3PerUlSf )
4629 remNumRapid = cellUl->maxMsg3PerUlSf-schdNumRapid;
4635 /* Try allocating for as many RAPIDs as possible */
4636 /* BI sub-header size to the tbSize requirement */
4637 noBytes = RGSCH_GET_RAR_BYTES(remNumRapid) +\
4638 allocInfo->raRspAlloc[noRaRnti].biEstmt;
4639 if ((allwdTbSz = rgSCHUtlGetAllwdCchTbSz(noBytes*8, &nPrb, &mcs)) == -1)
4645 /* rgSCHCmnClcRbAllocForFxdTb(cell, allwdTbSz/8, cellDl->ccchCqi, &rb);*/
4646 if(cellDl->bitsPerRb==0)
4648 while ((rgTbSzTbl[0][0][rb]) <(U32) allwdTbSz)
4656 rb = RGSCH_CEIL(allwdTbSz, cellDl->bitsPerRb);
4658 /* DwPTS Scheduling Changes Start */
4660 if (subFrm->sfType == RG_SCH_SPL_SF_DATA)
4662 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
4664 /* Calculate the less RE's because of DwPTS */
4665 lostRe = rb * (cellDl->noResPerRb[cfi] -
4666 cellDl->numReDwPts[cfi]);
4668 /* Increase number of RBs in Spl SF to compensate for lost REs */
4669 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
4672 /* DwPTS Scheduling Changes End */
4674 /*ccpu00115595- end*/
4675 if (rb > subFrm->bw - subFrm->bwAssigned)
4680 /* Allocation succeeded for 'remNumRapid' */
4683 printf("\n!!!RAR alloc noBytes:%u,allwdTbSz:%u,tbs:%u,rb:%u\n",
4684 noBytes,allwdTbSz,tbs,rb);
4689 RLOG_ARG0(L_INFO,DBG_CELLID,cell->cellId,"BW alloc Failed");
4693 subFrm->bwAssigned = subFrm->bwAssigned + rb;
4695 /* Fill AllocInfo structure */
4696 allocInfo->raRspAlloc[noRaRnti].rnti = rarnti;
4697 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].bytesReq = tbs;
4698 allocInfo->raRspAlloc[noRaRnti].rbsReq = rb;
4699 allocInfo->raRspAlloc[noRaRnti].dlSf = subFrm;
4700 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].imcs = mcs;
4701 allocInfo->raRspAlloc[noRaRnti].raIndex = raIndex;
4702 /* RACH changes for multiple RAPID handling */
4703 allocInfo->raRspAlloc[noRaRnti].numRapids = remNumRapid;
4704 allocInfo->raRspAlloc[noRaRnti].nPrb = nPrb;
4705 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].noLyr = 1;
4706 allocInfo->raRspAlloc[noRaRnti].vrbgReq = RGSCH_CEIL(nPrb,MAX_5GTF_VRBG_SIZE);
4707 schdNumRapid += remNumRapid;
4711 /***********************************************************
4713 * Func : rgSCHCmnUlAllocFillRbInfo
4715 * Desc : Fills the start RB and the number of RBs for
4716 * uplink allocation.
4724 **********************************************************/
4726 PUBLIC Void rgSCHCmnUlAllocFillRbInfo
4733 PUBLIC Void rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc)
4736 RgSchUlAlloc *alloc;
4739 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4740 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4741 U8 cfi = cellDl->currCfi;
4744 TRC2(rgSCHCmnUlAllocFillRbInfo);
4745 alloc->grnt.rbStart = (alloc->sbStart * cellUl->sbSize) +
4746 cell->dynCfiCb.bwInfo[cfi].startRb;
4748 /* Num RBs = numSbAllocated * sbSize - less RBs in the last SB */
4749 alloc->grnt.numRb = (alloc->numSb * cellUl->sbSize);
4755 * @brief Grant request for Msg3.
4759 * Function : rgSCHCmnMsg3GrntReq
4761 * This is invoked by downlink scheduler to request allocation
4764 * - Attempt to allocate msg3 in the current msg3 subframe
4765 * Allocation attempt based on whether preamble is from group A
4766 * and the value of MESSAGE_SIZE_GROUP_A
4767 * - Link allocation with passed RNTI and msg3 HARQ process
4768 * - Set the HARQ process ID (*hqProcIdRef)
4770 * @param[in] RgSchCellCb *cell
4771 * @param[in] CmLteRnti rnti
4772 * @param[in] Bool preamGrpA
4773 * @param[in] RgSchUlHqProcCb *hqProc
4774 * @param[out] RgSchUlAlloc **ulAllocRef
4775 * @param[out] U8 *hqProcIdRef
4779 PRIVATE Void rgSCHCmnMsg3GrntReq
4784 RgSchUlHqProcCb *hqProc,
4785 RgSchUlAlloc **ulAllocRef,
4789 PRIVATE Void rgSCHCmnMsg3GrntReq(cell, rnti, preamGrpA, hqProc,
4790 ulAllocRef, hqProcIdRef)
4794 RgSchUlHqProcCb *hqProc;
4795 RgSchUlAlloc **ulAllocRef;
4799 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4800 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
4802 RgSchUlAlloc *alloc;
4806 TRC2(rgSCHCmnMsg3GrntReq);
4808 *ulAllocRef = NULLP;
4810 /* Fix: ccpu00120610 Use remAllocs from subframe during msg3 allocation */
4811 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
4815 if (preamGrpA == FALSE)
4817 numSb = cellUl->ra.prmblBNumSb;
4818 iMcs = cellUl->ra.prmblBIMcs;
4822 numSb = cellUl->ra.prmblANumSb;
4823 iMcs = cellUl->ra.prmblAIMcs;
4826 if ((hole = rgSCHUtlUlHoleFirst(sf)) != NULLP)
4828 if(*sf->allocCountRef == 0)
4830 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4831 /* Reinitialize the hole */
4832 if (sf->holeDb->count == 1 && (hole->start == 0)) /* Sanity check of holeDb */
4834 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4835 /* Re-Initialize available subbands because of CFI change*/
4836 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4840 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4841 "Error! holeDb sanity check failed RNTI:%d",rnti);
4844 if (numSb <= hole->num)
4847 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
4848 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
4849 alloc->grnt.iMcs = iMcs;
4850 alloc->grnt.iMcsCrnt = iMcs;
4851 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
4852 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
4853 /* To include the length and ModOrder in DataRecp Req.*/
4854 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
4855 RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
4856 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
4857 alloc->grnt.nDmrs = 0;
4858 alloc->grnt.hop = 0;
4859 alloc->grnt.delayBit = 0;
4860 alloc->grnt.isRtx = FALSE;
4861 *ulAllocRef = alloc;
4862 *hqProcIdRef = (cellUl->msg3SchdHqProcIdx);
4863 hqProc->procId = *hqProcIdRef;
4864 hqProc->ulSfIdx = (cellUl->msg3SchdIdx);
4867 alloc->pdcch = FALSE;
4868 alloc->forMsg3 = TRUE;
4869 alloc->hqProc = hqProc;
4870 rgSCHUhmNewTx(hqProc, (U8)(cell->rachCfg.maxMsg3Tx - 1), alloc);
4871 //RLOG_ARG4(L_DEBUG,DBG_CELLID,cell->cellId,
4873 "\nRNTI:%d MSG3 ALLOC proc(%p)procId(%d)schdIdx(%d)\n",
4875 ((PTR)alloc->hqProc),
4876 alloc->hqProc->procId,
4877 alloc->hqProc->ulSfIdx);
4878 RLOG_ARG2(L_DEBUG,DBG_CELLID,cell->cellId,
4879 "alloc(%p)maxMsg3Tx(%d)",
4881 cell->rachCfg.maxMsg3Tx);
4890 * @brief This function determines the allocation limits and
4891 * parameters that aid in DL scheduling.
4895 * Function: rgSCHCmnDlSetUeAllocLmt
4896 * Purpose: This function determines the Maximum RBs
4897 * a UE is eligible to get based on softbuffer
4898 * limitation and cell->>>maxDlBwPerUe. The Codeword
4899 * specific parameters like iTbs, eff and noLyrs
4900 * are also set in this function. This function
4901 * is called while UE configuration and UeDlCqiInd.
4903 * Invoked by: Scheduler
4905 * @param[in] RgSchCellCb *cellCb
4906 * @param[in] RgSchCmnDlUe *ueDl
4911 PRIVATE Void rgSCHCmnDlSetUeAllocLmt
4918 PRIVATE Void rgSCHCmnDlSetUeAllocLmt(cell, ueDl, isEmtcUe)
4926 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
4927 U8 cfi = cellSch->dl.currCfi;
4929 TRC2(rgSCHCmnDlSetUeAllocLmt);
4932 if(TRUE == isEmtcUe)
4934 /* ITbs for CW0 for 1 Layer Tx */
4935 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4936 [ueDl->mimoInfo.cwInfo[0].cqi];
4937 /* ITbs for CW0 for 2 Layer Tx */
4938 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4939 [ueDl->mimoInfo.cwInfo[0].cqi];
4940 /* Eff for CW0 for 1 Layer Tx */
4941 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4942 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4943 /* Eff for CW0 for 2 Layer Tx */
4944 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4945 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4947 /* ITbs for CW1 for 1 Layer Tx */
4948 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4949 [ueDl->mimoInfo.cwInfo[1].cqi];
4950 /* ITbs for CW1 for 2 Layer Tx */
4951 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4952 [ueDl->mimoInfo.cwInfo[1].cqi];
4953 /* Eff for CW1 for 1 Layer Tx */
4954 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4955 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4956 /* Eff for CW1 for 2 Layer Tx */
4957 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4958 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
4963 /* ITbs for CW0 for 1 Layer Tx */
4964 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4965 [ueDl->mimoInfo.cwInfo[0].cqi];
4966 /* ITbs for CW0 for 2 Layer Tx */
4967 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4968 [ueDl->mimoInfo.cwInfo[0].cqi];
4969 /* Eff for CW0 for 1 Layer Tx */
4970 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4971 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4972 /* Eff for CW0 for 2 Layer Tx */
4973 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4974 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4976 /* ITbs for CW1 for 1 Layer Tx */
4977 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4978 [ueDl->mimoInfo.cwInfo[1].cqi];
4979 /* ITbs for CW1 for 2 Layer Tx */
4980 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4981 [ueDl->mimoInfo.cwInfo[1].cqi];
4982 /* Eff for CW1 for 1 Layer Tx */
4983 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4984 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4985 /* Eff for CW1 for 2 Layer Tx */
4986 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4987 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
4991 // ueDl->laCb.cqiBasediTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0] * 100;
4993 /* Assigning noLyrs to each CW assuming optimal Spatial multiplexing
4995 (ueDl->mimoInfo.ri/2 == 0)? (ueDl->mimoInfo.cwInfo[0].noLyr = 1) : \
4996 (ueDl->mimoInfo.cwInfo[0].noLyr = ueDl->mimoInfo.ri/2);
4997 ueDl->mimoInfo.cwInfo[1].noLyr = ueDl->mimoInfo.ri - ueDl->mimoInfo.cwInfo[0].noLyr;
4998 /* rg002.101:ccpu00102106: correcting DL harq softbuffer limitation logic.
4999 * The maxTbSz is the maximum number of PHY bits a harq process can
5000 * hold. Hence we limit our allocation per harq process based on this.
5001 * Earlier implementation we misinterpreted the maxTbSz to be per UE
5002 * per TTI, but in fact it is per Harq per TTI. */
5003 /* rg002.101:ccpu00102106: cannot exceed the harq Tb Size
5004 * and harq Soft Bits limit.*/
5006 /* Considering iTbs corresponding to 2 layer transmission for
5007 * codeword0(approximation) and the maxLayers supported by
5008 * this UE at this point of time. */
5009 RG_SCH_CMN_TBS_TO_MODODR(ueDl->mimoInfo.cwInfo[0].iTbs[1], modOrder);
5011 /* Bits/modOrder gives #REs, #REs/noResPerRb gives #RBs */
5012 /* rg001.301 -MOD- [ccpu00119213] : avoiding wraparound */
5013 maxRb = ((ueDl->maxSbSz)/(cellSch->dl.noResPerRb[cfi] * modOrder *\
5014 ueDl->mimoInfo.ri));
5015 if (cellSch->dl.isDlFreqSel)
5017 /* Rounding off to left nearest multiple of RBG size */
5018 maxRb -= maxRb % cell->rbgSize;
5020 ueDl->maxRb = RGSCH_MIN(maxRb, cellSch->dl.maxDlBwPerUe);
5021 if (cellSch->dl.isDlFreqSel)
5023 /* Rounding off to right nearest multiple of RBG size */
5024 if (ueDl->maxRb % cell->rbgSize)
5026 ueDl->maxRb += (cell->rbgSize -
5027 (ueDl->maxRb % cell->rbgSize));
5031 /* Set the index of the cwInfo, which is better in terms of
5032 * efficiency. If RI<2, only 1 CW, hence btrCwIdx shall be 0 */
5033 if (ueDl->mimoInfo.ri < 2)
5035 ueDl->mimoInfo.btrCwIdx = 0;
5039 if (ueDl->mimoInfo.cwInfo[0].eff[ueDl->mimoInfo.cwInfo[0].noLyr-1] <\
5040 ueDl->mimoInfo.cwInfo[1].eff[ueDl->mimoInfo.cwInfo[1].noLyr-1])
5042 ueDl->mimoInfo.btrCwIdx = 1;
5046 ueDl->mimoInfo.btrCwIdx = 0;
5056 * @brief This function updates TX Scheme.
5060 * Function: rgSCHCheckAndSetTxScheme
5061 * Purpose: This function determines the Maximum RBs
5062 * a UE is eligible to get based on softbuffer
5063 * limitation and cell->>>maxDlBwPerUe. The Codeword
5064 * specific parameters like iTbs, eff and noLyrs
5065 * are also set in this function. This function
5066 * is called while UE configuration and UeDlCqiInd.
5068 * Invoked by: Scheduler
5070 * @param[in] RgSchCellCb *cell
5071 * @param[in] RgSchUeCb *ue
5076 PRIVATE Void rgSCHCheckAndSetTxScheme
5082 PRIVATE Void rgSCHCheckAndSetTxScheme(cell, ue)
5087 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5088 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue ,cell);
5089 U8 cfi = cellSch->dl.currCfi;
5094 TRC2(rgSCHCheckAndSetTxScheme);
5096 maxiTbs = (*(RgSchCmnCqiToTbs*)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
5097 [RG_SCH_CMN_MAX_CQI - 1];
5098 cqiBasediTbs = (ueDl->laCb[0].cqiBasediTbs)/100;
5099 actualiTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0];
5101 if((actualiTbs < RG_SCH_TXSCHEME_CHNG_ITBS_FACTOR) && (cqiBasediTbs >
5102 actualiTbs) && ((cqiBasediTbs - actualiTbs) > RG_SCH_TXSCHEME_CHNG_THRSHD))
5104 RG_SCH_CMN_SET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
5107 if(actualiTbs >= maxiTbs)
5109 RG_SCH_CMN_UNSET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
5116 * @brief This function determines the allocation limits and
5117 * parameters that aid in DL scheduling.
5121 * Function: rgSCHCmnDlSetUeAllocLmtLa
5122 * Purpose: This function determines the Maximum RBs
5123 * a UE is eligible to get based on softbuffer
5124 * limitation and cell->maxDlBwPerUe. The Codeword
5125 * specific parameters like iTbs, eff and noLyrs
5126 * are also set in this function. This function
5127 * is called while UE configuration and UeDlCqiInd.
5129 * Invoked by: Scheduler
5131 * @param[in] RgSchCellCb *cell
5132 * @param[in] RgSchUeCb *ue
5137 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa
5143 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa(cell, ue)
5151 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5152 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
5153 U8 cfi = cellSch->dl.currCfi;
5157 TRC2(rgSCHCmnDlSetUeAllocLmtLa);
/* Upper iTbs bound: entry for the highest CQI in the CQI->iTbs mapping
 * table at the current CFI. */
5159 maxiTbs = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[RG_SCH_CMN_MAX_CQI - 1];
/* Run link-adaptation update only when a fresh CQI report is pending. */
5160 if(ueDl->cqiFlag == TRUE)
5162 for(cwIdx=0; cwIdx < RG_SCH_CMN_MAX_CW_PER_UE; cwIdx++)
5166 /* Calculating the reported iTbs for code word 0 */
/* NOTE(review): the 5GTF path takes the UE-reported MCS directly as the
 * reported iTbs — assumes MCS and iTbs share a numbering here; confirm. */
5167 reportediTbs = ue->ue5gtfCb.mcs;
5169 iTbsNew = (S32) reportediTbs;
5171 if(!ueDl->laCb[cwIdx].notFirstCqi)
5173 /* This is the first CQI report from UE */
/* cqiBasediTbs is kept scaled by 100 for fixed-point averaging below. */
5174 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5175 ueDl->laCb[cwIdx].notFirstCqi = TRUE;
/* A report more than 5 iTbs away from the iTbs currently in use is
 * treated as an outlier. */
5177 else if ((RG_ITBS_DIFF(reportediTbs, ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0])) > 5)
5179 /* Ignore this iTBS report and mark that last iTBS report was */
5180 /* ignored so that subsequently we reset the LA algorithm */
5181 ueDl->laCb[cwIdx].lastiTbsIgnored = TRUE;
5182 ueDl->laCb[cwIdx].numLastiTbsIgnored++;
/* More than 10 consecutive ignored reports: re-seed LA from the latest
 * report instead of continuing to discard. */
5183 if( ueDl->laCb[cwIdx].numLastiTbsIgnored > 10)
5185 /* CQI reported by UE is not catching up. Reset the LA algorithm */
5186 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5187 ueDl->laCb[cwIdx].deltaiTbs = 0;
5188 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
5189 ueDl->laCb[cwIdx].numLastiTbsIgnored = 0;
5194 if (ueDl->laCb[cwIdx].lastiTbsIgnored != TRUE)
/* Exponentially-weighted moving average: 20% new report, 80% history
 * (in the x100 fixed-point domain). */
5196 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5197 (80 * ueDl->laCb[cwIdx].cqiBasediTbs))/100;
5201 /* Reset the LA as iTbs in use caught up with the value */
5202 /* reported by UE. */
5203 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5204 (80 * ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] * 100))/100;
5205 ueDl->laCb[cwIdx].deltaiTbs = 0;
5206 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
/* Working iTbs = averaged CQI-based value plus BLER-driven delta,
 * scaled back down from the x100 domain. */
5210 iTbsNew = (ueDl->laCb[cwIdx].cqiBasediTbs + ueDl->laCb[cwIdx].deltaiTbs)/100;
5212 RG_SCH_CHK_ITBS_RANGE(iTbsNew, maxiTbs);
/* Clamp against the cell-configured maximum DL iTbs. */
5214 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] = RGSCH_MIN(iTbsNew, cell->thresholds.maxDlItbs);
5215 //ueDl->mimoInfo.cwInfo[cwIdx].iTbs[1] = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
/* Feed the adapted iTbs back as the UE's 5GTF MCS for the next grant. */
5217 ue->ue5gtfCb.mcs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
/* NOTE(review): debug printf in scheduler fast path — candidate for
 * removal or demotion to a trace macro. */
5219 printf("reportediTbs[%d] cqiBasediTbs[%d] deltaiTbs[%d] iTbsNew[%d] mcs[%d] cwIdx[%d]\n",
5220 reportediTbs, ueDl->laCb[cwIdx].cqiBasediTbs, ueDl->laCb[cwIdx].deltaiTbs,
5221 iTbsNew, ue->ue5gtfCb.mcs, cwIdx);
/* Single-codeword transmission modes: only CW0 is adapted. */
5225 if((ue->mimoInfo.txMode != RGR_UE_TM_3) && (ue->mimoInfo.txMode != RGR_UE_TM_4))
/* Consume the pending CQI report. */
5230 ueDl->cqiFlag = FALSE;
5237 /***********************************************************
5239 * Func : rgSCHCmnDlHqPResetTemp
5241 * Desc : Reset whatever variables were temporarily used
5242 * during UE scheduling.
5250 **********************************************************/
5252 PUBLIC Void rgSCHCmnDlHqPResetTemp
5254 RgSchDlHqProcCb *hqP
5257 PUBLIC Void rgSCHCmnDlHqPResetTemp(hqP)
5258 RgSchDlHqProcCb *hqP;
5262 TRC2(rgSCHCmnDlHqPResetTemp);
5264 /* Fix: syed having a hqP added to Lists for RB assignment rather than
5265 * a UE, as adding UE was limiting handling some scenarios */
/* Detach the HARQ process from the RB-assignment request and scheduled
 * lists by clearing its list-link nodes. */
5266 hqP->reqLnk.node = (PTR)NULLP;
5267 hqP->schdLstLnk.node = (PTR)NULLP;
5270 } /* rgSCHCmnDlHqPResetTemp */
5272 /***********************************************************
5274 * Func : rgSCHCmnDlUeResetTemp
5276 * Desc : Reset whatever variables were temporarily used
5277 * during UE scheduling.
5285 **********************************************************/
5287 PUBLIC Void rgSCHCmnDlUeResetTemp
5290 RgSchDlHqProcCb *hqP
5293 PUBLIC Void rgSCHCmnDlUeResetTemp(ue, hqP)
5295 RgSchDlHqProcCb *hqP;
5298 RgSchDlRbAlloc *allocInfo;
5299 RgSchCmnDlUe *cmnUe = RG_SCH_CMN_GET_DL_UE(ue,hqP->hqE->cell);
5304 TRC2(rgSCHCmnDlUeResetTemp);
5306 /* Fix : syed check for UE's existence was useless.
5307 * Instead we need to check that reset is done only for the
5308 * information of a scheduled harq proc, which is cmnUe->proc.
5309 * Reset should not be done for non-scheduled hqP */
5310 if((cmnUe->proc == hqP) || (cmnUe->proc == NULLP))
5312 cmnUe->proc = NULLP;
5313 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, hqP->hqE->cell);
/* Preserve the LAA control block across the memset of the alloc info. */
5315 tmpCb = allocInfo->laaCb;
5317 cmMemset((U8 *)allocInfo, (U8)0, sizeof(RgSchDlRbAlloc));
5318 allocInfo->rnti = ue->ueId;
5320 allocInfo->laaCb = tmpCb;
5322 /* Fix: syed moving this to a common function for both scheduled
5323 * and non-scheduled UEs */
5324 cmnUe->outStndAlloc = 0;
/* Always clear the HARQ process' temporary list links, scheduled or not. */
5326 rgSCHCmnDlHqPResetTemp(hqP);
5329 } /* rgSCHCmnDlUeResetTemp */
5331 /***********************************************************
5333 * Func : rgSCHCmnUlUeResetTemp
5335 * Desc : Reset whatever variables were temporarily used
5336 * during UE scheduling.
5344 **********************************************************/
5346 PUBLIC Void rgSCHCmnUlUeResetTemp
5352 PUBLIC Void rgSCHCmnUlUeResetTemp(cell, ue)
5357 RgSchCmnUlUe *cmnUlUe = RG_SCH_CMN_GET_UL_UE(ue,cell);
5359 TRC2(rgSCHCmnUlUeResetTemp);
/* Zero the UE's per-TTI UL allocation scratch area. */
5361 cmMemset((U8 *)&cmnUlUe->alloc, (U8)0, sizeof(cmnUlUe->alloc));
5364 } /* rgSCHCmnUlUeResetTemp */
5369 * @brief This function fills the PDCCH information from dlProc.
5373 * Function: rgSCHCmnFillPdcch
5374 * Purpose: This function fills in the PDCCH information
5375 * obtained from the RgSchDlRbAlloc
5376 * during common channel scheduling(P, SI, RA - RNTI's).
5378 * Invoked by: Downlink Scheduler
5380 * @param[out] RgSchPdcch* pdcch
5381 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5386 PUBLIC Void rgSCHCmnFillPdcch
5390 RgSchDlRbAlloc *rbAllocInfo
5393 PUBLIC Void rgSCHCmnFillPdcch(cell, pdcch, rbAllocInfo)
5396 RgSchDlRbAlloc *rbAllocInfo;
5400 TRC2(rgSCHCmnFillPdcch);
5402 /* common channel pdcch filling,
5403 * only 1A and Local is supported */
5404 pdcch->rnti = rbAllocInfo->rnti;
5405 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
/* Populate the DCI payload according to the format the allocator chose. */
5406 switch(rbAllocInfo->dciFormat)
5408 #ifdef RG_5GTF /* ANOOP: ToDo: DCI format B1/B2 filling */
5409 case TFU_DCI_FORMAT_B1:
/* 5GTF format B1: common-channel grant, so HARQ id/NDI are fixed to 0
 * and many optional fields are hard-coded (see TODO_SID notes below). */
5412 pdcch->dci.u.formatB1Info.formatType = 0;
5413 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].cmnGrnt.xPDSCHRange;
5414 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].cmnGrnt.rbAssign;
5415 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = 0;
5416 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5417 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = 0;
5418 //pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].ndi;
5419 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].cmnGrnt.rv;
5420 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5421 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5422 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5423 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5424 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5425 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5426 //TODO_SID: Need to update
5427 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5428 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5429 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5430 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5431 //TODO_SID: Need to check.Currently setting 0(1 layer, ports(8) w/o OCC).
5432 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
5433 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].cmnGrnt.SCID;
5434 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
5435 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
5436 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
5438 break; /* case TFU_DCI_FORMAT_B1: */
/* Format B2 filling not implemented for common channels. */
5441 case TFU_DCI_FORMAT_B2:
5443 //printf(" RG_5GTF:: Pdcch filling with DCI format B2\n");
5445 break; /* case TFU_DCI_FORMAT_B2: */
5448 case TFU_DCI_FORMAT_1A:
5449 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
5451 /*Nprb indication at PHY for common Ch
5452 *setting least significant bit of tpc field to 1 if
5453 nPrb=3 and 0 otherwise. */
5454 if (rbAllocInfo->nPrb == 3)
5456 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 1;
5460 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 0;
/* Common-channel 1A uses localized type-2 allocation; encode the
 * (rbStart, numRb) pair as a RIV. */
5462 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
5463 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
5464 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
5465 rbAllocInfo->tbInfo[0].imcs;
/* New transmission on common channels: NDI and RV fixed at 0. */
5466 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = 0;
5467 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = 0;
5469 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
5471 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
5472 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
5473 rbAllocInfo->allocInfo.raType2.rbStart,
5474 rbAllocInfo->allocInfo.raType2.numRb);
5477 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = \
5480 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
5481 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
5484 break; /* case TFU_DCI_FORMAT_1A: */
5485 case TFU_DCI_FORMAT_1:
5486 pdcch->dci.u.format1Info.tpcCmd = 0;
5487 /* Avoiding this check,as we dont support Type1 RA */
5489 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
/* Type-0 RA: serialize the 32-bit RBG bitmask big-endian into the
 * 4-byte resource-allocation map. */
5492 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
5493 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
5494 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
5496 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
5497 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
5499 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
5500 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
5502 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
5503 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
5507 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
5508 pdcch->dci.u.format1Info.allocInfo.ndi = 0;
5509 pdcch->dci.u.format1Info.allocInfo.mcs = rbAllocInfo->tbInfo[0].imcs;
5510 pdcch->dci.u.format1Info.allocInfo.rv = 0;
5512 pdcch->dci.u.format1Info.dai = 1;
/* Any other DCI format here is an allocator bug; log and fall through. */
5516 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Allocator's icorrect "
5517 "dciForamt Fill RNTI:%d",rbAllocInfo->rnti);
5525 * @brief This function finds whether the subframe is special subframe or not.
5529 * Function: rgSCHCmnIsSplSubfrm
5530 * Purpose: This function finds the subframe index of the special subframe
5531 * and finds whether the current DL index matches it or not.
5533 * Invoked by: Scheduler
5535 * @param[in] U8 splfrmCnt
5536 * @param[in] U8 curSubfrmIdx
5537 * @param[in] U8 periodicity
5538 * @param[in] RgSchTddSubfrmInfo *subfrmInfo
5543 PRIVATE Bool rgSCHCmnIsSplSubfrm
5548 RgSchTddSubfrmInfo *subfrmInfo
5551 PRIVATE Bool rgSCHCmnIsSplSubfrm(splfrmCnt, curSubfrmIdx, periodicity, subfrmInfo)
5555 RgSchTddSubfrmInfo *subfrmInfo;
5561 TRC2(rgSCHCmnIsSplSubfrm);
/* 5 ms switch-point periodicity has a special subframe in each half
 * frame; count DL subframes consumed by preceding special subframes
 * to derive the index of the current one. */
5565 if(periodicity == RG_SCH_CMN_5_MS_PRD)
/* Odd special-subframe count: full (half1+half2) pairs plus half1. */
5569 dlSfCnt = ((splfrmCnt-1)/2) *\
5570 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
5571 dlSfCnt = dlSfCnt + subfrmInfo->numFrmHf1;
/* Even count: whole (half1+half2) pairs only. */
5575 dlSfCnt = (splfrmCnt/2) * \
5576 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
/* 10 ms periodicity: one special subframe per frame. */
5581 dlSfCnt = splfrmCnt * subfrmInfo->numFrmHf1;
5583 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1 +\
5584 (periodicity*splfrmCnt - dlSfCnt);
/* First occurrence: special subframe is always at the fixed index 1. */
5588 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1;
/* TRUE when the computed special-subframe index matches the current
 * DL subframe index. */
5591 if(splfrmIdx == curSubfrmIdx)
5600 * @brief This function updates DAI or UL index.
5604 * Function: rgSCHCmnUpdHqAndDai
5605 * Purpose: Updates the DAI based on UL-DL Configuration
5606 * index and UE. It also updates the HARQ feedback
5607 * time and 'm' index.
5611 * @param[in] RgDlHqProcCb *hqP
5612 * @param[in] RgSchDlSf *subFrm
5613 * @param[in] RgSchDlHqTbCb *tbCb
5614 * @param[in] U8 tbAllocIdx
5619 PRIVATE Void rgSCHCmnUpdHqAndDai
5621 RgSchDlHqProcCb *hqP,
5623 RgSchDlHqTbCb *tbCb,
5627 PRIVATE Void rgSCHCmnUpdHqAndDai(hqP, subFrm, tbCb,tbAllocIdx)
5628 RgSchDlHqProcCb *hqP;
5630 RgSchDlHqTbCb *tbCb;
5634 RgSchUeCb *ue = hqP->hqE->ue;
5636 TRC2(rgSCHCmnUpdHqAndDai);
5640 /* set the time at which UE shall send the feedback
5641 * for this process */
/* Feedback timing taken from the caller-supplied subframe. */
5642 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5643 subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5644 tbCb->fdbkTime.subframe = subFrm->dlFdbkInfo.subframe;
5645 tbCb->m = subFrm->dlFdbkInfo.m;
5649 /* set the time at which UE shall send the feedback
5650 * for this process */
/* Alternate branch: feedback timing taken from the HARQ process'
 * own subframe (branch condition elided in this listing). */
5651 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5652 hqP->subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5653 tbCb->fdbkTime.subframe = hqP->subFrm->dlFdbkInfo.subframe;
5654 tbCb->m = hqP->subFrm->dlFdbkInfo.m;
5657 /* ccpu00132340-MOD- DAI need to be updated for first TB only*/
5658 if(ue && !tbAllocIdx)
5660 Bool havePdcch = (tbCb->hqP->pdcch ? TRUE : FALSE);
/* Delegate the actual DAI bookkeeping to rgSCHCmnUpdDai. */
5663 dlDai = rgSCHCmnUpdDai(ue, &tbCb->fdbkTime, tbCb->m, havePdcch,tbCb->hqP,
5666 {/* Non SPS occasions */
5667 tbCb->hqP->pdcch->dlDai = dlDai;
5668 /* hqP->ulDai is used for N1 resource filling
5669 * when SPS occasions present in a bundle */
5670 tbCb->hqP->ulDai = tbCb->dai;
5671 tbCb->hqP->dlDai = dlDai;
5675 /* Updating pucchFdbkIdx for both PUCCH or PUSCH
5677 tbCb->pucchFdbkIdx = tbCb->hqP->ulDai;
5684 * @brief This function updates DAI or UL index.
5688 * Function: rgSCHCmnUpdDai
5689 * Purpose: Updates the DAI in the ack-nack info, a valid
5690 * ue should be passed
5694 * @param[in] RgDlHqProcCb *hqP
5695 * @param[in] RgSchDlSf *subFrm
5696 * @param[in] RgSchDlHqTbCb *tbCb
5701 PUBLIC U8 rgSCHCmnUpdDai
5704 CmLteTimingInfo *fdbkTime,
5707 RgSchDlHqProcCb *hqP,
5711 PUBLIC U8 rgSCHCmnUpdDai(ue, fdbkTime, m, havePdcch,tbCb,servCellId,hqP,ulDai)
5713 CmLteTimingInfo *fdbkTime;
5716 RgSchDlHqProcCb *hqP;
5720 RgSchTddANInfo *anInfo;
5722 U8 ackNackFdbkArrSize;
5725 TRC2(rgSCHCmnUpdDai);
/* Resolve the serving-cell index of the HARQ process' cell; falls back
 * to the primary-cell index (branching elided in this listing). */
5730 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
5731 hqP->hqE->cell->cellId,
5734 servCellIdx = RGSCH_PCELL_INDEX;
5736 ackNackFdbkArrSize = hqP->hqE->cell->ackNackFdbkArrSize;
5738 {/* SPS on primary cell */
5739 servCellIdx = RGSCH_PCELL_INDEX;
5740 ackNackFdbkArrSize = ue->cell->ackNackFdbkArrSize;
/* Look up an existing A/N bookkeeping entry for this feedback occasion. */
5744 anInfo = rgSCHUtlGetUeANFdbkInfo(ue, fdbkTime,servCellIdx);
5746 /* If no ACK/NACK feedback already present, create a new one */
5749 anInfo = &ue->cellInfo[servCellIdx]->anInfo[ue->cellInfo[servCellIdx]->nextFreeANIdx];
5750 anInfo->sfn = fdbkTime->sfn;
5751 anInfo->subframe = fdbkTime->subframe;
5752 anInfo->latestMIdx = m;
5753 /* Fixing DAI value - ccpu00109162 */
5754 /* Handle TDD case as in MIMO definition of the function */
5760 anInfo->isSpsOccasion = FALSE;
5761 /* set the free Index to store Ack/Nack Information*/
/* Circular advance of the next free slot in the A/N info array. */
5762 ue->cellInfo[servCellIdx]->nextFreeANIdx = (ue->cellInfo[servCellIdx]->nextFreeANIdx + 1) %
/* Existing entry: refresh 'm' and increment the running DAI counters. */
5768 anInfo->latestMIdx = m;
5769 /* Fixing DAI value - ccpu00109162 */
5770 /* Handle TDD case as in MIMO definition of the function */
5771 anInfo->ulDai = anInfo->ulDai + 1;
5774 anInfo->dlDai = anInfo->dlDai + 1;
5778 /* ignoring the Scell check,
5779 * for primary cell this field is unused*/
5782 anInfo->n1ResTpcIdx = hqP->tpc;
5786 {/* As this not required for release pdcch */
5787 *ulDai = anInfo->ulDai;
/* Return the accumulated DL DAI for this feedback occasion. */
5790 RETVALUE(anInfo->dlDai);
5793 #endif /* ifdef LTE_TDD */
5795 PUBLIC U32 rgHqRvRetxCnt[4][2];
5796 PUBLIC U32 rgUlrate_grant;
5799 * @brief This function fills the HqP TB with rbAllocInfo.
5803 * Function: rgSCHCmnFillHqPTb
5804 * Purpose: This function fills in the HqP TB with rbAllocInfo.
5806 * Invoked by: rgSCHCmnFillHqPTb
5808 * @param[in] RgSchCellCb* cell
5809 * @param[in] RgSchDlRbAlloc *rbAllocInfo,
5810 * @param[in] U8 tbAllocIdx
5811 * @param[in] RgSchPdcch *pdcch
/* NOTE(review): two prototypes follow — PUBLIC under LTEMAC_SPS and
 * PRIVATE otherwise (the #ifdef lines are elided in this listing). */
5817 PUBLIC Void rgSCHCmnFillHqPTb
5820 RgSchDlRbAlloc *rbAllocInfo,
5825 PUBLIC Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5827 RgSchDlRbAlloc *rbAllocInfo;
5833 PRIVATE Void rgSCHCmnFillHqPTb
5836 RgSchDlRbAlloc *rbAllocInfo,
5841 PRIVATE Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5843 RgSchDlRbAlloc *rbAllocInfo;
5847 #endif /* LTEMAC_SPS */
5849 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
5850 RgSchDlTbAllocInfo *tbAllocInfo = &rbAllocInfo->tbInfo[tbAllocIdx];
5851 RgSchDlHqTbCb *tbInfo = tbAllocInfo->tbCb;
5852 RgSchDlHqProcCb *hqP = tbInfo->hqP;
5854 TRC2(rgSCHCmnFillHqPTb);
5856 /*ccpu00120365-ADD-if tb is disabled, set mcs=0,rv=1.
5857 * Relevant for DCI format 2 & 2A as per 36.213-7.1.7.2
5859 if ( tbAllocInfo->isDisabled)
5862 tbInfo->dlGrnt.iMcs = 0;
5863 tbInfo->dlGrnt.rv = 1;
5865 /* Fill for TB retransmission */
5866 else if (tbInfo->txCntr > 0)
5869 tbInfo->timingInfo = cmnCellDl->time;
/* On DTX the previous PDCCH may have been missed: keep the allocated
 * iMcs and track per-RV retransmission statistics. */
5871 if ((tbInfo->isAckNackDtx == TFU_HQFDB_DTX))
5873 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5874 rgHqRvRetxCnt[tbInfo->dlGrnt.rv][tbInfo->tbIdx]++;
/* Otherwise advance the RV through the cyclic RV table. */
5878 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[++(tbInfo->ccchSchdInfo.rvIdx) & 0x03];
5881 /* fill the scheduler information of hqProc */
5882 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5883 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx,hqP->tbInfo,tbInfo->tbIdx );
5884 rgSCHDhmHqTbRetx(hqP->hqE, tbInfo->timingInfo, hqP, tbInfo->tbIdx);
5886 /* Fill for TB transmission */
5889 /* Fill the HqProc */
5890 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5891 tbInfo->tbSz = tbAllocInfo->bytesAlloc;
5892 tbInfo->timingInfo = cmnCellDl->time;
/* New transmission always starts at the first RV in the table. */
5894 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[0];
5895 /* fill the scheduler information of hqProc */
5896 tbInfo->ccchSchdInfo.rvIdx = 0;
5897 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5898 /* DwPts Scheduling Changes Start */
5899 /* DwPts Scheduling Changes End */
5900 cell->measurements.dlBytesCnt += tbAllocInfo->bytesAlloc;
5903 /*ccpu00120365:-ADD-only add to subFrm list if tb is not disabled */
5904 if ( tbAllocInfo->isDisabled == FALSE )
5906 /* Set the number of transmitting SM layers for this TB */
5907 tbInfo->numLyrs = tbAllocInfo->noLyr;
5908 /* Set the TB state as WAITING to indicate TB has been
5909 * considered for transmission */
5910 tbInfo->state = HQ_TB_WAITING;
5911 hqP->subFrm = rbAllocInfo->dlSf;
5912 tbInfo->hqP->pdcch = pdcch;
5913 //tbInfo->dlGrnt.numRb = rbAllocInfo->rbsAlloc;
5914 rgSCHUtlDlHqPTbAddToTx(hqP->subFrm, hqP, tbInfo->tbIdx);
5920 * @brief This function fills the PDCCH DCI format 2 information from dlProc.
5924 * Function: rgSCHCmnFillHqPPdcchDciFrmt2
5925 * Purpose: This function fills in the PDCCH information
5926 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
5927 * for dedicated service scheduling. It also
5928 * obtains TPC to be filled in from the power module.
5929 * Assign the PDCCH to HQProc.
5931 * Invoked by: Downlink Scheduler
5933 * @param[in] RgSchCellCb* cell
5934 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5935 * @param[in] RgDlHqProc* hqP
5936 * @param[out] RgSchPdcch *pdcch
5942 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2
5945 RgSchDlRbAlloc *rbAllocInfo,
5946 RgSchDlHqProcCb *hqP,
5951 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, pdcch, tpc)
5953 RgSchDlRbAlloc *rbAllocInfo;
5954 RgSchDlHqProcCb *hqP;
5960 TRC2(rgSCHCmnFillHqPPdcchDciFrmtB1B2)
/* Commit TB0 allocation into the HARQ process before encoding the DCI. */
5962 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
5963 //Currently hardcoding values here.
5964 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
5965 switch(rbAllocInfo->dciFormat)
5967 case TFU_DCI_FORMAT_B1:
/* 5GTF format B1 (single layer): dedicated grant, so HARQ id, NDI and
 * RV come from the HARQ TB, unlike the common-channel path. */
5969 pdcch->dci.u.formatB1Info.formatType = 0;
5970 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
5971 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
5972 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
5973 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5974 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
5975 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
5976 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5977 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5978 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5979 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5980 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5981 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5982 //TODO_SID: Need to update
5983 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5984 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5985 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5986 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5987 //TODO_SID: Need to check.Currently setting 0(1 layer, ports(8) w/o OCC).
5988 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
5989 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
5990 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
5991 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
5992 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
5995 case TFU_DCI_FORMAT_B2:
/* 5GTF format B2 (two layers): same sourcing as B1 except formatType
 * and the 2-layer antenna-port encoding below. */
5997 pdcch->dci.u.formatB2Info.formatType = 1;
5998 pdcch->dci.u.formatB2Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
5999 pdcch->dci.u.formatB2Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
6000 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
6001 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
6002 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6003 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6004 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
6005 pdcch->dci.u.formatB2Info.CSI_BSI_BRI_Req = 0;
6006 pdcch->dci.u.formatB2Info.CSIRS_BRRS_TxTiming = 0;
6007 pdcch->dci.u.formatB2Info.CSIRS_BRRS_SymbIdx = 0;
6008 pdcch->dci.u.formatB2Info.CSIRS_BRRS_ProcInd = 0;
6009 pdcch->dci.u.formatB2Info.xPUCCH_TxTiming = 0;
6010 //TODO_SID: Need to update
6011 pdcch->dci.u.formatB2Info.freqResIdx_xPUCCH = 0;
6012 pdcch->dci.u.formatB2Info.beamSwitch = 0;
6013 pdcch->dci.u.formatB2Info.SRS_Config = 0;
6014 pdcch->dci.u.formatB2Info.SRS_Symbol = 0;
6015 //TODO_SID: Need to check.Currently setting 4(2 layer, ports(8,9) w/o OCC).
6016 pdcch->dci.u.formatB2Info.AntPorts_numLayers = 4;
6017 pdcch->dci.u.formatB2Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
6018 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
6019 pdcch->dci.u.formatB2Info.tpcCmd = 1; //tpc;
6020 pdcch->dci.u.formatB2Info.DL_PCRS = 0;
/* Default: allocator handed us a format this filler cannot encode. */
6024 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId," 5GTF_ERROR Allocator's icorrect "
6025 "dciForamt Fill RNTI:%d",rbAllocInfo->rnti);
6032 extern U32 totPcellSCell;
6033 extern U32 addedForScell;
6034 extern U32 addedForScell1;
6035 extern U32 addedForScell2;
6037 * @brief This function fills the PDCCH information from dlProc.
6041 * Function: rgSCHCmnFillHqPPdcch
6042 * Purpose: This function fills in the PDCCH information
6043 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6044 * for dedicated service scheduling. It also
6045 * obtains TPC to be filled in from the power module.
6046 * Assign the PDCCH to HQProc.
6048 * Invoked by: Downlink Scheduler
6050 * @param[in] RgSchCellCb* cell
6051 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6052 * @param[in] RgDlHqProc* hqP
6057 PUBLIC Void rgSCHCmnFillHqPPdcch
6060 RgSchDlRbAlloc *rbAllocInfo,
6061 RgSchDlHqProcCb *hqP
6064 PUBLIC Void rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP)
6066 RgSchDlRbAlloc *rbAllocInfo;
6067 RgSchDlHqProcCb *hqP;
6070 RgSchCmnDlCell *cmnCell = RG_SCH_CMN_GET_DL_CELL(cell);
6071 RgSchPdcch *pdcch = rbAllocInfo->pdcch;
6074 TRC2(rgSCHCmnFillHqPPdcch);
/* TPC differs for SCell vs PCell (SCell branch body elided here). */
6079 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6086 tpc = rgSCHPwrPucchTpcForUe(cell, hqP->hqE->ue);
6088 /* Fix: syed moving this to a common function for both scheduled
6089 * and non-scheduled UEs */
6091 pdcch->ue = hqP->hqE->ue;
/* PRB accounting: non-CSG members tracked separately from the total. */
6092 if (hqP->hqE->ue->csgMmbrSta == FALSE)
6094 cmnCell->ncsgPrbCnt += rbAllocInfo->rbsAlloc;
6096 cmnCell->totPrbCnt += rbAllocInfo->rbsAlloc;
/* Per-UE L2 measurement counters for CW0 (PRB usage, iTbs, throughput). */
6099 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlPrbUsg +=
6100 rbAllocInfo->rbsAlloc;
6101 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw0iTbs +=
6102 rbAllocInfo->tbInfo[0].iTbs;
6103 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw0iTbs ++;
6104 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6105 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
/* Debug accumulators splitting throughput between PCell and SCell. */
6108 totPcellSCell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6109 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6111 addedForScell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6112 addedForScell1 += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6114 printf (" Hqp %d cell %d addedForScell %lu addedForScell1 %lu sfn:sf %d:%d \n",
6116 hqP->hqE->cell->cellId,
6120 cell->crntTime.slot);
/* Per-cell counters mirror the per-UE ones above. */
6124 hqP->hqE->cell->tenbStats->sch.dlPrbUsage[0] +=
6125 rbAllocInfo->rbsAlloc;
6126 hqP->hqE->cell->tenbStats->sch.dlSumCw0iTbs +=
6127 rbAllocInfo->tbInfo[0].iTbs;
6128 hqP->hqE->cell->tenbStats->sch.dlNumCw0iTbs ++;
6129 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6130 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
/* Second codeword scheduled: add the CW1 contribution as well. */
6131 if (rbAllocInfo->tbInfo[1].schdlngForTb)
6133 hqP->hqE->cell->tenbStats->sch.dlSumCw1iTbs +=
6134 rbAllocInfo->tbInfo[1].iTbs;
6135 hqP->hqE->cell->tenbStats->sch.dlNumCw1iTbs ++;
6136 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw1iTbs +=
6137 rbAllocInfo->tbInfo[1].iTbs;
6138 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw1iTbs ++;
6139 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6140 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6144 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6146 addedForScell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6147 addedForScell2 += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6149 printf (" Hqp %d cell %d addedForScell %lu addedForScell2 %lu \n",
6151 hqP->hqE->cell->cellId,
6156 totPcellSCell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6160 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6161 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6164 printf ("add DL TPT is %lu sfn:sf %d:%d \n", hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt ,
6166 cell->crntTime.slot);
6172 pdcch->rnti = rbAllocInfo->rnti;
6173 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
6174 /* Update subframe and pdcch info in HqTb control block */
/* Dispatch to the per-format DCI filler. */
6175 switch(rbAllocInfo->dciFormat)
6178 case TFU_DCI_FORMAT_B1:
6179 case TFU_DCI_FORMAT_B2:
6181 // printf(" RG_5GTF:: Pdcch filling with DCI format B1/B2\n");
6182 rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, \
6188 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6189 "Allocator's incorrect dciForamt Fill for RNTI:%d",rbAllocInfo->rnti);
6196 * @brief This function fills the PDCCH DCI format 1 information from dlProc.
6200 * Function: rgSCHCmnFillHqPPdcchDciFrmt1
6201 * Purpose: This function fills in the PDCCH information
6202 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6203 * for dedicated service scheduling. It also
6204 * obtains TPC to be filled in from the power module.
6205 * Assign the PDCCH to HQProc.
6207 * Invoked by: Downlink Scheduler
6209 * @param[in] RgSchCellCb* cell
6210 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6211 * @param[in] RgDlHqProc* hqP
6212 * @param[out] RgSchPdcch *pdcch
6218 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1
6221 RgSchDlRbAlloc *rbAllocInfo,
6222 RgSchDlHqProcCb *hqP,
6227 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1(cell, rbAllocInfo, hqP, pdcch, tpc)
6229 RgSchDlRbAlloc *rbAllocInfo;
6230 RgSchDlHqProcCb *hqP;
6237 RgSchTddANInfo *anInfo;
6241 /* For activation or reactivation,
6242 * Harq ProcId should be 0 */
6243 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6246 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1)
/* Commit TB0 allocation into the HARQ process before encoding the DCI. */
6248 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6249 pdcch->dci.u.format1Info.tpcCmd = tpc;
6250 /* Avoiding this check,as we dont support Type1 RA */
6252 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
/* Type-0 RA: serialize the 32-bit RBG bitmask big-endian into the
 * 4-byte resource-allocation map. */
6255 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
6256 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
6257 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6259 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
6260 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6262 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
6263 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6265 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
6266 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
/* SPS activation/reactivation on a first transmission signals
 * HARQ process id 0 in the DCI. */
6271 if ((!(hqP->tbInfo[0].txCntr)) &&
6272 (cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6273 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6274 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV)))
6277 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6281 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6284 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6287 pdcch->dci.u.format1Info.allocInfo.ndi =
6288 rbAllocInfo->tbInfo[0].tbCb->ndi;
6289 pdcch->dci.u.format1Info.allocInfo.mcs =
6290 rbAllocInfo->tbInfo[0].imcs;
6291 pdcch->dci.u.format1Info.allocInfo.rv =
6292 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
/* TDD: derive the DAI from the UE's ACK/NACK bookkeeping for this
 * feedback occasion. */
6294 if(hqP->hqE->ue != NULLP)
6297 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6298 hqP->hqE->cell->cellId,
6301 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6302 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6304 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6305 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6310 pdcch->dci.u.format1Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
/* No A/N entry found: fall back to the maximum DAI index. */
6314 /* Fixing DAI value - ccpu00109162 */
6315 pdcch->dci.u.format1Info.dai = RG_SCH_MAX_DAI_IDX;
6321 /* always 0 for RACH */
6322 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6324 /* Fixing DAI value - ccpu00109162 */
6325 pdcch->dci.u.format1Info.dai = 1;
6334 * @brief This function fills the PDCCH DCI format 1A information from dlProc.
6338 * Function: rgSCHCmnFillHqPPdcchDciFrmt1A
6339 * Purpose: This function fills in the PDCCH information
6340 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6341 * for dedicated service scheduling. It also
6342 * obtains TPC to be filled in from the power module.
6343 * Assign the PDCCH to HQProc.
6345 * Invoked by: Downlink Scheduler
6347 * @param[in] RgSchCellCb* cell
6348 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6349 * @param[in] RgDlHqProc* hqP
6350 * @param[out] RgSchPdcch *pdcch
6356 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A
6359 RgSchDlRbAlloc *rbAllocInfo,
6360 RgSchDlHqProcCb *hqP,
6365 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A(cell, rbAllocInfo, hqP, pdcch, tpc)
6367 RgSchDlRbAlloc *rbAllocInfo;
6368 RgSchDlHqProcCb *hqP;
6375 RgSchTddANInfo *anInfo;
6379 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6382 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1A)
/* Commit TB0 allocation into the HARQ process before encoding the DCI. */
6384 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6385 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
6386 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = tpc;
6387 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
6388 rbAllocInfo->tbInfo[0].imcs;
6389 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = TRUE;
/* SPS activation/reactivation on a first transmission signals
 * HARQ process id 0 in the DCI. */
6391 if ((!(hqP->tbInfo[0].txCntr)) &&
6392 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6393 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6394 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6397 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val = 0;
6401 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val
6405 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val =
6408 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = \
6409 rbAllocInfo->tbInfo[0].tbCb->ndi;
6410 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = \
6411 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6412 /* As of now, we do not support Distributed allocations */
6413 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
6414 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
6415 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
/* Localized type-2 allocation encoded as a RIV from (rbStart, numRb). */
6417 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
6418 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6419 rbAllocInfo->allocInfo.raType2.rbStart,
6420 rbAllocInfo->allocInfo.raType2.numRb);
/* TDD: derive the DAI from the UE's ACK/NACK bookkeeping for this
 * feedback occasion. */
6422 if(hqP->hqE->ue != NULLP)
6425 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6426 hqP->hqE->cell->cellId,
6428 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6429 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6431 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6432 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6435 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6438 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val =
6439 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
/* No A/N entry found: fall back to the maximum DAI index and flag the
 * inconsistency, since scheduling should have created the entry. */
6443 /* Fixing DAI value - ccpu00109162 */
6444 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = RG_SCH_MAX_DAI_IDX;
6445 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6446 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6453 /* always 0 for RACH */
6454 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres
6457 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6458 /* Fixing DAI value - ccpu00109162 */
6459 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
6467 * @brief This function fills the PDCCH DCI format 1B information from dlProc.
6471 * Function: rgSCHCmnFillHqPPdcchDciFrmt1B
6472 * Purpose: This function fills in the PDCCH information
6473 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6474 * for dedicated service scheduling. It also
6475 * obtains TPC to be filled in from the power module.
6476 * Assign the PDCCH to HQProc.
6478 * Invoked by: Downlink Scheduler
6480 * @param[in] RgSchCellCb* cell
6481 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6482 * @param[in] RgDlHqProc* hqP
6483 * @param[out] RgSchPdcch *pdcch
6489 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B
6492 RgSchDlRbAlloc *rbAllocInfo,
6493 RgSchDlHqProcCb *hqP,
6498 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B(cell, rbAllocInfo, hqP, pdcch, tpc)
6500 RgSchDlRbAlloc *rbAllocInfo;
6501 RgSchDlHqProcCb *hqP;
6508 RgSchTddANInfo *anInfo;
6512 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6515 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1B)
/* Populate the per-TB PDCCH fields (common to all DCI formats) for TB0 */
6517 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6518 pdcch->dci.u.format1bInfo.tpcCmd = tpc;
6519 pdcch->dci.u.format1bInfo.allocInfo.mcs = \
6520 rbAllocInfo->tbInfo[0].imcs;
/* First transmission of an SPS activation/reactivation uses HARQ proc 0
 * (see ccpu00119023 note in the format-2 variant of this function) */
6522 if ((!(hqP->tbInfo[0].txCntr)) &&
6523 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6524 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6525 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6528 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = 0;
6532 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
6535 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
6537 pdcch->dci.u.format1bInfo.allocInfo.ndi = \
6538 rbAllocInfo->tbInfo[0].tbCb->ndi;
6539 pdcch->dci.u.format1bInfo.allocInfo.rv = \
6540 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6541 /* As of now, we do not support Distributed allocations */
6542 pdcch->dci.u.format1bInfo.allocInfo.isLocal = TRUE;
6543 pdcch->dci.u.format1bInfo.allocInfo.nGap2.pres = NOTPRSNT;
6544 pdcch->dci.u.format1bInfo.allocInfo.alloc.type =
6546 pdcch->dci.u.format1bInfo.allocInfo.alloc.u.riv =
6547 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6548 rbAllocInfo->allocInfo.raType2.rbStart,
6549 rbAllocInfo->allocInfo.raType2.numRb);
6550 /* Fill precoding Info */
6551 pdcch->dci.u.format1bInfo.allocInfo.pmiCfm = \
6552 rbAllocInfo->mimoAllocInfo.precIdxInfo >> 4;
6553 pdcch->dci.u.format1bInfo.allocInfo.tPmi = \
6554 rbAllocInfo->mimoAllocInfo.precIdxInfo & 0x0F;
/* TDD path: derive the DAI from the UE's A/N feedback bookkeeping;
 * if no entry is found, fall back to the max DAI index and log below */
6556 if(hqP->hqE->ue != NULLP)
6559 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6560 hqP->hqE->cell->cellId,
6562 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6563 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6565 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6566 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6571 pdcch->dci.u.format1bInfo.dai =
6572 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6576 pdcch->dci.u.format1bInfo.dai = RG_SCH_MAX_DAI_IDX;
6577 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6578 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6589 * @brief This function fills the PDCCH DCI format 2 information from dlProc.
6593 * Function: rgSCHCmnFillHqPPdcchDciFrmt2
6594 * Purpose: This function fills in the PDCCH information
6595 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6596 * for dedicated service scheduling. It also
6597 * obtains TPC to be filled in from the power module.
6598 * Assign the PDCCH to HQProc.
6600 * Invoked by: Downlink Scheduler
6602 * @param[in] RgSchCellCb* cell
6603 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6604 * @param[in] RgDlHqProc* hqP
6605 * @param[out] RgSchPdcch *pdcch
6611 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2
6614 RgSchDlRbAlloc *rbAllocInfo,
6615 RgSchDlHqProcCb *hqP,
6620 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2(cell, rbAllocInfo, hqP, pdcch, tpc)
6622 RgSchDlRbAlloc *rbAllocInfo;
6623 RgSchDlHqProcCb *hqP;
6630 RgSchTddANInfo *anInfo;
6634 /* ccpu00119023-ADD-For activation or reactivation,
6635 * Harq ProcId should be 0 */
6636 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6639 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2)
/* Populate per-TB PDCCH fields for TB0; TB1 follows if scheduled or disabled */
6641 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6642 /*ccpu00120365:-ADD-call also if tb is disabled */
6643 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6644 rbAllocInfo->tbInfo[1].isDisabled)
6646 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6648 pdcch->dci.u.format2Info.tpcCmd = tpc;
6649 /* Avoiding this check,as we dont support Type1 RA */
6651 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6654 pdcch->dci.u.format2Info.allocInfo.isAllocType0 = TRUE;
6655 pdcch->dci.u.format2Info.allocInfo.resAllocMap[0] =
6656 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6658 pdcch->dci.u.format2Info.allocInfo.resAllocMap[1] =
6659 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6661 pdcch->dci.u.format2Info.allocInfo.resAllocMap[2] =
6662 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6664 pdcch->dci.u.format2Info.allocInfo.resAllocMap[3] =
6665 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
/* First transmission of an SPS (re)activation uses HARQ proc 0 (ccpu00119023) */
6670 if ((!(hqP->tbInfo[0].txCntr)) &&
6671 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6672 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6673 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6676 pdcch->dci.u.format2Info.allocInfo.harqProcId = 0;
6680 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6683 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6685 /* Initialize the TB info for both the TBs */
6686 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].mcs = 0;
6687 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].rv = 1;
6688 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].mcs = 0;
6689 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].rv = 1;
6690 /* Fill tbInfo for scheduled TBs */
6691 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6692 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6693 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6694 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6695 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6696 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6697 /* If we reach this function. It is safely assumed that
6698 * rbAllocInfo->tbInfo[0] always has non default valid values.
6699 * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6700 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6702 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6703 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6704 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6705 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6706 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6707 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6709 pdcch->dci.u.format2Info.allocInfo.transSwap =
6710 rbAllocInfo->mimoAllocInfo.swpFlg;
6711 pdcch->dci.u.format2Info.allocInfo.precoding =
6712 rbAllocInfo->mimoAllocInfo.precIdxInfo;
/* TDD path: DAI from A/N feedback bookkeeping; max DAI + error log if absent */
6714 if(hqP->hqE->ue != NULLP)
6718 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6719 hqP->hqE->cell->cellId,
6721 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6722 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6724 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6725 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6730 pdcch->dci.u.format2Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6734 pdcch->dci.u.format2Info.dai = RG_SCH_MAX_DAI_IDX;
6735 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6736 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6746 * @brief This function fills the PDCCH DCI format 2A information from dlProc.
6750 * Function: rgSCHCmnFillHqPPdcchDciFrmt2A
6751 * Purpose: This function fills in the PDCCH information
6752 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6753 * for dedicated service scheduling. It also
6754 * obtains TPC to be filled in from the power module.
6755 * Assign the PDCCH to HQProc.
6757 * Invoked by: Downlink Scheduler
6759 * @param[in] RgSchCellCb* cell
6760 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6761 * @param[in] RgDlHqProc* hqP
6762 * @param[out] RgSchPdcch *pdcch
6768 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A
6771 RgSchDlRbAlloc *rbAllocInfo,
6772 RgSchDlHqProcCb *hqP,
6777 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A(cell, rbAllocInfo, hqP, pdcch, tpc)
6779 RgSchDlRbAlloc *rbAllocInfo;
6780 RgSchDlHqProcCb *hqP;
6786 RgSchTddANInfo *anInfo;
6790 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6793 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2A)
/* Populate per-TB PDCCH fields for TB0; TB1 follows if scheduled or disabled */
6795 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6796 /*ccpu00120365:-ADD-call also if tb is disabled */
6797 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6798 rbAllocInfo->tbInfo[1].isDisabled)
6801 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6804 pdcch->dci.u.format2AInfo.tpcCmd = tpc;
6805 /* Avoiding this check,as we dont support Type1 RA */
6807 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6810 pdcch->dci.u.format2AInfo.allocInfo.isAllocType0 = TRUE;
6811 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[0] =
6812 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6814 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[1] =
6815 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6817 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[2] =
6818 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6820 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[3] =
6821 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
/* First transmission of an SPS (re)activation uses HARQ proc 0 (ccpu00119023) */
6826 if ((!(hqP->tbInfo[0].txCntr)) &&
6827 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6828 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6829 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6832 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = 0;
6836 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6839 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6841 /* Initialize the TB info for both the TBs */
6842 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].mcs = 0;
6843 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].rv = 1;
6844 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].mcs = 0;
6845 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].rv = 1;
6846 /* Fill tbInfo for scheduled TBs */
6847 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6848 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6849 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6850 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6851 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6852 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6853 /* If we reach this function. It is safely assumed that
6854 * rbAllocInfo->tbInfo[0] always has non default valid values.
6855 * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6857 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6859 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6860 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6861 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6862 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6863 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6864 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6867 pdcch->dci.u.format2AInfo.allocInfo.transSwap =
6868 rbAllocInfo->mimoAllocInfo.swpFlg;
6869 pdcch->dci.u.format2AInfo.allocInfo.precoding =
6870 rbAllocInfo->mimoAllocInfo.precIdxInfo;
/* TDD path: DAI from A/N feedback bookkeeping; max DAI + error log if absent */
6872 if(hqP->hqE->ue != NULLP)
6875 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6876 hqP->hqE->cell->cellId,
6878 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6879 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6881 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6882 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6887 pdcch->dci.u.format2AInfo.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6891 pdcch->dci.u.format2AInfo.dai = RG_SCH_MAX_DAI_IDX;
6892 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6893 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6905 * @brief init of Sch vars.
6909 * Function: rgSCHCmnInitVars
6910 Purpose: Initialization of various UL subframe indices
6912 * @param[in] RgSchCellCb *cell
6917 PRIVATE Void rgSCHCmnInitVars
6922 PRIVATE Void rgSCHCmnInitVars(cell)
6926 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6928 TRC2(rgSCHCmnInitVars);
/* Mark every per-TTI scheduling index invalid until rgSCHCmnUpdVars()
 * computes real values for the current subframe */
6930 cellUl->idx = RGSCH_INVALID_INFO;
6931 cellUl->schdIdx = RGSCH_INVALID_INFO;
6932 cellUl->schdHqProcIdx = RGSCH_INVALID_INFO;
6933 cellUl->msg3SchdIdx = RGSCH_INVALID_INFO;
6935 cellUl->emtcMsg3SchdIdx = RGSCH_INVALID_INFO;
6937 cellUl->msg3SchdHqProcIdx = RGSCH_INVALID_INFO;
6938 cellUl->rcpReqIdx = RGSCH_INVALID_INFO;
6939 cellUl->hqFdbkIdx[0] = RGSCH_INVALID_INFO;
6940 cellUl->hqFdbkIdx[1] = RGSCH_INVALID_INFO;
6941 cellUl->reTxIdx[0] = RGSCH_INVALID_INFO;
6942 cellUl->reTxIdx[1] = RGSCH_INVALID_INFO;
6943 /* Stack Crash problem for TRACE5 Changes. Added the return below */
6950 * @brief Updation of Sch vars per TTI.
6954 * Function: rgSCHCmnUpdVars
6955 * Purpose: Updation of Sch vars per TTI.
6957 * @param[in] RgSchCellCb *cell
6962 PUBLIC Void rgSCHCmnUpdVars
6967 PUBLIC Void rgSCHCmnUpdVars(cell)
6971 CmLteTimingInfo timeInfo;
6972 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6975 TRC2(rgSCHCmnUpdVars);
/* Absolute subframe number (sfn * subframes-per-frame + slot) mapped
 * onto the UL scheduling ring of RG_SCH_CMN_UL_NUM_SF entries */
6977 idx = (cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.slot);
6978 cellUl->idx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6980 printf("idx %d cellUl->idx %d RGSCH_NUM_SUB_FRAMES_5G %d time(%d %d) \n",idx,cellUl->idx ,RGSCH_NUM_SUB_FRAMES_5G,cell->crntTime.sfn,cell->crntTime.slot);
6982 /* Need to scheduler for after SCHED_DELTA */
6983 /* UL allocation has been advanced by 1 subframe
6984 * so that we do not wrap around and send feedback
6985 * before the data is even received by the PHY */
6986 /* Introduced timing delta for UL control */
6987 idx = (cellUl->idx + TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA);
6988 cellUl->schdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6990 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
6991 TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA)
6992 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
6994 /* ccpu00127193 filling schdTime for logging and enhancement purpose*/
6995 cellUl->schdTime = timeInfo;
6997 /* msg3 scheduling two subframes after general scheduling */
6998 idx = (cellUl->idx + RG_SCH_CMN_DL_DELTA + RGSCH_RARSP_MSG3_DELTA);
6999 cellUl->msg3SchdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
7001 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
7002 RG_SCH_CMN_DL_DELTA+ RGSCH_RARSP_MSG3_DELTA)
7003 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
7005 idx = (cellUl->idx + TFU_RECPREQ_DLDELTA);
7007 cellUl->rcpReqIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
7009 /* Downlink harq feedback is sometime after data reception / harq failure */
7010 /* Since feedback happens prior to scheduling being called, we add 1 to */
7011 /* take care of getting the correct subframe for feedback */
/* CRC-indication index looks BACK by TFU_CRCIND_ULDELTA subframes;
 * the ring size is added first so the modulo operand stays non-negative */
7012 idx = (cellUl->idx - TFU_CRCIND_ULDELTA + RG_SCH_CMN_UL_NUM_SF);
7014 printf("Finally setting cellUl->hqFdbkIdx[0] = %d TFU_CRCIND_ULDELTA %d RG_SCH_CMN_UL_NUM_SF %d\n",idx,TFU_CRCIND_ULDELTA,RG_SCH_CMN_UL_NUM_SF);
7016 cellUl->hqFdbkIdx[0] = (idx % (RG_SCH_CMN_UL_NUM_SF));
/* Retransmissions are placed in the same UL subframe being scheduled now */
7018 idx = ((cellUl->schdIdx) % (RG_SCH_CMN_UL_NUM_SF));
7020 cellUl->reTxIdx[0] = (U8) idx;
7022 printf("cellUl->hqFdbkIdx[0] %d cellUl->reTxIdx[0] %d \n",cellUl->hqFdbkIdx[0], cellUl->reTxIdx[0] );
7024 /* RACHO: update cmn sched specific RACH variables,
7025 * mainly the prachMaskIndex */
7026 rgSCHCmnUpdRachParam(cell);
7035 * @brief To get uplink subframe index associated with current PHICH
7040 * Function: rgSCHCmnGetPhichUlSfIdx
7041 * Purpose: Gets uplink subframe index associated with current PHICH
7042 * transmission based on SFN and subframe no
7044 * @param[in] CmLteTimingInfo *timeInfo
7045 * @param[in] RgSchCellCb *cell
7050 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx
7052 CmLteTimingInfo *timeInfo,
7056 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx(timeInfo, cell)
7057 CmLteTimingInfo *timeInfo;
7061 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7063 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7069 TRC2(rgSCHCmnGetPhichUlSfIdx);
7071 dlsf = rgSCHUtlSubFrmGet(cell, *timeInfo);
7073 if(dlsf->phichOffInfo.sfnOffset == RGSCH_INVALID_INFO)
7075 RETVALUE(RGSCH_INVALID_INFO);
7077 subframe = dlsf->phichOffInfo.subframe;
/* Recover the SFN in which the PUSCH was sent; adding RGSCH_MAX_SFN
 * before the modulo handles the SFN wrap-around case */
7079 sfn = (RGSCH_MAX_SFN + timeInfo->sfn -
7080 dlsf->phichOffInfo.sfnOffset) % RGSCH_MAX_SFN;
7082 /* ccpu00130980: numUlSf(U16) parameter added to avoid integer
7083 * wrap case such that idx will be proper*/
7084 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7085 numUlSf = ((numUlSf * sfn) + rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subframe]) - 1;
7086 idx = numUlSf % (cellUl->numUlSubfrms);
7092 * @brief To get uplink subframe index.
7097 * Function: rgSCHCmnGetUlSfIdx
7098 * Purpose: Gets uplink subframe index based on SFN and subframe number.
7100 * @param[in] CmLteTimingInfo *timeInfo
7101 * @param[in] U8 ulDlCfgIdx
7106 PUBLIC U8 rgSCHCmnGetUlSfIdx
7108 CmLteTimingInfo *timeInfo,
7112 PUBLIC U8 rgSCHCmnGetUlSfIdx(timeInfo, cell)
7113 CmLteTimingInfo *timeInfo;
7117 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7118 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7122 TRC2(rgSCHCmnGetUlSfIdx);
7124 /* ccpu00130980: numUlSf(U16) parameter added to avoid integer
7125 * wrap case such that idx will be proper*/
/* Cumulative UL-subframe count up to (sfn, subframe), then wrapped
 * onto the cell's UL subframe ring */
7126 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7127 numUlSf = ((numUlSf * timeInfo->sfn) + \
7128 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->subframe]) - 1;
7129 idx = numUlSf % (cellUl->numUlSubfrms);
7137 * @brief To get uplink hq index.
7142 * Function: rgSCHCmnGetUlHqProcIdx
7143 * Purpose: Gets uplink subframe index based on SFN and subframe number.
7145 * @param[in] CmLteTimingInfo *timeInfo
7146 * @param[in] U8 ulDlCfgIdx
7151 PUBLIC U8 rgSCHCmnGetUlHqProcIdx
7153 CmLteTimingInfo *timeInfo,
7157 PUBLIC U8 rgSCHCmnGetUlHqProcIdx(timeInfo, cell)
7158 CmLteTimingInfo *timeInfo;
/* NOTE(review): this and the TDD computation below appear to be
 * alternative compile-time paths (guards elided in this extract) --
 * confirm which preprocessor flags select each. Here the HARQ proc id
 * simply cycles with the absolute slot count. */
7166 numUlSf = (timeInfo->sfn * RGSCH_NUM_SUB_FRAMES_5G + timeInfo->slot);
7167 procId = numUlSf % RGSCH_NUM_UL_HQ_PROC;
7169 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7170 /*ccpu00130639 - MOD - To get correct UL HARQ Proc IDs for all UL/DL Configs*/
7172 S8 sfnCycle = cell->tddHqSfnCycle;
7173 U8 numUlHarq = rgSchTddUlNumHarqProcTbl[ulDlCfgIdx]
7175 /* TRACE 5 Changes */
7176 TRC2(rgSCHCmnGetUlHqProcIdx);
7178 /* Calculate the number of UL SF in one SFN */
7179 numUlSfInSfn = RGSCH_NUM_SUB_FRAMES -
7180 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7182 /* Check for the SFN wrap around case */
7183 if(cell->crntTime.sfn == 1023 && timeInfo->sfn == 0)
7187 else if(cell->crntTime.sfn == 0 && timeInfo->sfn == 1023)
7189 /* sfnCycle decremented by 1 */
7190 sfnCycle = (sfnCycle + numUlHarq-1) % numUlHarq;
7192 /* Calculate the total number of UL sf */
7193 /* -1 is done since uplink sf are counted from 0 */
7194 numUlSf = numUlSfInSfn * (timeInfo->sfn + (sfnCycle*1024)) +
7195 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->slot] - 1;
7197 procId = numUlSf % numUlHarq;
7203 /* UL_ALLOC_CHANGES */
7204 /***********************************************************
7206 * Func : rgSCHCmnUlFreeAlloc
7208 * Desc : Free an allocation - invokes UHM and releases
7209 * alloc for the scheduler
7210 * Doesn't need subframe as argument
7218 **********************************************************/
7220 PUBLIC Void rgSCHCmnUlFreeAlloc
7226 PUBLIC Void rgSCHCmnUlFreeAlloc(cell, alloc)
7228 RgSchUlAlloc *alloc;
7231 RgSchUlHqProcCb *hqProc;
/* Fix: trace macro previously carried the sibling function's name
 * (rgSCHCmnUlFreeAllocation), which mislabelled trace output */
7232 TRC2(rgSCHCmnUlFreeAlloc);
7236 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7237 if ((alloc->hqProc->remTx == 0) &&
7238 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7241 RgSchRaCb *raCb = alloc->raCb;
7242 rgSCHUhmFreeProc(alloc->hqProc, cell);
7243 rgSCHUtlUlAllocRelease(alloc);
7244 rgSCHRamDelRaCb(cell, raCb, TRUE);
/* Normal path: save hqProc first, since the alloc is released back to
 * the pool before the HARQ process is freed in UHM */
7249 hqProc = alloc->hqProc;
7250 rgSCHUtlUlAllocRelease(alloc);
7251 rgSCHUhmFreeProc(hqProc, cell);
7256 /***********************************************************
7258 * Func : rgSCHCmnUlFreeAllocation
7260 * Desc : Free an allocation - invokes UHM and releases
7261 * alloc for the scheduler
7269 **********************************************************/
7271 PUBLIC Void rgSCHCmnUlFreeAllocation
7278 PUBLIC Void rgSCHCmnUlFreeAllocation(cell, sf, alloc)
7281 RgSchUlAlloc *alloc;
7284 RgSchUlHqProcCb *hqProc;
7286 TRC2(rgSCHCmnUlFreeAllocation);
7290 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7291 if ((alloc->hqProc->remTx == 0) &&
7292 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7295 RgSchRaCb *raCb = alloc->raCb;
7296 rgSCHUhmFreeProc(alloc->hqProc, cell);
7297 rgSCHUtlUlAllocRls(sf, alloc);
7298 rgSCHRamDelRaCb(cell, raCb, TRUE);
/* Normal path: save hqProc and free it in UHM before the alloc is
 * returned to the given UL subframe */
7303 hqProc = alloc->hqProc;
7304 rgSCHUhmFreeProc(hqProc, cell);
7306 /* re-setting the PRB count while freeing the allocations */
7309 rgSCHUtlUlAllocRls(sf, alloc);
7315 * @brief This function implements PDCCH allocation for an UE
7316 * in the currently running subframe.
7320 * Function: rgSCHCmnPdcchAllocCrntSf
7321 * Purpose: This function determines current DL subframe
7322 * and UE DL CQI to call the actual pdcch allocator
7324 * Note that this function is called only
7325 * when PDCCH request needs to be made during
7326 * uplink scheduling.
7328 * Invoked by: Scheduler
7330 * @param[in] RgSchCellCb *cell
7331 * @param[in] RgSchUeCb *ue
7332 * @return RgSchPdcch *
7333 * -# NULLP when unsuccessful
7336 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf
7342 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf(cell, ue)
7347 CmLteTimingInfo frm = cell->crntTime;
7348 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7350 RgSchPdcch *pdcch = NULLP;
7352 TRC2(rgSCHCmnPdcchAllocCrntSf);
/* The PDCCH lives in the DL subframe that carries the UL grant:
 * advance current time by the UL-control DL delta */
7353 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7354 sf = rgSCHUtlSubFrmGet(cell, frm);
/* UE flagged to take DCI0/1A from the common search space */
7357 if (ue->allocCmnUlPdcch)
7359 pdcch = rgSCHCmnCmnPdcchAlloc(cell, sf);
7360 /* Since CRNTI Scrambled */
/* NOTE(review): assumes pdcch was validated non-NULL before this
 * dereference (guard appears to be elided from this extract) -- confirm */
7363 pdcch->dciNumOfBits = ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
7369 //pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, y, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_0, FALSE);
7370 pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_A1, FALSE);
7375 /***********************************************************
7377 * Func : rgSCHCmnUlAllocFillNdmrs
7379 * Desc : Determines and fills N_dmrs for a UE uplink
7384 * Notes: N_dmrs determination is straightforward, so
7385 * it is configured per subband
7389 **********************************************************/
7391 PUBLIC Void rgSCHCmnUlAllocFillNdmrs
7393 RgSchCmnUlCell *cellUl,
7397 PUBLIC Void rgSCHCmnUlAllocFillNdmrs(cellUl, alloc)
7398 RgSchCmnUlCell *cellUl;
7399 RgSchUlAlloc *alloc;
7402 TRC2(rgSCHCmnUlAllocFillNdmrs);
/* N_dmrs is configured per subband: pick the entry for the
 * allocation's first subband */
7403 alloc->grnt.nDmrs = cellUl->dmrsArr[alloc->sbStart];
7407 /***********************************************************
7409 * Func : rgSCHCmnUlAllocLnkHqProc
7411 * Desc : Links a new allocation for an UE with the
7412 * appropriate HARQ process of the UE.
7420 **********************************************************/
7422 PUBLIC Void rgSCHCmnUlAllocLnkHqProc
7425 RgSchUlAlloc *alloc,
7426 RgSchUlHqProcCb *proc,
7430 PUBLIC Void rgSCHCmnUlAllocLnkHqProc(ue, alloc, proc, isRetx)
7432 RgSchUlAlloc *alloc;
7433 RgSchUlHqProcCb *proc;
7437 TRC2(rgSCHCmnUlAllocLnkHqProc);
/* NOTE(review): presumably executed on the isRetx branch -- adapts the
 * new allocation to the ongoing HARQ retransmission. Confirm branch
 * structure against the full file. */
7441 rgSCHCmnUlAdapRetx(alloc, proc);
7445 #ifdef LTE_L2_MEAS /* L2_COUNTERS */
/* New transmission: register the alloc with UHM using the UE HARQ
 * entity's configured max retransmission budget */
7448 rgSCHUhmNewTx(proc, (((RgUeUlHqCb*)proc->hqEnt)->maxHqRetx), alloc);
7454 * @brief This function releases a PDCCH in the subframe that is
7455 * currently being allocated for.
7459 * Function: rgSCHCmnPdcchRlsCrntSf
7460 * Purpose: This function determines current DL subframe
7461 * which is considered for PDCCH allocation,
7462 * and then calls the actual function that
7463 * releases a PDCCH in a specific subframe.
7464 * Note that this function is called only
7465 * when PDCCH release needs to be made during
7466 * uplink scheduling.
7468 * Invoked by: Scheduler
7470 * @param[in] RgSchCellCb *cell
7471 * @param[in] RgSchPdcch *pdcch
7475 PUBLIC Void rgSCHCmnPdcchRlsCrntSf
7481 PUBLIC Void rgSCHCmnPdcchRlsCrntSf(cell, pdcch)
7486 CmLteTimingInfo frm = cell->crntTime;
7489 TRC2(rgSCHCmnPdcchRlsCrntSf);
/* Same UL-control DL delta as allocation, so the release targets the
 * DL subframe currently being scheduled for */
7491 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7492 sf = rgSCHUtlSubFrmGet(cell, frm);
7493 rgSCHUtlPdcchPut(cell, &sf->pdcchInfo, pdcch);
7496 /***********************************************************
7498 * Func : rgSCHCmnUlFillPdcchWithAlloc
7500 * Desc : Fills a PDCCH with UL grant information (5GTF DCI format A1/A2).
7508 **********************************************************/
7510 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc
7513 RgSchUlAlloc *alloc,
7517 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc(pdcch, alloc, ue)
7519 RgSchUlAlloc *alloc;
7524 TRC2(rgSCHCmnUlFillPdcchWithAlloc);
7527 pdcch->rnti = alloc->rnti;
7528 //pdcch->dci.dciFormat = TFU_DCI_FORMAT_A2;
7529 pdcch->dci.dciFormat = alloc->grnt.dciFrmt;
7531 //Currently hardcoding values here.
7532 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
/* Map the UL grant fields into the DCI payload of the chosen format;
 * fields not derived from the grant are currently hardcoded to 0 */
7533 switch(pdcch->dci.dciFormat)
7535 case TFU_DCI_FORMAT_A1:
7537 pdcch->dci.u.formatA1Info.formatType = 0;
7538 pdcch->dci.u.formatA1Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7539 pdcch->dci.u.formatA1Info.xPUSCH_TxTiming = 0;
7540 pdcch->dci.u.formatA1Info.RBAssign = alloc->grnt.rbAssign;
7541 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7542 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7543 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7544 pdcch->dci.u.formatA1Info.CSI_BSI_BRI_Req = 0;
7545 pdcch->dci.u.formatA1Info.CSIRS_BRRS_TxTiming = 0;
7546 pdcch->dci.u.formatA1Info.CSIRS_BRRS_SymbIdx = 0;
7547 pdcch->dci.u.formatA1Info.CSIRS_BRRS_ProcInd = 0;
7548 pdcch->dci.u.formatA1Info.numBSI_Reports = 0;
7549 pdcch->dci.u.formatA1Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7550 pdcch->dci.u.formatA1Info.beamSwitch = 0;
7551 pdcch->dci.u.formatA1Info.SRS_Config = 0;
7552 pdcch->dci.u.formatA1Info.SRS_Symbol = 0;
7553 pdcch->dci.u.formatA1Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7554 pdcch->dci.u.formatA1Info.SCID = alloc->grnt.SCID;
7555 pdcch->dci.u.formatA1Info.PMI = alloc->grnt.PMI;
7556 pdcch->dci.u.formatA1Info.UL_PCRS = 0;
7557 pdcch->dci.u.formatA1Info.tpcCmd = alloc->grnt.tpc;
7560 case TFU_DCI_FORMAT_A2:
7562 pdcch->dci.u.formatA2Info.formatType = 1;
7563 pdcch->dci.u.formatA2Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7564 pdcch->dci.u.formatA2Info.xPUSCH_TxTiming = 0;
7565 pdcch->dci.u.formatA2Info.RBAssign = alloc->grnt.rbAssign;
7566 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7567 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7568 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7569 pdcch->dci.u.formatA2Info.CSI_BSI_BRI_Req = 0;
7570 pdcch->dci.u.formatA2Info.CSIRS_BRRS_TxTiming = 0;
7571 pdcch->dci.u.formatA2Info.CSIRS_BRRS_SymbIdx = 0;
7572 pdcch->dci.u.formatA2Info.CSIRS_BRRS_ProcInd = 0;
7573 pdcch->dci.u.formatA2Info.numBSI_Reports = 0;
7574 pdcch->dci.u.formatA2Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7575 pdcch->dci.u.formatA2Info.beamSwitch = 0;
7576 pdcch->dci.u.formatA2Info.SRS_Config = 0;
7577 pdcch->dci.u.formatA2Info.SRS_Symbol = 0;
7578 pdcch->dci.u.formatA2Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7579 pdcch->dci.u.formatA2Info.SCID = alloc->grnt.SCID;
7580 pdcch->dci.u.formatA2Info.PMI = alloc->grnt.PMI;
7581 pdcch->dci.u.formatA2Info.UL_PCRS = 0;
7582 pdcch->dci.u.formatA2Info.tpcCmd = alloc->grnt.tpc;
/* Fix: corrected log-message typos ("icorrect" -> "incorrect",
 * "dciForamt" -> "dciFormat") */
7586 RLOG1(L_ERROR," 5GTF_ERROR UL Allocator's incorrect "
7587 "dciFormat Fill RNTI:%d",alloc->rnti);
7595 /***********************************************************
7597 * Func : rgSCHCmnUlAllocFillTpc
7599 * Desc : Determines and fills TPC for an UE allocation.
7607 **********************************************************/
7609 PUBLIC Void rgSCHCmnUlAllocFillTpc
7616 PUBLIC Void rgSCHCmnUlAllocFillTpc(cell, ue, alloc)
7619 RgSchUlAlloc *alloc;
7622 TRC2(rgSCHCmnUlAllocFillTpc);
/* The PUSCH TPC command is owned by the power-control module */
7623 alloc->grnt.tpc = rgSCHPwrPuschTpcForUe(cell, ue);
7628 /***********************************************************
7630 * Func : rgSCHCmnAddUeToRefreshQ
7632 * Desc : Adds a UE to refresh queue, so that the UE is
7633 * periodically triggered to refresh it's GBR and
7642 **********************************************************/
7644 PRIVATE Void rgSCHCmnAddUeToRefreshQ
7651 PRIVATE Void rgSCHCmnAddUeToRefreshQ(cell, ue, wait)
7657 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
7659 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
7661 TRC2(rgSCHCmnAddUeToRefreshQ);
/* Build the timer request: schedule a UE-refresh event on the common
 * scheduler's timing queue using the UE's own timer control block */
7664 cmMemset((U8 *)&arg, 0, sizeof(arg));
7665 arg.tqCp = &sched->tmrTqCp;
7666 arg.tq = sched->tmrTq;
7667 arg.timers = &ueSchd->tmr;
7671 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
7678 * @brief Perform UE reset procedure.
7682 * Function : rgSCHCmnUlUeReset
7684 * This functions performs BSR resetting and
7685 * triggers UL specific scheduler
7686 * to Perform UE reset procedure.
7688 * @param[in] RgSchCellCb *cell
7689 * @param[in] RgSchUeCb *ue
7693 PRIVATE Void rgSCHCmnUlUeReset
7699 PRIVATE Void rgSCHCmnUlUeReset(cell, ue)
7704 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7705 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7707 RgSchCmnLcg *lcgCmn;
7709 RgSchCmnAllocRecord *allRcd;
7710 TRC2(rgSCHCmnUlUeReset);
/* Clear buffer-status-reporting state; effective AMBR reverts to the
 * configured value */
7712 ue->ul.minReqBytes = 0;
7713 ue->ul.totalBsr = 0;
7715 ue->ul.nonGbrLcgBs = 0;
7716 ue->ul.effAmbr = ue->ul.cfgdAmbr;
7718 node = ueUl->ulAllocLst.first;
7721 allRcd = (RgSchCmnAllocRecord *)node->node;
/* Reset every LCG's reported buffer status; effective GBR/MBR revert
 * to configured values */
7725 for(lcgCnt = 0; lcgCnt < RGSCH_MAX_LCG_PER_UE; lcgCnt++)
7727 lcgCmn = RG_SCH_CMN_GET_UL_LCG(&ue->ul.lcgArr[lcgCnt]);
7729 lcgCmn->reportedBs = 0;
7730 lcgCmn->effGbr = lcgCmn->cfgdGbr;
7731 lcgCmn->effDeltaMbr = lcgCmn->deltaMbr;
7733 rgSCHCmnUlUeDelAllocs(cell, ue);
7735 ue->isSrGrant = FALSE;
/* Hand off to the UL-specific scheduler's reset hook */
7737 cellSchd->apisUl->rgSCHUlUeReset(cell, ue);
7739 /* Stack Crash problem for TRACE5 changes. Added the return below */
7745 * @brief RESET UL CQI and DL CQI&RI to conservative values
7746 * for a reestablishing UE.
7750 * Function : rgSCHCmnResetRiCqi
7752 * RESET UL CQI and DL CQI&RI to conservative values
7753 * for a reestablishing UE
7755 * @param[in] RgSchCellCb *cell
7756 * @param[in] RgSchUeCb *ue
7760 PRIVATE Void rgSCHCmnResetRiCqi
7766 PRIVATE Void rgSCHCmnResetRiCqi(cell, ue)
7771 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7772 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
7773 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7774 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7776 TRC2(rgSCHCmnResetRiCqi);
7778 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
7779 cell->isCpUlExtend);
/* Fall back to the conservative CCCH-grade CQI on both codewords and
 * rank 1 until fresh reports arrive */
7781 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
7782 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
7783 ueDl->mimoInfo.ri = 1;
/* PMI-dependent transmission modes cannot be trusted without fresh
 * PMI feedback: force transmit diversity */
7784 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
7785 (ue->mimoInfo.txMode == RGR_UE_TM_6))
7787 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
7789 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
7791 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
7794 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
7796 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
7800 /* Request for an early Aper CQI in case of reest */
7801 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
7802 if(acqiCb && acqiCb->aCqiCfg.pres)
7804 acqiCb->aCqiTrigWt = 0;
7812 * @brief Perform UE reset procedure.
7816 * Function : rgSCHCmnDlUeReset
7818 * This functions performs BO resetting and
7819 * triggers DL specific scheduler
7820 * to Perform UE reset procedure.
7822 * @param[in] RgSchCellCb *cell
7823 * @param[in] RgSchUeCb *ue
7827 PRIVATE Void rgSCHCmnDlUeReset
7833 PRIVATE Void rgSCHCmnDlUeReset(cell, ue)
7838 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7839 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
7840 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7842 TRC2(rgSCHCmnDlUeReset);
/* If the UE is queued for a PDCCH order, dequeue it */
7844 if (ueDl->rachInfo.poLnk.node != NULLP)
7846 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
7849 /* Fix: syed Remove from TA List if this UE is there.
7850 * If TA Timer is running. Stop it */
7851 if (ue->dlTaLnk.node)
7853 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
7854 ue->dlTaLnk.node = (PTR)NULLP;
7856 else if (ue->taTmr.tmrEvnt != TMR_NONE)
7858 rgSCHTmrStopTmr(cell, ue->taTmr.tmrEvnt, ue);
/* Hand off to the DL-specific scheduler's reset hook */
7861 cellSchd->apisDl->rgSCHDlUeReset(cell, ue);
7865 rgSCHSCellDlUeReset(cell,ue);
7871 * @brief Perform UE reset procedure.
7875 * Function : rgSCHCmnUeReset
7877 * This function triggers the specific scheduler
7878 * to perform the UE reset procedure.
7880 * @param[in] RgSchCellCb *cell
7881 * @param[in] RgSchUeCb *ue
7887 PUBLIC Void rgSCHCmnUeReset
7893 PUBLIC Void rgSCHCmnUeReset(cell, ue)
7900 RgInfResetHqEnt hqEntRstInfo;
7902 TRC2(rgSCHCmnUeReset);
7903 /* RACHO: remove UE from pdcch, handover and rapId assoc Qs */
7904 rgSCHCmnDelRachInfo(cell, ue);
7906 rgSCHPwrUeReset(cell, ue);
7908 rgSCHCmnUlUeReset(cell, ue);
7909 rgSCHCmnDlUeReset(cell, ue);
7912 /* Making allocCmnUlPdcch TRUE to allocate DCI0/1A from Common search space.
7913 As because multiple cells are added hence 2 bits CqiReq is there
7914 This flag will be set to FALSE once we will get Scell READY */
7915 ue->allocCmnUlPdcch = TRUE;
7918 /* Fix : syed RESET UL CQI and DL CQI&RI to conservative values
7919 * for a reestablishing UE */
7920 /*Reset Cqi Config for all the configured cells*/
7921 for (idx = 0;idx < CM_LTE_MAX_CELLS; idx++)
7923 if (ue->cellInfo[idx] != NULLP)
7925 rgSCHCmnResetRiCqi(ue->cellInfo[idx]->cell, ue);
7928 /*After Reset Trigger APCQI for Pcell*/
7929 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
7930 if(pCellInfo->acqiCb.aCqiCfg.pres)
7932 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
7935 /* sending HqEnt reset to MAC */
7936 hqEntRstInfo.cellId = cell->cellId;
7937 hqEntRstInfo.crnti = ue->ueId;
7939 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
7940 RgSchMacRstHqEnt(&pst,&hqEntRstInfo);
7946 * @brief UE out of MeasGap or AckNackReptn.
7950 * Function : rgSCHCmnActvtUlUe
7952 * This functions triggers specific scheduler
7953 * to start considering it for scheduling.
7955 * @param[in] RgSchCellCb *cell
7956 * @param[in] RgSchUeCb *ue
7962 PUBLIC Void rgSCHCmnActvtUlUe
7968 PUBLIC Void rgSCHCmnActvtUlUe(cell, ue)
7973 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7974 TRC2(rgSCHCmnActvtUlUe);
7976 /* : take care of this in UL retransmission */
7977 cellSchd->apisUl->rgSCHUlActvtUe(cell, ue);
7982 * @brief UE out of MeasGap or AckNackReptn.
7986 * Function : rgSCHCmnActvtDlUe
7988 * This functions triggers specific scheduler
7989 * to start considering it for scheduling.
7991 * @param[in] RgSchCellCb *cell
7992 * @param[in] RgSchUeCb *ue
7998 PUBLIC Void rgSCHCmnActvtDlUe
8004 PUBLIC Void rgSCHCmnActvtDlUe(cell, ue)
8009 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8010 TRC2(rgSCHCmnActvtDlUe);
8012 cellSchd->apisDl->rgSCHDlActvtUe(cell, ue);
8017 * @brief This API is invoked to indicate scheduler of a CRC indication.
8021 * Function : rgSCHCmnHdlUlTransInd
8022 * This API is invoked to indicate scheduler of a CRC indication.
8024 * @param[in] RgSchCellCb *cell
8025 * @param[in] RgSchUeCb *ue
8026 * @param[in] CmLteTimingInfo timingInfo
8031 PUBLIC Void rgSCHCmnHdlUlTransInd
8035 CmLteTimingInfo timingInfo
8038 PUBLIC Void rgSCHCmnHdlUlTransInd(cell, ue, timingInfo)
8041 CmLteTimingInfo timingInfo;
8044 TRC2(rgSCHCmnHdlUlTransInd);
8046 /* Update the latest UL dat/sig transmission time */
8047 RGSCHCPYTIMEINFO(timingInfo, ue->ul.ulTransTime);
8048 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
8050 /* Some UL Transmission from this UE.
8051 * Activate this UE if it was inactive */
8052 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8053 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8061 * @brief Compute the minimum Rank based on Codebook subset
8062 * restriction configuration for 4 Tx Ports and Tx Mode 4.
8066 * Function : rgSCHCmnComp4TxMode4
8068 * Depending on BitMap set at CBSR during Configuration
8069 * - return the least possible Rank
8072 * @param[in] U32 *pmiBitMap
8073 * @return RgSchCmnRank
8076 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4
8081 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4(pmiBitMap)
8085 U32 bitMap0, bitMap1;
8086 TRC2(rgSCHCmnComp4TxMode4);
8087 bitMap0 = pmiBitMap[0];
8088 bitMap1 = pmiBitMap[1];
8089 if((bitMap1) & 0xFFFF)
8091 RETVALUE (RG_SCH_CMN_RANK_1);
8093 else if((bitMap1>>16) & 0xFFFF)
8095 RETVALUE (RG_SCH_CMN_RANK_2);
8097 else if((bitMap0) & 0xFFFF)
8099 RETVALUE (RG_SCH_CMN_RANK_3);
8101 else if((bitMap0>>16) & 0xFFFF)
8103 RETVALUE (RG_SCH_CMN_RANK_4);
8107 RETVALUE (RG_SCH_CMN_RANK_1);
8113 * @brief Compute the minimum Rank based on Codebook subset
8114 * restriction configuration for 2 Tx Ports and Tx Mode 4.
8118 * Function : rgSCHCmnComp2TxMode4
8120 * Depending on BitMap set at CBSR during Configuration
8121 * - return the least possible Rank
8124 * @param[in] U32 *pmiBitMap
8125 * @return RgSchCmnRank
8128 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4
8133 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4(pmiBitMap)
8138 TRC2(rgSCHCmnComp2TxMode4);
8139 bitMap0 = pmiBitMap[0];
8140 if((bitMap0>>26)& 0x0F)
8142 RETVALUE (RG_SCH_CMN_RANK_1);
8144 else if((bitMap0>>30) & 3)
8146 RETVALUE (RG_SCH_CMN_RANK_2);
8150 RETVALUE (RG_SCH_CMN_RANK_1);
8155 * @brief Compute the minimum Rank based on Codebook subset
8156 * restriction configuration for 4 Tx Ports and Tx Mode 3.
8160 * Function : rgSCHCmnComp4TxMode3
8162 * Depending on BitMap set at CBSR during Configuration
8163 * - return the least possible Rank
8166 * @param[in] U32 *pmiBitMap
8167 * @return RgSchCmnRank
8170 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3
8175 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3(pmiBitMap)
8180 TRC2(rgSCHCmnComp4TxMode3);
8181 bitMap0 = pmiBitMap[0];
8182 if((bitMap0>>28)& 1)
8184 RETVALUE (RG_SCH_CMN_RANK_1);
8186 else if((bitMap0>>29) &1)
8188 RETVALUE (RG_SCH_CMN_RANK_2);
8190 else if((bitMap0>>30) &1)
8192 RETVALUE (RG_SCH_CMN_RANK_3);
8194 else if((bitMap0>>31) &1)
8196 RETVALUE (RG_SCH_CMN_RANK_4);
8200 RETVALUE (RG_SCH_CMN_RANK_1);
8205 * @brief Compute the minimum Rank based on Codebook subset
8206 * restriction configuration for 2 Tx Ports and Tx Mode 3.
8210 * Function : rgSCHCmnComp2TxMode3
8212 * Depending on BitMap set at CBSR during Configuration
8213 * - return the least possible Rank
8216 * @param[in] U32 *pmiBitMap
8217 * @return RgSchCmnRank
8220 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3
8225 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3(pmiBitMap)
8230 TRC2(rgSCHCmnComp2TxMode3);
8231 bitMap0 = pmiBitMap[0];
8232 if((bitMap0>>30)& 1)
8234 RETVALUE (RG_SCH_CMN_RANK_1);
8236 else if((bitMap0>>31) &1)
8238 RETVALUE (RG_SCH_CMN_RANK_2);
8242 RETVALUE (RG_SCH_CMN_RANK_1);
8247 * @brief Compute the minimum Rank based on Codebook subset
8248 * restriction configuration.
8252 * Function : rgSCHCmnComputeRank
8254 * Depending on Num Tx Ports and Transmission mode
8255 * - return the least possible Rank
8258 * @param[in] RgrTxMode txMode
8259 * @param[in] U32 *pmiBitMap
8260 * @param[in] U8 numTxPorts
8261 * @return RgSchCmnRank
8264 PRIVATE RgSchCmnRank rgSCHCmnComputeRank
8271 PRIVATE RgSchCmnRank rgSCHCmnComputeRank(txMode, pmiBitMap, numTxPorts)
8277 TRC2(rgSCHCmnComputeRank);
8279 if (numTxPorts ==2 && txMode == RGR_UE_TM_3)
8281 RETVALUE (rgSCHCmnComp2TxMode3(pmiBitMap));
8283 else if (numTxPorts ==4 && txMode == RGR_UE_TM_3)
8285 RETVALUE (rgSCHCmnComp4TxMode3(pmiBitMap));
8287 else if (numTxPorts ==2 && txMode == RGR_UE_TM_4)
8289 RETVALUE (rgSCHCmnComp2TxMode4(pmiBitMap));
8291 else if (numTxPorts ==4 && txMode == RGR_UE_TM_4)
8293 RETVALUE (rgSCHCmnComp4TxMode4(pmiBitMap));
8297 RETVALUE (RG_SCH_CMN_RANK_1);
8304 * @brief Harq Entity Deinitialization for CMN SCH.
8308 * Function : rgSCHCmnDlDeInitHqEnt
8310 * Harq Entity Deinitialization for CMN SCH
8312 * @param[in] RgSchCellCb *cell
8313 * @param[in] RgSchDlHqEnt *hqE
8316 /*KWORK_FIX:Changed function return type to void */
8318 PUBLIC Void rgSCHCmnDlDeInitHqEnt
8324 PUBLIC Void rgSCHCmnDlDeInitHqEnt(cell, hqE)
8329 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8330 RgSchDlHqProcCb *hqP;
8334 TRC2(rgSCHCmnDlDeInitHqEnt);
8336 ret = cellSchd->apisDl->rgSCHDlUeHqEntDeInit(cell, hqE);
8337 /* Free only If the Harq proc are created*/
8342 for(cnt = 0; cnt < hqE->numHqPrcs; cnt++)
8344 hqP = &hqE->procs[cnt];
8345 if ((RG_SCH_CMN_GET_DL_HQP(hqP)))
8347 rgSCHUtlFreeSBuf(cell->instIdx,
8348 (Data**)(&(hqP->sch)), (sizeof(RgSchCmnDlHqProc)));
8352 rgSCHLaaDeInitDlHqProcCb (cell, hqE);
8359 * @brief Harq Entity initialization for CMN SCH.
8363 * Function : rgSCHCmnDlInitHqEnt
8365 * Harq Entity initialization for CMN SCH
8367 * @param[in] RgSchCellCb *cell
8368 * @param[in] RgSchUeCb *ue
8374 PUBLIC S16 rgSCHCmnDlInitHqEnt
8380 PUBLIC S16 rgSCHCmnDlInitHqEnt(cell, hqEnt)
8382 RgSchDlHqEnt *hqEnt;
8386 RgSchDlHqProcCb *hqP;
8389 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8390 TRC2(rgSCHCmnDlInitHqEnt);
8392 for(cnt = 0; cnt < hqEnt->numHqPrcs; cnt++)
8394 hqP = &hqEnt->procs[cnt];
8395 if (rgSCHUtlAllocSBuf(cell->instIdx,
8396 (Data**)&(hqP->sch), (sizeof(RgSchCmnDlHqProc))) != ROK)
8402 if((cell->emtcEnable) &&(hqEnt->ue->isEmtcUe))
8404 if(ROK != cellSchd->apisEmtcDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8413 if(ROK != cellSchd->apisDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8420 } /* rgSCHCmnDlInitHqEnt */
8423 * @brief This function computes distribution of refresh period
8427 * Function: rgSCHCmnGetRefreshDist
8428 * Purpose: This function computes distribution of refresh period
8429 * This is required to align set of UEs refresh
8430 * around the different consecutive subframe.
8432 * Invoked by: rgSCHCmnGetRefreshPerDist
8434 * @param[in] RgSchCellCb *cell
8435 * @param[in] RgSchUeCb *ue
8440 PRIVATE U8 rgSCHCmnGetRefreshDist
8446 PRIVATE U8 rgSCHCmnGetRefreshDist(cell, ue)
8453 Inst inst = cell->instIdx;
8455 TRC2(rgSCHCmnGetRefreshDist);
8457 for(refOffst = 0; refOffst < RGSCH_MAX_REFRESH_OFFSET; refOffst++)
8459 if(cell->refreshUeCnt[refOffst] < RGSCH_MAX_REFRESH_GRPSZ)
8461 cell->refreshUeCnt[refOffst]++;
8462 ue->refreshOffset = refOffst;
8463 /* printf("UE[%d] refresh offset[%d]. Cell refresh ue count[%d].\n", ue->ueId, refOffst, cell->refreshUeCnt[refOffst]); */
8468 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Allocation of refresh distribution failed\n"));
8469 /* We should not enter here normally, but incase of failure, allocating from last offset*/
8470 cell->refreshUeCnt[refOffst-1]++;
8471 ue->refreshOffset = refOffst-1;
8473 RETVALUE(refOffst-1);
8476 * @brief This function computes initial Refresh Wait Period.
8480 * Function: rgSCHCmnGetRefreshPer
8481 * Purpose: This function computes initial Refresh Wait Period.
8482 * This is required to align multiple UEs refresh
8483 * around the same time.
8485 * Invoked by: rgSCHCmnGetRefreshPer
8487 * @param[in] RgSchCellCb *cell
8488 * @param[in] RgSchUeCb *ue
8489 * @param[in] U32 *waitPer
8494 PRIVATE Void rgSCHCmnGetRefreshPer
8501 PRIVATE Void rgSCHCmnGetRefreshPer(cell, ue, waitPer)
8510 TRC2(rgSCHCmnGetRefreshPer);
8512 refreshPer = RG_SCH_CMN_REFRESH_TIME * RG_SCH_CMN_REFRESH_TIMERES;
8513 crntSubFrm = cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.slot;
8514 /* Fix: syed align multiple UEs to refresh at same time */
8515 *waitPer = refreshPer - (crntSubFrm % refreshPer);
8516 *waitPer = RGSCH_CEIL(*waitPer, RG_SCH_CMN_REFRESH_TIMERES);
8517 *waitPer = *waitPer + rgSCHCmnGetRefreshDist(cell, ue);
8525 * @brief UE initialisation for scheduler.
8529 * Function : rgSCHCmnRgrSCellUeCfg
8531 * This function initialises UE-specific scheduler
8532 * information for the SCell.
8533 * 0. Perform basic validations
8534 * 1. Allocate common sched UE cntrl blk
8535 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
8537 * 4. Perform DLFS cfg
8539 * @param[in] RgSchCellCb *cell
8540 * @param[in] RgSchUeCb *ue
8541 * @param[out] RgSchErrInfo *err
8547 PUBLIC S16 rgSCHCmnRgrSCellUeCfg
8551 RgrUeSecCellCfg *sCellInfoCfg,
8555 PUBLIC S16 rgSCHCmnRgrSCellUeCfg(sCell, ue, sCellInfoCfg, err)
8558 RgrUeSecCellCfg *sCellInfoCfg;
8565 RgSchCmnAllocRecord *allRcd;
8566 RgSchDlRbAlloc *allocInfo;
8567 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8569 RgSchCmnUlUe *ueUlPcell;
8570 RgSchCmnUe *pCellUeSchCmn;
8571 RgSchCmnUe *ueSchCmn;
8573 RgSchCmnDlUe *pCellUeDl;
8575 Inst inst = ue->cell->instIdx;
8577 U32 idx = (U8)((sCell->cellId - rgSchCb[sCell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8578 TRC2(rgSCHCmnRgrSCellUeCfg);
8580 pCellUeSchCmn = RG_SCH_CMN_GET_UE(ue,ue->cell);
8581 pCellUeDl = &pCellUeSchCmn->dl;
8583 /* 1. Allocate Common sched control block */
8584 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8585 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8587 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Memory allocation FAILED\n"));
8588 err->errCause = RGSCHERR_SCH_CFG;
8591 ueSchCmn = RG_SCH_CMN_GET_UE(ue,sCell);
8593 /*2. Perform UEs downlink configuration */
8594 ueDl = &ueSchCmn->dl;
8597 ueDl->mimoInfo = pCellUeDl->mimoInfo;
8599 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
8600 (ue->mimoInfo.txMode == RGR_UE_TM_6))
8602 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_NO_PMI);
8604 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
8606 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_RI_1);
8608 RGSCH_ARRAY_BOUND_CHECK(sCell->instIdx, rgUeCatTbl, pCellUeSchCmn->cmn.ueCat);
8609 ueDl->maxTbBits = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlTbBits;
8612 ri = RGSCH_MIN(ri, sCell->numTxAntPorts);
8613 if(((CM_LTE_UE_CAT_6 == pCellUeSchCmn->cmn.ueCat )
8614 ||(CM_LTE_UE_CAT_7 == pCellUeSchCmn->cmn.ueCat))
8617 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[1];
8621 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[0];
8624 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
8626 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8627 rgSchTddDlNumHarqProcTbl[sCell->ulDlCfgIdx]);
8629 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8630 RGSCH_NUM_DL_HQ_PROC);
8633 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, ue->isEmtcUe);
8635 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, FALSE);
8639 /* ambrCfgd config moved to ueCb.dl, as it's not needed for per cell wise*/
8641 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, sCell);
8642 allocInfo->rnti = ue->ueId;
8644 /* Initializing the lastCfi value to current cfi value */
8645 ueDl->lastCfi = cellSchd->dl.currCfi;
8647 if ((cellSchd->apisDl->rgSCHRgrSCellDlUeCfg(sCell, ue, err)) != ROK)
8649 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Spec Sched DL UE CFG FAILED\n"));
8653 /* TODO: enhance for DLFS RB Allocation for SCELLs in future dev */
8655 /* DLFS UE Config */
8656 if (cellSchd->dl.isDlFreqSel)
8658 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeCfg(sCell, ue, sCellInfoCfg, err)) != ROK)
8660 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS UE config FAILED\n"));
8665 /* TODO: Do UL SCELL CFG during UL CA dev */
8667 ueUl = RG_SCH_CMN_GET_UL_UE(ue, sCell);
8669 /* TODO_ULCA: SRS for SCELL needs to be handled in the below function call */
8670 rgSCHCmnUpdUeUlCqiInfo(sCell, ue, ueUl, ueSchCmn, cellSchd,
8671 sCell->isCpUlExtend);
8673 ret = rgSCHUhmHqEntInit(sCell, ue);
8676 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL UHM HARQ Ent Init "
8677 "Failed for CRNTI:%d", ue->ueId);
8681 ueUlPcell = RG_SCH_CMN_GET_UL_UE(ue, ue->cell);
8682 /* Initialize uplink HARQ related information for UE */
8683 ueUl->hqEnt.maxHqRetx = ueUlPcell->hqEnt.maxHqRetx;
8684 cmLListInit(&ueUl->hqEnt.free);
8685 cmLListInit(&ueUl->hqEnt.inUse);
8686 for(i=0; i < ueUl->hqEnt.numHqPrcs; i++)
8688 ueUl->hqEnt.hqProcCb[i].hqEnt = (void*)(&ueUl->hqEnt);
8689 ueUl->hqEnt.hqProcCb[i].procId = i;
8690 ueUl->hqEnt.hqProcCb[i].ulSfIdx = RGSCH_INVALID_INFO;
8691 ueUl->hqEnt.hqProcCb[i].alloc = NULLP;
8693 /* ccpu00139513- Initializing SPS flags*/
8694 ueUl->hqEnt.hqProcCb[i].isSpsActvnHqP = FALSE;
8695 ueUl->hqEnt.hqProcCb[i].isSpsOccnHqP = FALSE;
8697 cmLListAdd2Tail(&ueUl->hqEnt.free, &ueUl->hqEnt.hqProcCb[i].lnk);
8698 ueUl->hqEnt.hqProcCb[i].lnk.node = (PTR)&ueUl->hqEnt.hqProcCb[i];
8701 /* Allocate UL BSR allocation tracking List */
8702 cmLListInit(&ueUl->ulAllocLst);
8704 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
8706 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8707 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord)) != ROK))
8709 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL Memory allocation FAILED"
8710 "for CRNTI:%d",ue->ueId);
8711 err->errCause = RGSCHERR_SCH_CFG;
8714 allRcd->allocTime = sCell->crntTime;
8715 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
8716 allRcd->lnk.node = (PTR)allRcd;
8719 /* After initialising UL part, do power related init */
8720 ret = rgSCHPwrUeSCellCfg(sCell, ue, sCellInfoCfg);
8723 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Could not do "
8724 "power config for UE CRNTI:%d",ue->ueId);
8729 if(TRUE == ue->isEmtcUe)
8731 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8733 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8734 "for CRNTI:%d",ue->ueId);
8741 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8743 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8744 "for CRNTI:%d",ue->ueId);
8749 ue->ul.isUlCaEnabled = TRUE;
8753 } /* rgSCHCmnRgrSCellUeCfg */
8757 * @brief UE initialisation for scheduler.
8761 * Function : rgSCHCmnRgrSCellUeDel
8763 * This function deletes UE-specific scheduler
8764 * information for the SCell.
8766 * @param[in] RgSchCellCb *cell
8767 * @param[in] RgSchUeCb *ue
8773 PUBLIC S16 rgSCHCmnRgrSCellUeDel
8775 RgSchUeCellInfo *sCellInfo,
8779 PUBLIC S16 rgSCHCmnRgrSCellUeDel(sCellInfo, ue)
8780 RgSchUeCellInfo *sCellInfo;
8784 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8785 Inst inst = ue->cell->instIdx;
8787 TRC2(rgSCHCmnRgrSCellUeDel);
8789 cellSchd->apisDl->rgSCHRgrSCellDlUeDel(sCellInfo, ue);
8792 rgSCHCmnUlUeDelAllocs(sCellInfo->cell, ue);
8795 if(TRUE == ue->isEmtcUe)
8797 cellSchd->apisEmtcUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8802 cellSchd->apisUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8805 /* DLFS UE Config */
8806 if (cellSchd->dl.isDlFreqSel)
8808 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeDel(sCellInfo->cell, ue)) != ROK)
8810 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS Scell del FAILED\n"));
8815 rgSCHUtlFreeSBuf(sCellInfo->cell->instIdx,
8816 (Data**)(&(sCellInfo->sch)), (sizeof(RgSchCmnUe)));
8820 } /* rgSCHCmnRgrSCellUeDel */
8826 * @brief Handles 5gtf configuration for a UE
8830 * Function : rgSCHCmn5gtfUeCfg
8836 * @param[in] RgSchCellCb *cell
8837 * @param[in] RgSchUeCb *ue
8838 * @param[in] RgrUeCfg *cfg
8844 PUBLIC S16 rgSCHCmn5gtfUeCfg
8851 PUBLIC S16 rgSCHCmn5gtfUeCfg(cell, ue, cfg)
8857 TRC2(rgSCHCmnRgrUeCfg);
8859 RgSchUeGrp *ue5gtfGrp;
8860 ue->ue5gtfCb.grpId = cfg->ue5gtfCfg.grpId;
8861 ue->ue5gtfCb.BeamId = cfg->ue5gtfCfg.BeamId;
8862 ue->ue5gtfCb.numCC = cfg->ue5gtfCfg.numCC;
8863 ue->ue5gtfCb.mcs = cfg->ue5gtfCfg.mcs;
8864 ue->ue5gtfCb.maxPrb = cfg->ue5gtfCfg.maxPrb;
8866 ue->ue5gtfCb.cqiRiPer = 100;
8867 /* 5gtf TODO: CQIs to start from (10,0)*/
8868 ue->ue5gtfCb.nxtCqiRiOccn.sfn = 10;
8869 ue->ue5gtfCb.nxtCqiRiOccn.slot = 0;
8870 ue->ue5gtfCb.rank = 1;
8872 printf("\nschd cfg at mac,%u,%u,%u,%u,%u\n",ue->ue5gtfCb.grpId,ue->ue5gtfCb.BeamId,ue->ue5gtfCb.numCC,
8873 ue->ue5gtfCb.mcs,ue->ue5gtfCb.maxPrb);
8875 ue5gtfGrp = &(cell->cell5gtfCb.ueGrp5gConf[ue->ue5gtfCb.BeamId]);
8877 /* TODO_5GTF: Currently handling 1 group only. Need to update when multi group
8878 scheduling comes into picture */
8879 if(ue5gtfGrp->beamBitMask & (1 << ue->ue5gtfCb.BeamId))
8881 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8882 "5GTF_ERROR Invalid beam id CRNTI:%d",cfg->crnti);
8885 ue5gtfGrp->beamBitMask |= (1 << ue->ue5gtfCb.BeamId);
8892 * @brief UE initialisation for scheduler.
8896 * Function : rgSCHCmnRgrUeCfg
8898 * This function initialises UE-specific scheduler
8900 * 0. Perform basic validations
8901 * 1. Allocate common sched UE cntrl blk
8902 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
8904 * 4. Perform DLFS cfg
8906 * @param[in] RgSchCellCb *cell
8907 * @param[in] RgSchUeCb *ue
8908 * @param[int] RgrUeCfg *ueCfg
8909 * @param[out] RgSchErrInfo *err
8915 PUBLIC S16 rgSCHCmnRgrUeCfg
8923 PUBLIC S16 rgSCHCmnRgrUeCfg(cell, ue, ueCfg, err)
8930 RgSchDlRbAlloc *allocInfo;
8932 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8933 RgSchCmnUe *ueSchCmn;
8937 RgSchCmnAllocRecord *allRcd;
8939 U32 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8940 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
8941 TRC2(rgSCHCmnRgrUeCfg);
8944 /* 1. Allocate Common sched control block */
8945 if((rgSCHUtlAllocSBuf(cell->instIdx,
8946 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8948 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8949 "Memory allocation FAILED for CRNTI:%d",ueCfg->crnti);
8950 err->errCause = RGSCHERR_SCH_CFG;
8953 ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
8954 ue->dl.ueDlCqiCfg = ueCfg->ueDlCqiCfg;
8955 pCellInfo->acqiCb.aCqiCfg = ueCfg->ueDlCqiCfg.aprdCqiCfg;
8956 if(ueCfg->ueCatEnum > 0 )
8958 /*KWORK_FIX removed NULL chk for ueSchCmn*/
8959 ueSchCmn->cmn.ueCat = ueCfg->ueCatEnum - 1;
8963 ueSchCmn->cmn.ueCat = 0; /* Assuming enum values correctly set */
8965 cmInitTimers(&ueSchCmn->cmn.tmr, 1);
8967 /*2. Perform UEs downlink configuration */
8968 ueDl = &ueSchCmn->dl;
8969 /* RACHO : store the rapId assigned for HandOver UE.
8970 * Append UE to handover list of cmnCell */
8971 if (ueCfg->dedPreambleId.pres == PRSNT_NODEF)
8973 rgSCHCmnDelDedPreamble(cell, ueCfg->dedPreambleId.val);
8974 ueDl->rachInfo.hoRapId = ueCfg->dedPreambleId.val;
8975 cmLListAdd2Tail(&cellSchd->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
8976 ueDl->rachInfo.hoLnk.node = (PTR)ue;
8979 rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd);
8981 if (ueCfg->txMode.pres == TRUE)
8983 if ((ueCfg->txMode.txModeEnum == RGR_UE_TM_4) ||
8984 (ueCfg->txMode.txModeEnum == RGR_UE_TM_6))
8986 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
8988 if (ueCfg->txMode.txModeEnum == RGR_UE_TM_3)
8990 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
8993 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
8994 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
8997 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
8998 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
8999 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
9002 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
9006 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
9009 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
9011 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9012 rgSchTddDlNumHarqProcTbl[cell->ulDlCfgIdx]);
9014 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9015 RGSCH_NUM_DL_HQ_PROC);
9018 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
9020 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
9022 /* if none of the DL and UL AMBR are configured then fail the configuration
9024 if((ueCfg->ueQosCfg.dlAmbr == 0) && (ueCfg->ueQosCfg.ueBr == 0))
9026 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"UL Ambr and DL Ambr are"
9027 "configured as 0 for CRNTI:%d",ueCfg->crnti);
9028 err->errCause = RGSCHERR_SCH_CFG;
9032 ue->dl.ambrCfgd = (ueCfg->ueQosCfg.dlAmbr * RG_SCH_CMN_REFRESH_TIME)/100;
9034 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
9035 allocInfo->rnti = ue->ueId;
9037 /* Initializing the lastCfi value to current cfi value */
9038 ueDl->lastCfi = cellSchd->dl.currCfi;
9040 if(cell->emtcEnable && ue->isEmtcUe)
9042 if ((cellSchd->apisEmtcDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9044 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9045 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9053 if ((cellSchd->apisDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9055 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9056 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9063 /* 3. Initialize ul part */
9064 ueUl = &ueSchCmn->ul;
9066 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
9067 cell->isCpUlExtend);
9069 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9070 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
9072 ue->ul.cfgdAmbr = (ueCfg->ueQosCfg.ueBr * RG_SCH_CMN_REFRESH_TIME)/100;
9073 ue->ul.effAmbr = ue->ul.cfgdAmbr;
9074 RGSCHCPYTIMEINFO(cell->crntTime, ue->ul.ulTransTime);
9076 /* Allocate UL BSR allocation tracking List */
9077 cmLListInit(&ueUl->ulAllocLst);
9079 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
9081 if((rgSCHUtlAllocSBuf(cell->instIdx,
9082 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord)) != ROK))
9084 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation FAILED"
9085 "for CRNTI:%d",ueCfg->crnti);
9086 err->errCause = RGSCHERR_SCH_CFG;
9089 allRcd->allocTime = cell->crntTime;
9090 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
9091 allRcd->lnk.node = (PTR)allRcd;
9093 /* Allocate common sch cntrl blocks for LCGs */
9094 for (cnt=0; cnt<RGSCH_MAX_LCG_PER_UE; cnt++)
9096 ret = rgSCHUtlAllocSBuf(cell->instIdx,
9097 (Data**)&(ue->ul.lcgArr[cnt].sch), (sizeof(RgSchCmnLcg)));
9100 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9101 "SCH struct alloc failed for CRNTI:%d",ueCfg->crnti);
9102 err->errCause = RGSCHERR_SCH_CFG;
9106 /* After initialising UL part, do power related init */
9107 ret = rgSCHPwrUeCfg(cell, ue, ueCfg);
9110 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9111 "power config for UE CRNTI:%d",ueCfg->crnti);
9115 ret = rgSCHCmnSpsUeCfg(cell, ue, ueCfg, err);
9118 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9119 "SPS config for CRNTI:%d",ueCfg->crnti);
9122 #endif /* LTEMAC_SPS */
9125 if(TRUE == ue->isEmtcUe)
9127 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9129 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9130 "for CRNTI:%d",ueCfg->crnti);
9137 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9139 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9140 "for CRNTI:%d",ueCfg->crnti);
9145 /* DLFS UE Config */
9146 if (cellSchd->dl.isDlFreqSel)
9148 if ((cellSchd->apisDlfs->rgSCHDlfsUeCfg(cell, ue, ueCfg, err)) != ROK)
9150 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "DLFS UE config FAILED"
9151 "for CRNTI:%d",ueCfg->crnti);
9156 /* Fix: syed align multiple UEs to refresh at same time */
9157 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9158 /* Start UE Qos Refresh Timer */
9159 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
9161 rgSCHCmn5gtfUeCfg(cell, ue, ueCfg);
9165 } /* rgSCHCmnRgrUeCfg */
9168 * @brief UE TX mode reconfiguration handler.
9172 * Function : rgSCHCmnDlHdlTxModeRecfg
9174 * This functions updates UE specific scheduler
9175 * information upon UE reconfiguration.
9177 * @param[in] RgSchUeCb *ue
9178 * @param[in] RgrUeRecfg *ueRecfg
9183 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9187 RgrUeRecfg *ueRecfg,
9191 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, numTxPorts)
9194 RgrUeRecfg *ueRecfg;
9199 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9206 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg)
9209 RgrUeRecfg *ueRecfg;
9213 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9214 TRC2(rgSCHCmnDlHdlTxModeRecfg);
9216 if (ueRecfg->txMode.pres != PRSNT_NODEF)
9220 /* ccpu00140894- Starting Timer for TxMode Transition Completion*/
9221 ue->txModeTransCmplt =FALSE;
9222 rgSCHTmrStartTmr (ue->cell, ue, RG_SCH_TMR_TXMODE_TRNSTN, RG_SCH_TXMODE_TRANS_TIMER);
9223 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_CMPLT)
9225 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell,
9226 RG_SCH_CMN_TD_TXMODE_RECFG);
9227 /* MS_WORKAROUND for ccpu00123186 MIMO Fix Start: need to set FORCE TD bitmap based on TX mode */
9228 ueDl->mimoInfo.ri = 1;
9229 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9230 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9232 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9234 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9236 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9238 /* MIMO Fix End: need to set FORCE TD bitmap based on TX mode */
9241 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_START)
9243 /* start afresh forceTD masking */
9244 RG_SCH_CMN_INIT_FORCE_TD(ue, cell, 0);
9245 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_TXMODE_RECFG);
9246 /* Intialize MIMO related parameters of UE */
9249 if(ueRecfg->txMode.pres)
9251 if((ueRecfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9252 (ueRecfg->txMode.txModeEnum ==RGR_UE_TM_4))
9254 if(ueRecfg->ueCodeBookRstRecfg.pres)
9257 rgSCHCmnComputeRank(ueRecfg->txMode.txModeEnum,
9258 ueRecfg->ueCodeBookRstRecfg.pmiBitMap, numTxPorts);
9262 ueDl->mimoInfo.ri = 1;
9267 ueDl->mimoInfo.ri = 1;
9272 ueDl->mimoInfo.ri = 1;
9275 ueDl->mimoInfo.ri = 1;
9276 #endif /* TFU_UPGRADE */
9277 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9278 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9280 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9282 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9284 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9289 /***********************************************************
9291 * Func : rgSCHCmnUpdUeMimoInfo
9293 * Desc : Updates UL and DL Ue Information
9301 **********************************************************/
9303 PRIVATE Void rgSCHCmnUpdUeMimoInfo
9308 RgSchCmnCell *cellSchd
9311 PRIVATE Void rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd)
9315 RgSchCmnCell *cellSchd;
9318 TRC2(rgSCHCmnUpdUeMimoInfo)
9320 if(ueCfg->txMode.pres)
9322 if((ueCfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9323 (ueCfg->txMode.txModeEnum ==RGR_UE_TM_4))
9325 if(ueCfg->ueCodeBookRstCfg.pres)
9328 rgSCHCmnComputeRank(ueCfg->txMode.txModeEnum,
9329 ueCfg->ueCodeBookRstCfg.pmiBitMap, cell->numTxAntPorts);
9333 ueDl->mimoInfo.ri = 1;
9338 ueDl->mimoInfo.ri = 1;
9343 ueDl->mimoInfo.ri = 1;
9347 ueDl->mimoInfo.ri = 1;
9348 #endif /*TFU_UPGRADE */
9349 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
9350 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
9354 /***********************************************************
9356 * Func : rgSCHCmnUpdUeUlCqiInfo
9358 * Desc : Updates UL and DL Ue Information
9366 **********************************************************/
9368 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo
9373 RgSchCmnUe *ueSchCmn,
9374 RgSchCmnCell *cellSchd,
9378 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd, isEcp)
9382 RgSchCmnUe *ueSchCmn;
9383 RgSchCmnCell *cellSchd;
9388 TRC2(rgSCHCmnUpdUeUlCqiInfo)
9391 if(ue->srsCb.srsCfg.type == RGR_SCH_SRS_SETUP)
9393 if(ue->ul.ulTxAntSel.pres)
9395 ueUl->crntUlCqi[ue->srsCb.selectedAnt] = cellSchd->ul.dfltUlCqi;
9396 ueUl->validUlCqi = ueUl->crntUlCqi[ue->srsCb.selectedAnt];
9400 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9401 ueUl->validUlCqi = ueUl->crntUlCqi[0];
9403 ue->validTxAnt = ue->srsCb.selectedAnt;
9407 ueUl->validUlCqi = cellSchd->ul.dfltUlCqi;
9411 ueUl->ulLaCb.cqiBasediTbs = rgSchCmnUlCqiToTbsTbl[isEcp]
9412 [ueUl->validUlCqi] * 100;
9413 ueUl->ulLaCb.deltaiTbs = 0;
9417 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9418 #endif /*TFU_UPGRADE */
9419 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
9420 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9422 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9426 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
9431 /***********************************************************
9433 * Func : rgSCHCmnUpdUeCatCfg
9435 * Desc : Re-derives the per-UE DL/UL limits that depend on the
*        UE category after a category reconfiguration: max DL
*        TB bits, max TB size (layer-dependent for CAT 6/7),
*        max soft-buffer size, UL max CQI (64QAM support) and
*        the per-TTI UL byte cap.
9443 **********************************************************/
9445 PRIVATE Void rgSCHCmnUpdUeCatCfg
9451 PRIVATE Void rgSCHCmnUpdUeCatCfg(ue, cell)
9456 RgSchDlHqEnt *hqE = NULLP;
9457 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9458 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9459 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
9460 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
9462 TRC2(rgSCHCmnUpdUeCatCfg)
9464 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
9466 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
/* Effective rank cannot exceed the cell's Tx antenna port count */
9469 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
/* CAT 6/7 with 4 layers use the alternate (larger) max-TB entry;
 * presumably maxDlBits[1] is the 4-layer column — TODO confirm */
9470 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
9471 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
9472 && (RG_SCH_MAX_TX_LYRS_4 == ri))
9474 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
/* (else) default max-TB entry */
9478 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
/* Soft-buffer size per HARQ process (divisor elided in this view) */
9481 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
/* UL CQI cap mirrors rgSCHCmnUpdUeUlCqiInfo: no 64QAM => 16QAM max */
9483 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9485 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9489 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
/* Per-TTI UL byte budget scaled by the configured bits ratio
 * (denominator includes the /8 bits->bytes conversion) */
9491 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9492 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
9497 * @brief UE reconfiguration for scheduler.
9501 * Function : rgSCHCmnRgrUeRecfg
9503 * This functions updates UE specific scheduler
9504 * information upon UE reconfiguration. Each bit in
* ueRecfg->ueRecfgTypes selects one reconfiguration aspect
* (Tx mode, CSG, UE category, CQI reporting, power, QoS),
* after which the specific UL/DL schedulers, DLFS and SPS
* modules are given a chance to apply their own updates.
9506 * @param[in] RgSchCellCb *cell
9507 * @param[in] RgSchUeCb *ue
9508 * @param[in] RgrUeRecfg *ueRecfg
9509 * @param[out] RgSchErrInfo *err
9515 PUBLIC S16 rgSCHCmnRgrUeRecfg
9519 RgrUeRecfg *ueRecfg,
9523 PUBLIC S16 rgSCHCmnRgrUeRecfg(cell, ue, ueRecfg, err)
9526 RgrUeRecfg *ueRecfg;
9530 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9533 TRC2(rgSCHCmnRgrUeRecfg);
9534 /* Basic validations */
/* Transmission-mode change (TFU_UPGRADE variant passes antenna count) */
9535 if (ueRecfg->ueRecfgTypes & RGR_UE_TXMODE_RECFG)
9538 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, cell->numTxAntPorts);
9540 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg);
9541 #endif /* TFU_UPGRADE */
/* CSG (closed subscriber group) membership update */
9543 if(ueRecfg->ueRecfgTypes & RGR_UE_CSG_PARAM_RECFG)
9545 ue->csgMmbrSta = ueRecfg->csgMmbrSta;
9547 /* Changes for UE Category reconfiguration feature */
9548 if(ueRecfg->ueRecfgTypes & RGR_UE_UECAT_RECFG)
9550 rgSCHCmnUpdUeCatCfg(ue, cell);
/* Aperiodic CQI reporting config is stored on the primary cell */
9552 if (ueRecfg->ueRecfgTypes & RGR_UE_APRD_DLCQI_RECFG)
9554 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
9555 pCellInfo->acqiCb.aCqiCfg = ueRecfg->aprdDlCqiRecfg;
/* Periodic CQI: only MOD10/MOD20 reporting modes are supported */
9558 if (ueRecfg->ueRecfgTypes & RGR_UE_PRD_DLCQI_RECFG)
9560 if ((ueRecfg->prdDlCqiRecfg.pres == TRUE)
9561 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD10)
9562 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD20))
/* NOTE(review): "CRNIT" in this log string is a typo for "CRNTI";
 * left untouched here since it is runtime text */
9564 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unsupported periodic CQI "
9565 "reporting mode %d for old CRNIT:%d",
9566 (int)ueRecfg->prdDlCqiRecfg.prdModeEnum,ueRecfg->oldCrnti);
9567 err->errCause = RGSCHERR_SCH_CFG;
9570 ue->dl.ueDlCqiCfg.prdCqiCfg = ueRecfg->prdDlCqiRecfg;
/* UL power-control reconfiguration */
9574 if (ueRecfg->ueRecfgTypes & RGR_UE_ULPWR_RECFG)
9576 if (rgSCHPwrUeRecfg(cell, ue, ueRecfg) != ROK)
9578 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9579 "Power Reconfiguration Failed for OLD CRNTI:%d",ueRecfg->oldCrnti);
/* QoS (AMBR/UE bitrate): both zero is an invalid configuration */
9584 if (ueRecfg->ueRecfgTypes & RGR_UE_QOS_RECFG)
9586 /* Uplink Sched related Initialization */
9587 if ((ueRecfg->ueQosRecfg.dlAmbr == 0) && (ueRecfg->ueQosRecfg.ueBr == 0))
9589 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Ul Ambr and DL Ambr "
9590 "configured as 0 for OLD CRNTI:%d",ueRecfg->oldCrnti);
9591 err->errCause = RGSCHERR_SCH_CFG;
/* Rates are converted to per-refresh-period budgets (/100:
 * presumably because rates are configured per 100ms — TODO confirm) */
9594 ue->ul.cfgdAmbr = (ueRecfg->ueQosRecfg.ueBr * \
9595 RG_SCH_CMN_REFRESH_TIME)/100;
9596 /* Downlink Sched related Initialization */
9597 ue->dl.ambrCfgd = (ueRecfg->ueQosRecfg.dlAmbr * \
9598 RG_SCH_CMN_REFRESH_TIME)/100;
9599 /* Fix: syed Update the effAmbr and effUeBR fields w.r.t the
9600 * new QOS configuration */
9601 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9602 /* Fix: syed align multiple UEs to refresh at same time */
9603 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9604 rgSCHCmnApplyUeRefresh(cell, ue);
9605 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
/* Dispatch to the EMTC-specific UL/DL schedulers for EMTC UEs... */
9608 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9610 if ((cellSchCmn->apisEmtcUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9612 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9613 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId)
9616 if ((cellSchCmn->apisEmtcDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9618 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9619 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
/* ...otherwise the regular specific schedulers */
9626 if ((cellSchCmn->apisUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9628 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9629 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9632 if ((cellSchCmn->apisDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9634 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9635 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9639 /* DLFS UE Config */
9640 if (cellSchCmn->dl.isDlFreqSel)
9642 if ((cellSchCmn->apisDlfs->rgSCHDlfsUeRecfg(cell, ue, \
9643 ueRecfg, err)) != ROK)
9645 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9646 "DLFS UE re-config FAILED for CRNTI:%d",ue->ueId);
9652 /* Invoke re-configuration on SPS module */
9653 if (rgSCHCmnSpsUeRecfg(cell, ue, ueRecfg, err) != ROK)
9655 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9656 "DL SPS ReCFG FAILED for UE CRNTI:%d", ue->ueId);
9662 } /* rgSCHCmnRgrUeRecfg*/
9664 /***********************************************************
9666 * Func : rgSCHCmnUlUeDelAllocs
9668 * Desc : Deletion of all UE allocations: walks every UL HARQ
*        process of the UE, frees any live PUSCH allocation
*        (clearing the UL-SPS current-allocation pointers if
*        they reference it) and unlinks the process from the
*        adaptive-retransmission list.
9676 **********************************************************/
9678 PRIVATE Void rgSCHCmnUlUeDelAllocs
9684 PRIVATE Void rgSCHCmnUlUeDelAllocs(cell, ue)
9689 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
9690 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
9693 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
9695 TRC2(rgSCHCmnUlUeDelAllocs);
9697 for (i = 0; i < ueUl->hqEnt.numHqPrcs; ++i)
9699 RgSchUlHqProcCb *proc = rgSCHUhmGetUlHqProc(cell, ue, i);
9702 /* proc can't be NULL here */
9710 /* Added Insure Fixes Of reading Dangling memory.NULLed crntAlloc */
/* If UL-SPS still points at this allocation, clear the reference
 * before the allocation memory is released below */
9712 if(proc->alloc == ulSpsUe->ulSpsSchdInfo.crntAlloc)
9714 ulSpsUe->ulSpsSchdInfo.crntAlloc = NULLP;
9715 ulSpsUe->ulSpsSchdInfo.crntAllocSf = NULLP;
/* Return the allocation to the UL subframe it was granted in
 * (EMTC build variant passes the isEmtcUe flag) */
9719 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9720 proc->alloc,ue->isEmtcUe);
9722 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9725 /* PHY probably needn't be intimated since
9726 * whatever intimation it needs happens at the last minute
9729 /* Fix: syed Adaptive Msg3 Retx crash. Remove the harqProc
9730 * from adaptive retx List. */
9731 if (proc->reTxLnk.node)
9734 //TODO_SID: Need to take care
9735 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
9736 proc->reTxLnk.node = (PTR)NULLP;
9744 /***********************************************************
9746 * Func : rgSCHCmnDelUeFrmRefreshQ
9748 * Desc : Removes a UE from the refresh queue (the timer queue
9749 * that periodically triggers the UE to refresh its GBR
*        and AMBR budgets). Note: the original description
*        said "Adds a UE" — this function deletes the timer
*        entry (cmTmrDelete-style arg setup below).
9758 **********************************************************/
9760 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ
9766 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ(cell, ue)
9771 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
9773 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
9775 TRC2(rgSCHCmnDelUeFrmRefreshQ);
9777 #ifdef RGL_SPECIFIC_CHANGES
/* Maintain the per-offset UE count used to spread refreshes evenly */
9778 if(ue->refreshOffset < RGSCH_MAX_REFRESH_GRPSZ)
9780 if(cell->refreshUeCnt[ue->refreshOffset])
9782 cell->refreshUeCnt[ue->refreshOffset]--;
/* Build the timer-queue argument identifying this UE's refresh timer */
9788 cmMemset((U8 *)&arg, 0, sizeof(arg));
9789 arg.tqCp = &sched->tmrTqCp;
9790 arg.tq = sched->tmrTq;
9791 arg.timers = &ueSchd->tmr;
9795 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
9801 /***********************************************************
9803 * Func : rgSCHCmnUeCcchSduDel
9805 * Desc : Clear CCCH SDU scheduling context: unlink the UE from
*        the pending CCCH SDU list, or if a CCCH SDU HARQ
*        process is active, release its PDCCH and free the
*        HARQ TB from the retransmission/transmission lists.
9813 **********************************************************/
9815 PRIVATE Void rgSCHCmnUeCcchSduDel
9821 PRIVATE Void rgSCHCmnUeCcchSduDel(cell, ueCb)
9826 RgSchDlHqEnt *hqE = NULLP;
9827 RgSchDlHqProcCb *ccchSduHqP = NULLP;
9828 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
9830 TRC2(rgSCHCmnUeCcchSduDel);
9832 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
9837 ccchSduHqP = hqE->ccchSduProc;
/* Case 1: SDU is still queued for first transmission */
9838 if(ueCb->ccchSduLnk.node != NULLP)
9840 /* Remove the ccchSduProc if it is in the Tx list */
9841 cmLListDelFrm(&(cell->ccchSduUeLst), &(ueCb->ccchSduLnk));
9842 ueCb->ccchSduLnk.node = NULLP;
/* Case 2: a HARQ process already carries the CCCH SDU */
9844 else if(ccchSduHqP != NULLP)
9846 /* Fix for crash due to stale pdcch. Release ccch pdcch*/
9847 if(ccchSduHqP->pdcch)
/* Move the PDCCH from the subframe's in-use list back to the
 * cell's free pool */
9849 cmLListDelFrm(&ccchSduHqP->subFrm->pdcchInfo.pdcchs,
9850 &ccchSduHqP->pdcch->lnk);
9851 cmLListAdd2Tail(&cell->pdcchLst, &ccchSduHqP->pdcch->lnk);
9852 ccchSduHqP->pdcch = NULLP;
9854 if(ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node != NULLP)
9856 /* Remove the ccchSduProc if it is in the retx list */
9857 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
9858 &ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk);
9859 /* ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node = NULLP; */
9860 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
/* Otherwise, if scheduled in a subframe awaiting feedback, remove
 * from the Tx tracking and release TB 0 */
9862 else if ((ccchSduHqP->subFrm != NULLP) &&
9863 (ccchSduHqP->hqPSfLnk.node != NULLP))
9865 rgSCHUtlDlHqPTbRmvFrmTx(ccchSduHqP->subFrm,
9866 ccchSduHqP, 0, FALSE);
9867 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
9877 * @brief UE deletion for scheduler.
9881 * Function : rgSCHCmnUeDel
9883 * This functions deletes all scheduler information
9884 * pertaining to an UE: CCCH SDU context, refresh-queue
* entry, UL allocations, RACH info, specific-scheduler UL/DL
* contexts (EMTC-aware), secondary cells, power/SPS/DLFS
* contexts, pending UL allocation records, LCG blocks and
* finally the per-cell scheduler control block itself.
9886 * @param[in] RgSchCellCb *cell
9887 * @param[in] RgSchUeCb *ue
9891 PUBLIC Void rgSCHCmnUeDel
9897 PUBLIC Void rgSCHCmnUeDel(cell, ue)
9902 RgSchDlHqEnt *hqE = NULLP;
9903 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9905 RgSchCmnAllocRecord *allRcd;
9907 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9909 TRC2(rgSCHCmnUeDel);
/* Nothing to clean if the common scheduler never configured this UE */
9911 if (RG_SCH_CMN_GET_UE(ue,cell) == NULLP)
9913 /* Common scheduler config has not happened yet */
9916 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
9919 /* UE Free can be triggered before MSG4 done when dlHqE is not updated */
9923 rgSCHEmtcCmnUeCcchSduDel(cell, ue);
9928 rgSCHCmnUeCcchSduDel(cell, ue);
9931 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9933 rgSCHCmnUlUeDelAllocs(cell, ue);
9935 rgSCHCmnDelRachInfo(cell, ue);
/* Specific UL scheduler teardown (EMTC vs regular API table) */
9938 if(TRUE == ue->isEmtcUe)
9940 cellSchCmn->apisEmtcUl->rgSCHFreeUlUe(cell, ue);
9945 cellSchCmn->apisUl->rgSCHFreeUlUe(cell, ue);
/* Release all configured secondary cells (index 0 is the PCell) */
9950 for(idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
9952 if(ue->cellInfo[idx] != NULLP)
9954 rgSCHSCellDelUeSCell(cell,ue,idx);
/* Specific DL scheduler teardown */
9961 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9963 cellSchCmn->apisEmtcDl->rgSCHFreeDlUe(cell, ue);
9968 cellSchCmn->apisDl->rgSCHFreeDlUe(cell, ue);
9970 rgSCHPwrUeDel(cell, ue);
9973 rgSCHCmnSpsUeDel(cell, ue);
9974 #endif /* LTEMAC_SPS*/
9977 rgSchCmnDlSfHqDel(ue, cell);
9979 /* DLFS UE delete */
9980 if (cellSchCmn->dl.isDlFreqSel)
9982 cellSchCmn->apisDlfs->rgSCHDlfsUeDel(cell, ue);
/* Drain and free the UE's UL allocation-record list */
9984 node = ueUl->ulAllocLst.first;
9986 /* ccpu00117052 - MOD - Passing double pointer in all the places of
9987 rgSCHUtlFreeSBuf function call for proper NULLP assignment*/
9990 allRcd = (RgSchCmnAllocRecord *)node->node;
9992 cmLListDelFrm(&ueUl->ulAllocLst, &allRcd->lnk);
9993 rgSCHUtlFreeSBuf(cell->instIdx,
9994 (Data**)(&allRcd), (sizeof(RgSchCmnAllocRecord)));
/* Free per-LCG scheduler blocks */
9997 for(cnt = 0; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
9999 if (ue->ul.lcgArr[cnt].sch != NULLP)
10001 rgSCHUtlFreeSBuf(cell->instIdx,
10002 (Data**)(&(ue->ul.lcgArr[cnt].sch)), (sizeof(RgSchCmnLcg)));
10006 /* Fix : syed Moved hqEnt deinit to rgSCHCmnDlDeInitHqEnt */
/* Map cellId to the per-UE cell index, then free the common
 * scheduler control block for this UE on this cell */
10007 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId) & (CM_LTE_MAX_CELLS - 1));
10008 rgSCHUtlFreeSBuf(cell->instIdx,
10009 (Data**)(&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch))), (sizeof(RgSchCmnUe)));
10011 } /* rgSCHCmnUeDel */
10015 * @brief This function handles the common code rate configurations
10016 * done as part of RgrCellCfg/RgrCellRecfg.
10020 * Function: rgSCHCmnDlCnsdrCmnRt
10021 * Purpose: Derives, from the configured common-channel code
10022 * rates, the bits-per-RB for BCCH/PCCH/RAR, the iTbs to use
* for 2-RB and 3-RB common-channel grants, the PDCCH
* aggregation level for common DCIs, and the CCCH CQI.
10024 * Invoked by: Scheduler
10026 * @param[in] RgSchCellCb *cell
10027 * @param[in] RgrDlCmnCodeRateCfg *dlCmnCodeRate
10032 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt
10035 RgrDlCmnCodeRateCfg *dlCmnCodeRate
10038 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt(cell, dlCmnCodeRate)
10040 RgrDlCmnCodeRateCfg *dlCmnCodeRate;
10043 RgSchCmnCell *cellDl = RG_SCH_CMN_GET_CELL(cell);
10050 TRC2(rgSCHCmnDlCnsdrCmnRt);
10052 /* code rate is bits per 1024 phy bits, since modl'n scheme is 2. it is
10053 * bits per 1024/2 REs */
/* noResPerRb[3] = REs per RB at CFI 3 (worst-case PDCCH region) */
10054 if (dlCmnCodeRate->bcchPchRaCodeRate != 0)
10056 bitsPerRb = ((dlCmnCodeRate->bcchPchRaCodeRate * 2) *
10057 cellDl->dl.noResPerRb[3])/1024;
10061 bitsPerRb = ((RG_SCH_CMN_DEF_BCCHPCCH_CODERATE * 2) *
10062 cellDl->dl.noResPerRb[3])/1024;
10064 /* Store bitsPerRb in cellDl->dl to use later to determine
10065 * Number of RBs for UEs with SI-RNTI, P-RNTI and RA-RNTI */
10066 cellDl->dl.bitsPerRb = bitsPerRb;
10067 /* ccpu00115595 end*/
10068 /* calculate the ITbs for 2 RBs. Initialize ITbs to MAX value */
/* Scan rgTbSzTbl (single-layer) for the largest iTbs whose TB size
 * still fits the 2-RB bit budget; i<9 limits to QPSK iTbs range */
10071 bitsPer2Rb = bitsPerRb * rbNum;
10072 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer2Rb))
10075 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs2Rbs = 0) :
10076 (cellDl->dl.cmnChITbs.iTbs2Rbs = i-1);
10078 /* calculate the ITbs for 3 RBs. Initialize ITbs to MAX value */
10081 bitsPer3Rb = bitsPerRb * rbNum;
10082 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer3Rb))
10085 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs3Rbs = 0) :
10086 (cellDl->dl.cmnChITbs.iTbs3Rbs = i-1);
/* Count the raw DCI format 1A payload bits (field-by-field) */
10089 pdcchBits = 1 + /* Flag for format0/format1a differentiation */
10090 1 + /* Localized/distributed VRB assignment flag */
10093 3 + /* Harq process Id */
10095 4 + /* Harq process Id */
10096 2 + /* UL Index or DAI */
10098 1 + /* New Data Indicator */
10101 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
10102 (cell->bwCfg.dlTotalBw + 1))/2);
10103 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
10104 Since VRB is local */
10105 /* For TDD consider DAI */
10107 /* Convert the pdcchBits to actual pdcchBits required for transmission */
10108 if (dlCmnCodeRate->pdcchCodeRate != 0)
10110 pdcchBits = (pdcchBits * 1024)/dlCmnCodeRate->pdcchCodeRate;
10111 if (pdcchBits <= 288) /* 288 : Num of pdcch bits for aggrLvl=4 */
10113 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
10115 else /* 576 : Num of pdcch bits for aggrLvl=8 */
10117 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL8;
/* No PDCCH code rate configured: default to aggregation level 4 */
10122 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
/* CCCH CQI: 0 means "not configured", keep the default (elided) */
10124 if (dlCmnCodeRate->ccchCqi == 0)
10130 cellDl->dl.ccchCqi = dlCmnCodeRate->ccchCqi;
10137 * @brief This function handles the configuration of cell for the first
10138 * time by the scheduler.
10142 * Function: rgSCHCmnDlRgrCellCfg (TDD variant)
10143 * Purpose: Configuration received is stored into the data structures
10144 * Also, update the scheduler with the number of frames of
10145 * RACH preamble transmission. Additionally classifies every
* DL subframe (normal / special with or without data), sizes
* CCE counts per subframe, initializes RACH response, PHICH,
* HARQ feedback timing and CQI->TBS/efficiency tables.
10147 * Invoked by: BO and Scheduler
10149 * @param[in] RgSchCellCb* cell
10150 * @param[in] RgrCellCfg* cfg
10155 PRIVATE S16 rgSCHCmnDlRgrCellCfg
10162 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10168 RgSchCmnCell *cellSch;
10173 U8 maxDlSubfrms = cell->numDlSubfrms;
10174 U8 splSubfrmIdx = cfg->spclSfCfgIdx;
10177 RgSchTddSubfrmInfo subfrmInfo = rgSchTddMaxUlSubfrmTbl[cell->ulDlCfgIdx];
10188 TRC2(rgSCHCmnDlRgrCellCfg);
10191 cellSch = RG_SCH_CMN_GET_CELL(cell);
/* RA response window in subframes depends on the preamble format */
10192 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->\
10193 rachCfg.preambleFormat];
10194 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10195 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10197 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10198 3 TTI (MAX L1+L2 processing delay at the UE) */
10199 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10200 rgSchCmnHarqRtt[cell->ulDlCfgIdx] + 3;
10201 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10202 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
/* 0 means "not configured": fall back to the compile-time default */
10203 if (cfg->maxUePerDlSf == 0)
10205 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10207 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10213 if (cell->bwCfg.dlTotalBw <= 10)
10223 /* DwPTS Scheduling Changes Start */
10224 cellSch->dl.splSfCfg = splSubfrmIdx;
/* Whether DL data may be sent in the special subframe depends on
 * the special-subframe config index and CP length (36.213 7.1.7) */
10226 if (cfg->isCpDlExtend == TRUE)
10228 if((0 == splSubfrmIdx) || (4 == splSubfrmIdx) ||
10229 (7 == splSubfrmIdx) || (8 == splSubfrmIdx)
10232 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10236 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10241 /* Refer to 36.213 Section 7.1.7 */
10242 if((0 == splSubfrmIdx) || (5 == splSubfrmIdx))
10244 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10248 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10251 /* DwPTS Scheduling Changes End */
/* CFI used in special subframes, clamped by bandwidth */
10253 splSfCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10254 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
/* Classify each DL subframe and precompute its CCE count */
10256 for (sfCount = 0; sfCount < maxDlSubfrms; sfCount++)
10258 sf = cell->subFrms[sfCount];
10259 /* Sfcount matches the first special subframe occurs at Index 0
10260 * or subsequent special subframes */
10261 if(subfrmInfo.switchPoints == 1)
10263 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10264 RG_SCH_CMN_10_MS_PRD, &subfrmInfo);
10268 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10269 RG_SCH_CMN_5_MS_PRD, &subfrmInfo);
10271 if(isSplfrm == TRUE)
10274 /* DwPTS Scheduling Changes Start */
10275 if (cell->splSubfrmCfg.isDlDataAllowed == TRUE)
10277 sf->sfType = RG_SCH_SPL_SF_DATA;
10281 sf->sfType = RG_SCH_SPL_SF_NO_DATA;
10283 /* DwPTS Scheduling Changes End */
10287 /* DwPTS Scheduling Changes Start */
10288 if (sf->sfNum != 0)
10290 sf->sfType = RG_SCH_DL_SF;
10294 sf->sfType = RG_SCH_DL_SF_0;
10296 /* DwPTS Scheduling Changes End */
10299 /* Calculate the number of CCEs per subframe in the cell */
10300 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][sf->sfNum];
10301 if(cell->dynCfiCb.isDynCfiEnb == TRUE)
10303 /* In case if Dynamic CFI feature is enabled, default CFI
10304 * value 1 is used */
10305 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][1];
10309 if (sf->sfType == RG_SCH_SPL_SF_DATA)
10311 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
10315 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi)];
10320 /* Intialize the RACH response scheduling related infromation */
10321 if(rgSCHCmnDlRachInfoInit(cell) != ROK)
10326 /* Allocate PRACH preamble list */
10327 rgSCHCmnDlCreateRachPrmLst(cell);
10329 /* Initialize PHICH offset information */
10330 rgSCHCmnDlPhichOffsetInit(cell);
10332 /* Update the size of HARQ ACK/NACK feedback table */
10333 /* The array size is increased by 2 to have enough free indices, where other
10334 * indices are busy waiting for HARQ feedback */
10335 cell->ackNackFdbkArrSize = rgSchTddANFdbkMapTbl[cell->ulDlCfgIdx] + 2;
10337 /* Initialize expected HARQ ACK/NACK feedback time */
10338 rgSCHCmnDlANFdbkInit(cell);
10340 /* Initialize UL association set index */
10341 if(cell->ulDlCfgIdx != 0)
10343 rgSCHCmnDlKdashUlAscInit(cell);
/* DwPTS/UpPTS durations from 36.211 special-subframe table, chosen
 * by DL/UL CP combination */
10346 if (cfg->isCpDlExtend == TRUE)
10348 cp = RG_SCH_CMN_EXT_CP;
10350 cell->splSubfrmCfg.dwPts =
10351 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlDwPts;
10353 if ( cell->splSubfrmCfg.dwPts == 0 )
10355 cell->isDwPtsCnted = FALSE;
10359 cell->isDwPtsCnted = TRUE;
10362 if(cfg->isCpUlExtend == TRUE)
10364 cell->splSubfrmCfg.upPts =
10365 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlExtUpPts;
10369 cell->splSubfrmCfg.upPts =
10370 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlNorUpPts;
10375 cp = RG_SCH_CMN_NOR_CP;
10377 cell->splSubfrmCfg.dwPts =
10378 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlDwPts;
10379 cell->isDwPtsCnted = TRUE;
10381 if(cfg->isCpUlExtend == TRUE)
10383 cell->splSubfrmCfg.upPts =
10384 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlExtUpPts;
10388 cell->splSubfrmCfg.upPts =
10389 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlNorUpPts;
10393 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10394 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++,cfiIdx++)
10396 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
10397 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10398 [cell->numTxAntPorts]][cfiIdx];
10399 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10400 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10401 [cell->numTxAntPorts]][cfiIdx];
10404 /* Initializing the values of CFI parameters */
10405 if(cell->dynCfiCb.isDynCfiEnb)
10407 /* If DCFI is enabled, current CFI value will start from 1 */
10408 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10412 /* If DCFI is disabled, current CFI value is set as default max allowed CFI value */
10413 cellSch->dl.currCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10414 cellSch->dl.newCfi = cellSch->dl.currCfi;
10417 /* Include CRS REs while calculating Efficiency
10418 * The number of Resource Elements occupied by CRS depends on Number of
10419 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10420 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10421 * details of the same. Please note that PDCCH overlap symbols would not
10422 * considered in CRS REs deduction */
10423 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10425 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10426 - numPdcchSym) *RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
10429 /* DwPTS Scheduling Changes Start */
10430 antPortIdx = (cell->numTxAntPorts == 1)? 0:
10431 ((cell->numTxAntPorts == 2)? 1: 2);
10433 if (cp == RG_SCH_CMN_NOR_CP)
10435 splSfIdx = (splSubfrmIdx == 4)? 1: 0;
10439 splSfIdx = (splSubfrmIdx == 3)? 1: 0;
10442 numCrs = rgSchCmnDwptsCrs[splSfIdx][antPortIdx];
/* REs available for data in DwPTS per RB, per CFI */
10444 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI-1; cfi++)
10446 /* If CFI is 2 and Ant Port is 4, don't consider the sym 1 CRS REs */
10447 if (antPortIdx == 2 && cfi == 2)
10451 cellSch->dl.numReDwPts[cfi] = ((cell->splSubfrmCfg.dwPts - cfi)*
10452 RB_SCH_CMN_NUM_SCS_PER_RB) - numCrs;
10454 /* DwPTS Scheduling Changes End */
10456 if (cfg->maxDlBwPerUe == 0)
10458 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10462 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10464 if (cfg->maxDlRetxBw == 0)
10466 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10470 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10472 /* Fix: MUE_PERTTI_DL*/
10473 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10474 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10475 if (cfg->maxUePerDlSf == 0)
10477 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10479 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10480 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10481 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10483 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10484 "Invalid configuration !: "
10485 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10486 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10490 else if (!cfg->maxCcchPerDlSf)
10492 /* ccpu00143032: maxCcchPerDlSf 0 means not configured by application
10493 * hence setting to maxUePerDlSf. If maxCcchPerDlSf is 0 then scheduler
10494 * does't consider CCCH allocation in MaxUePerTti cap. Hence more than
10495 * 4UEs getting schduled & SCH expects >16 Hq PDUs in a TTI which causes
10496 * FLE crash in PHY as PHY has limit of 16 max*/
10497 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10501 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
10503 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10508 /*ccpu00118273 - ADD - start */
10509 cmLListInit(&cellSch->dl.msg4RetxLst);
10511 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10514 #ifdef RG_PHASE2_SCHED
10515 if (cellSch->apisDlfs == NULLP) /* DFLS specific initialization */
10517 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10519 if (cfg->dlfsCfg.isDlFreqSel)
10521 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10527 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10530 /* Power related configuration */
10531 ret = rgSCHPwrCellCfg(cell, cfg);
10537 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10538 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10539 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10540 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10541 cellSch->dl.msg4pAVal = cfg->msg4pAVal;
10544 #else /* LTE_TDD */
10546 * @brief This function handles the configuration of cell for the first
10547 * time by the scheduler.
10551 * Function: rgSCHCmnDlRgrCellCfg (FDD variant, non-LTE_TDD build)
10552 * Purpose: Configuration received is stored into the data structures
10553 * Also, update the scheduler with the number of frames of
10554 * RACH preamble transmission. FDD counterpart of the TDD
* version above: no special-subframe handling; initializes
* CQI->TBS/efficiency tables, CFI state, per-UE BW caps,
* common code rates, retx lists, DLFS and power config.
10556 * Invoked by: BO and Scheduler
10558 * @param[in] RgSchCellCb* cell
10559 * @param[in] RgrCellCfg* cfg
10560 * @param[in] RgSchErrInfo* err
10565 PRIVATE S16 rgSCHCmnDlRgrCellCfg
10572 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10579 RgSchCmnCell *cellSch;
10586 TRC2(rgSCHCmnDlRgrCellCfg);
10588 cellSch = RG_SCH_CMN_GET_CELL(cell);
10590 /* Initialize the parameters with the ones received in the */
10591 /* configuration. */
10593 /* Added matrix 'rgRaPrmblToRaFrmTbl' for computation of RA
10594 * sub-frames from preamble format */
10595 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->rachCfg.preambleFormat];
10597 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10598 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10600 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10601 3 TTI (MAX L1+L2 processing delay at the UE) */
/* Index 7 of rgSchCmnHarqRtt: presumably the FDD entry — TODO confirm */
10602 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10603 rgSchCmnHarqRtt[7] + 3;
10605 if (cell->bwCfg.dlTotalBw <= 10)
/* Cyclic-prefix selection drives table lookups below */
10616 if (cell->isCpDlExtend == TRUE)
10618 cp = RG_SCH_CMN_EXT_CP;
10623 cp = RG_SCH_CMN_NOR_CP;
10627 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10628 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, cfiIdx++)
10630 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
/* EMTC builds keep a parallel CQI->TBS table */
10632 cellSch->dl.emtcCqiToTbsTbl[0][cfi] = rgSchEmtcCmnCqiToTbs[0][cp][cfiIdx];
10634 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10635 [cell->numTxAntPorts]][cfiIdx];
10636 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10638 cellSch->dl.emtcCqiToTbsTbl[1][cfi] = rgSchEmtcCmnCqiToTbs[1][cp][cfiIdx];
10640 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10641 [cell->numTxAntPorts]][cfiIdx];
10644 /* Initializing the values of CFI parameters */
10645 if(cell->dynCfiCb.isDynCfiEnb)
10647 /* If DCFI is enabled, current CFI value will start from 1 */
10648 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10652 /* If DCFI is disabled, current CFI value is set as default CFI value */
10653 cellSch->dl.currCfi = cellSch->cfiCfg.cfi;
10654 cellSch->dl.newCfi = cellSch->dl.currCfi;
10657 /* Include CRS REs while calculating Efficiency
10658 * The number of Resource Elements occupied by CRS depends on Number of
10659 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10660 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10661 * details of the same. Please note that PDCCH overlap symbols would not
10662 * considered in CRS REs deduction */
10663 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10665 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10666 - numPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
/* 0 means "not configured": use compile-time defaults */
10669 if (cfg->maxDlBwPerUe == 0)
10671 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10675 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10677 if (cfg->maxDlRetxBw == 0)
10679 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10683 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10686 /* Fix: MUE_PERTTI_DL*/
10687 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10688 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10689 if (cfg->maxUePerDlSf == 0)
10691 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10693 /* Fix: MUE_PERTTI_DL syed validating Cell Configuration */
10694 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10696 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10697 "FAILED MaxUePerDlSf(%u) < MaxDlUeNewTxPerTti(%u)",
10698 cellSch->dl.maxUePerDlSf,
10699 cellSch->dl.maxUeNewTxPerTti);
10702 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10703 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10705 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid configuration !: "
10706 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10707 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10711 else if (!cfg->maxCcchPerDlSf)
10713 /* ccpu00143032: maxCcchPerDlSf 0 means not configured by application
10714 * hence setting to maxUePerDlSf. If maxCcchPerDlSf is 0 then scheduler
10715 * does't consider CCCH allocation in MaxUePerTti cap. Hence more than
10716 * 4UEs getting schduled & SCH expects >16 Hq PDUs in a TTI which causes
10717 * FLE crash in PHY as PHY has limit of 16 max*/
10718 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10722 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
10726 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10730 cmLListInit(&cellSch->dl.msg4RetxLst);
10732 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10735 #ifdef RG_PHASE2_SCHED
10736 if (cellSch->apisDlfs == NULLP) /* DFLS specific initialization */
10738 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10740 if (cfg->dlfsCfg.isDlFreqSel)
10742 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10748 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10751 /* Power related configuration */
10752 ret = rgSCHPwrCellCfg(cell, cfg);
10758 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10759 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10760 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10761 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10762 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10765 #endif /* LTE_TDD */
10767 /***********************************************************
10769 * Func : rgSCHCmnUlCalcReqRbCeil
10771 * Desc : Calculate RB required to satisfy 'bytes' for
10773 * Returns number of RBs such that requirement
10774 * is necessarily satisfied (does a 'ceiling'
10777 * Ret : Required RBs (U8)
* Notes: bytes*8*1024/eff gives required REs, assuming
*        rgSchCmnUlCqiTbl[cqi].eff is bits per 1024 REs at
*        that CQI (consistent with the *1024 scaling here);
*        the RE count is then ceiled into whole RBs.
10783 **********************************************************/
10785 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil
10789 RgSchCmnUlCell *cellUl
10792 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil(bytes, cqi, cellUl)
10795 RgSchCmnUlCell *cellUl;
10798 U32 numRe = RGSCH_CEIL((bytes * 8) * 1024, rgSchCmnUlCqiTbl[cqi].eff);
10799 TRC2(rgSCHCmnUlCalcReqRbCeil);
10800 RETVALUE((U8)RGSCH_CEIL(numRe, RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl)));
10803 /***********************************************************
10805 * Func : rgSCHCmnPrecompMsg3Vars
10807 * Desc : Precomputes the following for msg3 allocation:
10808 * 1. numSb and Imcs for msg size A
10809 * 2. numSb and Imcs otherwise
10813 * Notes: The corresponding vars in cellUl struct is filled
*        in (ra.prmblANumSb/prmblAIMcs for the minimum-grant
*        case, ra.prmblBNumSb/prmblBIMcs for msgSzA); subband
*        counts are rounded up to valid 2^a*3^b*5^c DFT sizes
*        via rgSchCmnMult235Tbl.
10818 **********************************************************/
10820 PRIVATE S16 rgSCHCmnPrecompMsg3Vars
10822 RgSchCmnUlCell *cellUl,
10829 PRIVATE S16 rgSCHCmnPrecompMsg3Vars(cellUl, ccchCqi, msgSzA, sbSize, isEcp)
10830 RgSchCmnUlCell *cellUl;
10842 U16 msg3GrntSz = 0;
10844 TRC2(rgSCHCmnPrecompMsg3Vars);
/* CCCH CQI cannot exceed the cell's 16QAM CQI cap */
10846 if (ccchCqi > cellUl->max16qamCqi)
10848 ccchCqi = cellUl->max16qamCqi;
10850 /* #ifndef RG_SCH_CMN_EXP_CP_SUP For ECP Pick the index 1 */
10852 ccchTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10853 ccchMcs = rgSCHCmnUlGetIMcsFrmITbs(ccchTbs, CM_LTE_UE_CAT_1);
10855 /* MCS should fit in 4 bits in RAR */
10861 /* Limit the ccchMcs to 15 as it
10862 * can be inferred from 36.213, section 6.2 that msg3 imcs
10864 * Since, UE doesn't exist right now, we use CAT_1 for ue
/* Step CQI down until the derived MCS fits the 4-bit RAR field */
10866 while((ccchMcs = (rgSCHCmnUlGetIMcsFrmITbs(
10867 rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi],CM_LTE_UE_CAT_1))
10869 RG_SCH_CMN_MAX_MSG3_IMCS)
10874 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10876 if (msgSzA < RGSCH_MIN_MSG3_GRNT_SZ)
/* Preamble group B (larger msg3): size the grant to msgSzA bytes */
10880 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(msgSzA, ccchCqi, cellUl), sbSize);
10882 numRb = numSb * sbSize;
10883 msg3GrntSz = 8 * msgSzA;
/* Grow subbands until the TB size at this iTbs covers the grant */
10885 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10888 numRb = numSb * sbSize;
/* Round numSb up to the next valid 2^a*3^b*5^c value (SC-FDMA DFT) */
10890 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10894 /* Reversed(Corrected) the assignment for preamble-GrpA
10895 * Refer- TG36.321- section- 5.1.2*/
10896 cellUl->ra.prmblBNumSb = numSb;
10897 cellUl->ra.prmblBIMcs = ccchMcs;
/* Preamble group A: size the grant to the minimum msg3 size */
10898 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(RGSCH_MIN_MSG3_GRNT_SZ, \
10902 numRb = numSb * sbSize;
10903 msg3GrntSz = 8 * RGSCH_MIN_MSG3_GRNT_SZ;
10904 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10907 numRb = numSb * sbSize;
10909 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10913 /* Reversed(Corrected) the assignment for preamble-GrpA
10914 * Refer- TG36.321- section- 5.1.2*/
10915 cellUl->ra.prmblANumSb = numSb;
10916 cellUl->ra.prmblAIMcs = ccchMcs;
/* Debug flag: when non-zero, PUCCH detail printing is enabled elsewhere
 * in the scheduler (global, written externally; no reader visible here). */
10920 PUBLIC U32 gPrntPucchDet=0;
10923 /***********************************************************
10925 * Func : rgSCHCmnUlCalcAvailBw
10927 * Desc : Calculates bandwidth available for PUSCH scheduling.
* (This variant uses the TDD M-table and per-mi CCE counts;
* presumably compiled under a TDD #ifdef - elided in this listing.)
10929 * Ret : S16 (ROK/RFAILED)
10935 **********************************************************/
10937 PRIVATE S16 rgSCHCmnUlCalcAvailBw
10940 RgrCellCfg *cellCfg,
10946 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
10948 RgrCellCfg *cellCfg;
10955 U8 ulBw = cell->bwCfg.ulTotalBw;
10956 U8 n2Rb = cell->pucchCfg.resourceSize;
10957 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
10958 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
10959 U8 n1Cs = cell->pucchCfg.cyclicShift;
10966 U8 exclRb; /* RBs to exclude */
10969 /* To avoid PUCCH and PUSCH collision issue */
10973 /* Maximum value of M as per Table 10.1-1 */
10974 U8 M[RGSCH_MAX_TDD_UL_DL_CFG] = {1, 2, 4, 3, 4, 9, 1};
10976 TRC2(rgSCHCmnUlCalcAvailBw);
/* 'c' (cyclic shifts per RB factor) depends on CP length; its assignment
 * is in the elided branch of this if. */
10978 if (cell->isCpUlExtend)
10983 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
10985 /* Considering the max no. of CCEs for PUSCH BW calculation
10986 * based on min mi value */
10987 if (cell->ulDlCfgIdx == 0 || cell->ulDlCfgIdx == 6)
10996 totalCce = cell->dynCfiCb.cfi2NCceTbl[mi][cfi];
/* Worst-case n1 PUCCH resource across the bundling window of size M. */
10998 P = rgSCHCmnGetPValFrmCCE(cell, totalCce-1);
10999 n1PlusOne = cell->rgSchTddNpValTbl[P + 1];
11000 n1Max = (M[cell->ulDlCfgIdx] - 1)*n1PlusOne + (totalCce-1) + n1Pucch;
11002 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
11004 n1RbPart = (c*n1Cs)/pucchDeltaShft;
11005 n1Rb = (n1Max - n1RbPart)/ n1PerRb;
11006 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
11008 /* get the total Number of RB's to be excluded for PUSCH */
11010 if(n1Pucch < n1RbPart)
11016 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
/* PUCCH regions sit at both band edges; PUSCH starts past half of the
 * excluded RBs on each side (+1 guard). */
11018 puschRbStart = exclRb/2 + 1;
11020 /* Num of PUCCH RBs = puschRbStart*2 */
11021 if (puschRbStart * 2 >= ulBw)
11023 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11027 *rbStartRef = puschRbStart;
11028 *bwAvailRef = ulBw - puschRbStart * 2;
/* If the PUCCH region at this CFI exceeds the configured cap, lower the
 * maximum CFI the cell may use dynamically. */
11030 if(cell->pucchCfg.maxPucchRb !=0 &&
11031 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11033 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11040 /***********************************************************
11042 * Func : rgSCHCmnUlCalcAvailBw
11044 * Desc : Calculates bandwidth available for PUSCH scheduling.
* (Second variant of the same function - presumably the FDD build,
* selected by an #ifdef/#else that is elided in this listing.)
11046 * Ret : S16 (ROK/RFAILED)
11052 **********************************************************/
11054 PRIVATE S16 rgSCHCmnUlCalcAvailBw
11057 RgrCellCfg *cellCfg,
11063 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
11065 RgrCellCfg *cellCfg;
11072 U8 ulBw = cell->bwCfg.ulTotalBw;
11073 U8 n2Rb = cell->pucchCfg.resourceSize;
11074 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
11075 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
11076 U8 n1Cs = cell->pucchCfg.cyclicShift;
11082 U8 exclRb; /* RBs to exclude */
11086 U16 numOfN3PucchRb;
11087 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11090 TRC2(rgSCHCmnUlCalcAvailBw);
/* 'c' depends on CP length; assigned in the elided branch of this if. */
11092 if (cell->isCpUlExtend)
11097 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
/* FDD: no bundling window, so worst case n1 is just n1Pucch + CCEs - 1. */
11099 totalCce = cell->dynCfiCb.cfi2NCceTbl[0][cfi];
11101 n1Max = n1Pucch + totalCce-1;
11103 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
11105 n1RbPart = (c*n1Cs)/pucchDeltaShft;
11106 n1Rb = (U8)((n1Max - n1RbPart) / n1PerRb);
11107 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
11109 /* get the total Number of RB's to be excluded for PUSCH */
11111 if(n1Pucch < n1RbPart)
11117 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
11119 /*Support for PUCCH Format 3*/
/* Reserve extra RBs for PUCCH format 3: one RB per 5 DL UEs per SF. */
11121 if (cell->isPucchFormat3Sptd)
11123 numOfN3PucchRb = RGSCH_CEIL(cellSch->dl.maxUePerDlSf,5);
11124 exclRb = exclRb + numOfN3PucchRb;
11127 puschRbStart = exclRb/2 + 1;
/* NOTE(review): the two printf variants differ only in %ld vs %d for one
 * argument; the types passed are identical in both, so one of the format
 * strings is presumably mismatched - verify against the variable types. */
11131 #ifndef ALIGN_64BIT
11132 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%ld:%d:%d:%d:%d:%d]\n",
11133 cell->crntTime.sfn, cell->crntTime.slot, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11135 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%d:%d:%d:%d:%d:%d]\n",
11136 cell->crntTime.sfn, cell->crntTime.slot, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11140 if (puschRbStart*2 >= ulBw)
11142 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11146 *rbStartRef = puschRbStart;
11147 *bwAvailRef = ulBw - puschRbStart * 2;
/* Cap dynamic CFI if the PUCCH region at this CFI exceeds the config. */
11149 if(cell->pucchCfg.maxPucchRb !=0 &&
11150 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11152 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11161 /***********************************************************
11163 * Func : rgSCHCmnUlCellInit
11165 * Desc : Uplink scheduler initialisation for cell.
* Validates/clamps UL scheduling limits, derives subband and
* CQI parameters, precomputes msg3 grants, allocates per-SF
* structures, and fills the per-CFI PUSCH bandwidth table.
* (TDD- and EMTC-specific sections below are interleaved with
* elided #ifdef directives in this listing.)
11173 **********************************************************/
11175 PRIVATE S16 rgSCHCmnUlCellInit
11178 RgrCellCfg *cellCfg
11181 PRIVATE S16 rgSCHCmnUlCellInit(cell, cellCfg)
11183 RgrCellCfg *cellCfg;
11187 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11188 U8 maxUePerUlSf = cellCfg->maxUePerUlSf;
11190 /* Added configuration for maximum number of MSG3s */
11191 U8 maxMsg3PerUlSf = cellCfg->maxMsg3PerUlSf;
11193 U8 maxUlBwPerUe = cellCfg->maxUlBwPerUe;
11194 U8 sbSize = cellCfg->puschSubBand.size;
11202 U16 ulDlCfgIdx = cell->ulDlCfgIdx;
11203 /* [ccpu00127294]-MOD-Change the max Ul subfrms size in TDD */
11204 U8 maxSubfrms = 2 * rgSchTddNumUlSf[ulDlCfgIdx];
11205 U8 ulToDlMap[12] = {0}; /* maximum 6 Subframes in UL * 2 */
11206 U8 maxUlsubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
11207 [RGSCH_NUM_SUB_FRAMES-1];
11211 U8 maxSubfrms = RG_SCH_CMN_UL_NUM_SF;
11217 #if (defined(LTE_L2_MEAS) )
11218 Inst inst = cell->instIdx;
11219 #endif /* #if (defined(LTE_L2_MEAS) || defined(DEBUGP) */
11220 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
11222 TRC2(rgSCHCmnUlCellInit);
/* Zero-valued configuration fields fall back to compile-time defaults. */
11224 cellUl->maxUeNewTxPerTti = cellCfg->maxUlUeNewTxPerTti;
11225 if (maxUePerUlSf == 0)
11227 maxUePerUlSf = RG_SCH_CMN_MAX_UE_PER_UL_SF;
11230 if (maxMsg3PerUlSf == 0)
11232 maxMsg3PerUlSf = RG_SCH_CMN_MAX_MSG3_PER_UL_SF;
11234 /* fixed the problem while sending raRsp
11235 * if maxMsg3PerUlSf is greater than
11236 * RGSCH_MAX_RNTI_PER_RARNTI
11238 if(maxMsg3PerUlSf > RGSCH_MAX_RNTI_PER_RARNTI)
11240 maxMsg3PerUlSf = RGSCH_MAX_RNTI_PER_RARNTI;
11243 if(maxMsg3PerUlSf > maxUePerUlSf)
11245 maxMsg3PerUlSf = maxUePerUlSf;
11248 /*cellUl->maxAllocPerUlSf = maxUePerUlSf + maxMsg3PerUlSf;*/
11249 /*Max MSG3 should be a subset of Max UEs*/
11250 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11251 cellUl->maxMsg3PerUlSf = maxMsg3PerUlSf;
11253 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11255 /* Fix: MUE_PERTTI_UL syed validating Cell Configuration */
11256 if (cellUl->maxAllocPerUlSf < cellUl->maxUeNewTxPerTti)
11258 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
11259 "FAILED: MaxUePerUlSf(%u) < MaxUlUeNewTxPerTti(%u)",
11260 cellUl->maxAllocPerUlSf,
11261 cellUl->maxUeNewTxPerTti);
/* Allocate per-subframe UL allocation-info arrays (loop bound differs
 * per build; the two 'for' lines below belong to elided #ifdef arms). */
11267 for(idx = 0; idx < RGSCH_SF_ALLOC_SIZE; idx++)
11269 for(idx = 0; idx < RGSCH_NUM_SUB_FRAMES; idx++)
11273 ret = rgSCHUtlAllocSBuf(inst, (Data **)&(cell->sfAllocArr[idx].
11274 ulUeInfo.ulAllocInfo), (cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc)));
11277 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation failed ");
11282 if (maxUlBwPerUe == 0)
11284 /* ccpu00139362- Setting to configured UL BW instead of MAX BW(100)*/
11285 maxUlBwPerUe = cell->bwCfg.ulTotalBw;
11287 cellUl->maxUlBwPerUe = maxUlBwPerUe;
11289 /* FOR RG_SCH_CMN_EXT_CP_SUP */
/* REs per RB: 12 subcarriers times symbols left after DMRS/SRS, with
 * 14 (normal CP) or 12 (extended CP) symbols per subframe. */
11290 if (!cellCfg->isCpUlExtend)
11292 cellUl->ulNumRePerRb = 12 * (14 - RGSCH_UL_SYM_DMRS_SRS);
11296 cellUl->ulNumRePerRb = 12 * (12 - RGSCH_UL_SYM_DMRS_SRS);
/* Subband size must itself be a 2/3/5-smooth number. */
11299 if (sbSize != rgSchCmnMult235Tbl[sbSize].match)
11301 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Invalid subband size %d", sbSize);
11304 //Setting the subband size to 4 which is size of VRBG in 5GTF
11306 sbSize = MAX_5GTF_VRBG_SIZE;
11309 maxSbPerUe = maxUlBwPerUe / sbSize;
11310 if (maxSbPerUe == 0)
11312 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnUlCellInit(): "
11313 "maxUlBwPerUe/sbSize is zero");
11316 cellUl->maxSbPerUe = rgSchCmnMult235Tbl[maxSbPerUe].prvMatch;
11318 /* CQI related updations */
11319 if ((!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->ulCmnCodeRate.ccchCqi))
11320 || (!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->trgUlCqi.trgCqi)))
11322 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnUlCellInit(): "
11326 cellUl->dfltUlCqi = cellCfg->ulCmnCodeRate.ccchCqi;
11328 /* Changed the logic to determine maxUlCqi.
11329 * For a 16qam UE, maxUlCqi is the CQI Index at which
11330 * efficiency is as close as possible to RG_SCH_MAX_CODE_RATE_16QAM
11331 * Refer to 36.213-8.6.1 */
11332 for (i = RG_SCH_CMN_UL_NUM_CQI - 1;i > 0; --i)
11334 RLOG_ARG2(L_INFO,DBG_CELLID,cell->cellId,
11337 rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i]);
11338 #ifdef MAC_SCH_STATS
11339 /* ccpu00128489 ADD Update mcs in hqFailStats here instead of at CRC
11340 * since CQI to MCS mapping does not change. The only exception is for
11341 * ITBS = 19 where the MCS can be 20 or 21 based on the UE cat. We
11342 * choose 20, instead of 21, ie UE_CAT_3 */
11343 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11344 RG_SCH_CMN_UL_TBS_TO_MCS(iTbs, hqFailStats.ulCqiStat[i - 1].mcs);
/* Scan CQIs downward to find the highest one still within 16QAM ITBS. */
11347 for (i = RG_SCH_CMN_UL_NUM_CQI - 1; i != 0; --i)
11349 /* Fix for ccpu00123912*/
11350 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11351 if (iTbs <= RGSCH_UL_16QAM_MAX_ITBS) /* corresponds to 16QAM */
11353 RLOG_ARG1(L_INFO,DBG_CELLID,cell->cellId,
11354 "16 QAM CQI %u", i);
11355 cellUl->max16qamCqi = i;
11361 /* Precompute useful values for RA msg3 */
11362 ret = rgSCHCmnPrecompEmtcMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11363 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11370 /* Precompute useful values for RA msg3 */
11371 ret = rgSCHCmnPrecompMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11372 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11378 cellUl->sbSize = sbSize;
11381 cellUl->numUlSubfrms = maxSubfrms;
11383 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->ulSfArr,
11384 cellUl->numUlSubfrms * sizeof(RgSchUlSf));
11388 cellUl->numUlSubfrms = 0;
11392 /* store the DL subframe corresponding to the PUSCH offset
11393 * in their respective UL subframe */
11394 for(i=0; i < RGSCH_NUM_SUB_FRAMES; i++)
11396 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][i] != 0)
11398 subfrm = (i + rgSchTddPuschTxKTbl[ulDlCfgIdx][i]) % \
11399 RGSCH_NUM_SUB_FRAMES;
11400 subfrm = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subfrm]-1;
11401 dlIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][i]-1;
11402 RGSCH_ARRAY_BOUND_CHECK( cell->instIdx, ulToDlMap, subfrm);
11403 ulToDlMap[subfrm] = dlIdx;
11406 /* Copy the information in the remaining UL subframes based
11407 * on number of HARQ processes */
11408 for(i=maxUlsubfrms; i < maxSubfrms; i++)
11410 subfrm = i-maxUlsubfrms;
11411 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, i);
11412 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, subfrm)
11413 ulToDlMap[i] = ulToDlMap[subfrm];
/* Precompute PUSCH start RB and subband count for every candidate CFI. */
11417 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++)
11420 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11422 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11431 cell->ulAvailBw = bwAvail;
11434 numSb = bwAvail/sbSize;
11436 cell->dynCfiCb.bwInfo[cfi].startRb = rbStart;
11437 cell->dynCfiCb.bwInfo[cfi].numSb = numSb;
11440 if(0 == cell->dynCfiCb.maxCfi)
11442 RLOG_ARG3(L_ERROR,DBG_CELLID,cell->cellId,
11443 "Incorrect Default CFI(%u), maxCfi(%u), maxPucchRb(%d)",
11444 cellSch->cfiCfg.cfi, cell->dynCfiCb.maxCfi,
11445 cell->pucchCfg.maxPucchRb);
/* DMRS configuration: one entry per subband at CFI 1. */
11451 cellUl->dmrsArrSize = cell->dynCfiCb.bwInfo[1].numSb;
11452 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->dmrsArr,
11453 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11458 for (i = 0; i < cellUl->dmrsArrSize; ++i)
11460 cellUl->dmrsArr[i] = cellCfg->puschSubBand.dmrs[i];
11463 /* Init subframes */
11464 for (i = 0; i < maxSubfrms; ++i)
11466 ret = rgSCHUtlUlSfInit(cell, &cellUl->ulSfArr[i], i,
11467 cellUl->maxAllocPerUlSf);
/* On failure, unwind: deinit already-initialised subframes and free
 * the dmrs and subframe arrays allocated above. */
11470 for (; i != 0; --i)
11472 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[i-1]);
11474 /* ccpu00117052 - MOD - Passing double pointer
11475 for proper NULLP assignment*/
11476 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)(&(cellUl->dmrsArr)),
11477 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11479 /* ccpu00117052 - MOD - Passing double pointer
11480 for proper NULLP assignment*/
11481 rgSCHUtlFreeSBuf(cell->instIdx,
11482 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11487 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cellUl);
11492 * @brief Scheduler processing on cell configuration.
11496 * Function : rgSCHCmnRgrCellCfg
11498 * This function does requisite initialisation
11499 * and setup for scheduler1 when a cell is
11502 * @param[in] RgSchCellCb *cell
11503 * @param[in] RgrCellCfg *cellCfg
11504 * @param[out] RgSchErrInfo *err
* @return S16 - ROK on success, err->errCause set to
* RGSCHERR_SCH_CFG on any failure path.
11510 PUBLIC S16 rgSCHCmnRgrCellCfg
11513 RgrCellCfg *cellCfg,
11517 PUBLIC S16 rgSCHCmnRgrCellCfg(cell, cellCfg, err)
11519 RgrCellCfg *cellCfg;
11524 RgSchCmnCell *cellSch;
11525 TRC2(rgSCHCmnRgrCellCfg);
11527 /* As part of RGR cell configuration, validate the CRGCellCfg
11528 * There is no trigger for crgCellCfg from SC1 */
11529 /* Removed failure check for Extended CP */
/* Allocate the common-scheduler per-cell control block and hang it off
 * the cell's scheduler anchor (cell->sc.sch). */
11531 if (((ret = rgSCHUtlAllocSBuf(cell->instIdx,
11532 (Data**)&(cell->sc.sch), (sizeof(RgSchCmnCell)))) != ROK))
11534 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
11535 "Memory allocation FAILED");
11536 err->errCause = RGSCHERR_SCH_CFG;
11539 cellSch = (RgSchCmnCell *)(cell->sc.sch);
11540 cellSch->cfiCfg = cellCfg->cfiCfg;
11541 cellSch->trgUlCqi.trgCqi = cellCfg->trgUlCqi.trgCqi;
11542 /* Initialize the scheduler refresh timer queues */
11543 cellSch->tmrTqCp.nxtEnt = 0;
11544 cellSch->tmrTqCp.tmrLen = RG_SCH_CMN_NUM_REFRESH_Q;
11546 /* RACHO Intialize the RACH ded Preamble Information */
11547 rgSCHCmnCfgRachDedPrm(cell);
11549 /* Initialize 'Np' value for each 'p' used for
11550 * HARQ ACK/NACK reception */
11551 rgSCHCmnDlNpValInit(cell);
11554 /* Initialize 'Np' value for each 'p' used for
11555 * HARQ ACK/NACK reception */
11557 rgSCHCmnDlNpValInit(cell);
11560 /* Now perform uplink related initializations */
11561 ret = rgSCHCmnUlCellInit(cell, cellCfg);
11564 /* There is no downlink deinit to be performed */
11565 err->errCause = RGSCHERR_SCH_CFG;
11568 ret = rgSCHCmnDlRgrCellCfg(cell, cellCfg, err);
11571 err->errCause = RGSCHERR_SCH_CFG;
11574 /* DL scheduler has no initializations to make */
11575 /* As of now DL scheduler always returns ROK */
11577 rgSCHCmnGetDciFrmtSizes(cell);
11578 rgSCHCmnGetCqiDciFrmt2AggrLvl(cell);
11580 rgSCHCmnGetEmtcDciFrmtSizes(cell);
11581 rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl(cell);
11582 #endif /* EMTC_ENABLE */
/* Bind the UL scheduler API table: EMTC-specific when enabled for the
 * cell, else the table selected by the configured UL scheduler type. */
11585 if(TRUE == cellCfg->emtcEnable)
11587 cellSch->apisEmtcUl = &rgSchEmtcUlSchdTbl[0];
11588 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
11595 cellSch->apisUl = &rgSchUlSchdTbl[RG_SCH_CMN_GET_UL_SCHED_TYPE(cell)];
11596 ret = cellSch->apisUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
/* Same binding for the DL scheduler API table. */
11602 if(TRUE == cellCfg->emtcEnable)
11604 cellSch->apisEmtcDl = &rgSchEmtcDlSchdTbl[0];
11605 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11612 cellSch->apisDl = &rgSchDlSchdTbl[RG_SCH_CMN_GET_DL_SCHED_TYPE(cell)];
11614 /* Perform SPS specific initialization for the cell */
11615 ret = rgSCHCmnSpsCellCfg(cell, cellCfg, err);
11621 ret = cellSch->apisDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11626 rgSCHCmnInitVars(cell);
11629 } /* rgSCHCmnRgrCellCfg*/
11633 * @brief This function handles the reconfiguration of cell.
11637 * Function: rgSCHCmnRgrCellRecfg
11638 * Purpose: Update the reconfiguration parameters.
* Applies only the recfg groups flagged in recfg->recfgTypes
* (UL/DL common rates, DLFS, power) and forwards the recfg to
* the bound UL/DL scheduler APIs.
11640 * Invoked by: Scheduler
11642 * @param[in] RgSchCellCb* cell
11647 PUBLIC S16 rgSCHCmnRgrCellRecfg
11650 RgrCellRecfg *recfg,
11654 PUBLIC S16 rgSCHCmnRgrCellRecfg(cell, recfg, err)
11656 RgrCellRecfg *recfg;
11661 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11662 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11664 TRC2(rgSCHCmnRgrCellRecfg);
11666 if (recfg->recfgTypes & RGR_CELL_UL_CMNRATE_RECFG)
/* Remember the old CCCH CQI so a failed msg3 precompute can roll back. */
11668 U8 oldCqi = cellUl->dfltUlCqi;
11669 if (!RG_SCH_CMN_UL_IS_CQI_VALID(recfg->ulCmnCodeRate.ccchCqi))
11671 err->errCause = RGSCHERR_SCH_CFG;
11672 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnRgrCellRecfg(): "
11676 cellUl->dfltUlCqi = recfg->ulCmnCodeRate.ccchCqi;
11677 ret = rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11678 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
/* Rollback path: restore oldCqi and recompute.  NOTE(review): the
 * recompute below passes the NEW cqi again, not oldCqi - presumably a
 * latent bug; cannot be confirmed/fixed from this elided listing. */
11681 cellUl->dfltUlCqi = oldCqi;
11682 rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11683 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
11688 if (recfg->recfgTypes & RGR_CELL_DL_CMNRATE_RECFG)
11690 if (rgSCHCmnDlCnsdrCmnRt(cell, &recfg->dlCmnCodeRate) != ROK)
11692 err->errCause = RGSCHERR_SCH_CFG;
/* EMTC cells route the recfg through the EMTC scheduler API tables. */
11698 if(TRUE == cell->emtcEnable)
11700 /* Invoke UL sched for cell Recfg */
11701 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11707 /* Invoke DL sched for cell Recfg */
11708 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
11717 /* Invoke UL sched for cell Recfg */
11718 ret = cellSch->apisUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11724 /* Invoke DL sched for cell Recfg */
11725 ret = cellSch->apisDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
11732 if (recfg->recfgTypes & RGR_CELL_DLFS_RECFG)
11734 ret = cellSch->apisDlfs->rgSCHDlfsCellRecfg(cell, recfg, err);
11739 cellSch->dl.isDlFreqSel = recfg->dlfsRecfg.isDlFreqSel;
11742 if (recfg->recfgTypes & RGR_CELL_PWR_RECFG)
11744 ret = rgSCHPwrCellRecfg(cell, recfg);
11754 /***********************************************************
11756 * Func : rgSCHCmnUlCellDeinit
11758 * Desc : Uplink scheduler de-initialisation for cell.
* Frees per-SF UL allocation info, queued L2-measurement
* control blocks, the DMRS array, and the UL subframe array
* (mirror of the allocations in rgSCHCmnUlCellInit).
11766 **********************************************************/
11768 PRIVATE Void rgSCHCmnUlCellDeinit
11773 PRIVATE Void rgSCHCmnUlCellDeinit(cell)
11777 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11780 U8 maxSubfrms = cellUl->numUlSubfrms;
11783 CmLList *lnk = NULLP;
11784 RgSchL2MeasCb *measCb;
11786 TRC2(rgSCHCmnUlCellDeinit);
/* Loop bound differs per build (the two 'for' lines belong to elided
 * #ifdef arms); free each subframe's UL alloc-info array if present. */
11789 for(ulSfIdx = 0; ulSfIdx < RGSCH_SF_ALLOC_SIZE; ulSfIdx++)
11791 for(ulSfIdx = 0; ulSfIdx < RGSCH_NUM_SUB_FRAMES; ulSfIdx++)
11794 if(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo != NULLP)
11796 /* ccpu00117052 - MOD - Passing double pointer
11797 for proper NULLP assignment*/
11798 rgSCHUtlFreeSBuf(cell->instIdx,
11799 (Data **)(&(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo)),
11800 cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc));
11802 /* ccpu00117052 - DEL - removed explicit NULLP assignment
11803 as it is done in above utility function */
11806 /* Free the memory allocated to measCb */
/* Drain the cell's L2-measurement list, freeing each control block. */
11807 lnk = cell->l2mList.first;
11808 while(lnk != NULLP)
11810 measCb = (RgSchL2MeasCb *)lnk->node;
11811 cmLListDelFrm(&cell->l2mList, lnk);
11813 /* ccpu00117052 - MOD - Passing double pointer
11814 for proper NULLP assignment*/
11815 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&measCb,\
11816 sizeof(RgSchL2MeasCb));
11819 if (cellUl->dmrsArr != NULLP)
11821 /* ccpu00117052 - MOD - Passing double pointer
11822 for proper NULLP assignment*/
11823 rgSCHUtlFreeSBuf(cell->instIdx,(Data **)(&(cellUl->dmrsArr)),
11824 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11826 /* De-init subframes */
11828 for (ulSfIdx = 0; ulSfIdx < maxSubfrms; ++ulSfIdx)
11830 for (ulSfIdx = 0; ulSfIdx < RG_SCH_CMN_UL_NUM_SF; ++ulSfIdx)
11833 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[ulSfIdx]);
11837 if (cellUl->ulSfArr != NULLP)
11839 /* ccpu00117052 - MOD - Passing double pointer
11840 for proper NULLP assignment*/
11841 rgSCHUtlFreeSBuf(cell->instIdx,
11842 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11850 * @brief Scheduler processing for cell delete.
11854 * Function : rgSCHCmnCellDel
11856 * This functions de-initialises and frees memory
11857 * taken up by scheduler1 for the entire cell.
* Order: UL deinit, UL/DL scheduler-specific free (EMTC-aware),
* DLFS, power, SPS, then the common cell control block itself.
11859 * @param[in] RgSchCellCb *cell
11863 PUBLIC Void rgSCHCmnCellDel
11868 PUBLIC Void rgSCHCmnCellDel(cell)
11872 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11873 TRC2(rgSCHCmnCellDel);
/* Nothing to free if the common scheduler block was never allocated. */
11878 if (cellSch == NULLP)
11882 /* Perform the deinit for the UL scheduler */
11883 rgSCHCmnUlCellDeinit(cell);
11885 if(TRUE == cell->emtcEnable)
11887 if (cellSch->apisEmtcUl)
11889 cellSch->apisEmtcUl->rgSCHFreeUlCell(cell);
11893 if (cellSch->apisUl)
11895 /* api pointer checks added (here and below in
11896 * this function). pl check. - antriksh */
11897 cellSch->apisUl->rgSCHFreeUlCell(cell);
11900 /* Perform the deinit for the DL scheduler */
11901 cmLListInit(&cellSch->dl.taLst);
11902 if (cellSch->apisDl)
11904 cellSch->apisDl->rgSCHFreeDlCell(cell);
11907 if (cellSch->apisEmtcDl)
11909 rgSCHEmtcInitTaLst(&cellSch->dl);
11911 cellSch->apisEmtcDl->rgSCHFreeDlCell(cell);
11915 /* DLFS de-initialization */
11916 if (cellSch->dl.isDlFreqSel && cellSch->apisDlfs)
11918 cellSch->apisDlfs->rgSCHDlfsCellDel(cell);
11921 rgSCHPwrCellDel(cell);
11923 rgSCHCmnSpsCellDel(cell);
11926 /* ccpu00117052 - MOD - Passing double pointer
11927 for proper NULLP assignment*/
11928 rgSCHUtlFreeSBuf(cell->instIdx,
11929 (Data**)(&(cell->sc.sch)), (sizeof(RgSchCmnCell)));
11931 } /* rgSCHCmnCellDel */
11935 * @brief This function validates QOS parameters for DL.
11939 * Function: rgSCHCmnValidateDlQos
11940 * Purpose: This function validates QOS parameters for DL.
* Checks the QCI is in the supported range and, for GBR QCIs,
* that MBR is non-zero and >= GBR.
11942 * Invoked by: Scheduler
11944 * @param[in] CrgLchQosCfg *dlQos
* @return S16 - presumably ROK when valid, RFAILED otherwise
* (RETVALUE lines are elided in this listing).
11949 PRIVATE S16 rgSCHCmnValidateDlQos
11951 RgrLchQosCfg *dlQos
11954 PRIVATE S16 rgSCHCmnValidateDlQos(dlQos)
11955 RgrLchQosCfg *dlQos;
11958 U8 qci = dlQos->qci;
11960 TRC2(rgSCHCmnValidateDlQos);
11962 if ( qci < RG_SCH_CMN_MIN_QCI || qci > RG_SCH_CMN_MAX_QCI )
/* GBR bearers additionally require a sane MBR/GBR relationship. */
11967 if ((qci >= RG_SCH_CMN_GBR_QCI_START) &&
11968 (qci <= RG_SCH_CMN_GBR_QCI_END))
11970 if ((dlQos->mbr == 0) || (dlQos->mbr < dlQos->gbr))
11979 * @brief Scheduler invocation on logical channel addition.
11983 * Function : rgSCHCmnRgrLchCfg
11985 * This functions does required processing when a new
11986 * (dedicated) logical channel is added. Assumes lcg
11987 * pointer in ulLc is set.
11989 * @param[in] RgSchCellCb *cell
11990 * @param[in] RgSchUeCb *ue
11991 * @param[in] RgSchDlLcCb *dlLc
11992 * @param[int] RgrLchCfg *lcCfg
11993 * @param[out] RgSchErrInfo *err
* @return S16 - err->errCause set to RGSCHERR_SCH_CFG on failure.
11999 PUBLIC S16 rgSCHCmnRgrLchCfg
12008 PUBLIC S16 rgSCHCmnRgrLchCfg(cell, ue, dlLc, lcCfg, err)
12018 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12020 TRC2(rgSCHCmnRgrLchCfg);
/* Allocate the per-LC DL service context hung off dlLc->sch. */
12022 ret = rgSCHUtlAllocSBuf(cell->instIdx,
12023 (Data**)&((dlLc)->sch), (sizeof(RgSchCmnDlSvc)));
12026 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRgrLchCfg(): "
12027 "SCH struct alloc failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12028 err->errCause = RGSCHERR_SCH_CFG;
/* Non-DCCH channels carry QoS config that must be validated and stored;
 * DCCH gets fixed highest priority below instead. */
12031 if(lcCfg->lcType != CM_LTE_LCH_DCCH)
12033 ret = rgSCHCmnValidateDlQos(&lcCfg->dlInfo.dlQos);
12036 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSchCmnCrgLcCfg(): "
12037 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12038 err->errCause = RGSCHERR_SCH_CFG;
12041 /* Perform DL service activation in the scheduler */
12042 ((RgSchCmnDlSvc *)(dlLc->sch))->qci = lcCfg->dlInfo.dlQos.qci;
12043 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = rgSchCmnDlQciPrio[lcCfg->dlInfo.dlQos.qci - 1];
/* GBR/MBR are rescaled from per-second rates to per-refresh-period
 * budgets (RG_SCH_CMN_REFRESH_TIME is in units of 10ms frames / 100). */
12044 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcCfg->dlInfo.dlQos.gbr * \
12045 RG_SCH_CMN_REFRESH_TIME)/100;
12046 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcCfg->dlInfo.dlQos.mbr * \
12047 RG_SCH_CMN_REFRESH_TIME)/100;
12051 /*assigning highest priority to DCCH */
12052 ((RgSchCmnDlSvc *)(dlLc->sch))->prio=RG_SCH_CMN_DCCH_PRIO;
12055 dlLc->lcType=lcCfg->lcType;
/* Forward the LC config to the bound DL and UL scheduler API tables
 * (EMTC-specific tables for EMTC UEs). */
12058 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12060 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcCfg(cell, ue,dlLc ,lcCfg, err);
12069 ret = cellSch->apisDl->rgSCHRgrDlLcCfg(cell, ue, dlLc, lcCfg, err);
12077 if(TRUE == ue->isEmtcUe)
12079 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12088 ret = cellSch->apisUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12098 rgSCHSCellDlLcCfg(cell, ue, dlLc);
12104 if(lcCfg->dlInfo.dlSpsCfg.isSpsEnabled)
12106 /* Invoke SPS module if SPS is enabled for the service */
12107 ret = rgSCHCmnSpsDlLcCfg(cell, ue, dlLc, lcCfg, err);
12110 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "rgSchCmnRgrLchCfg(): "
12111 "SPS configuration failed for DL LC for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12112 err->errCause = RGSCHERR_SCH_CFG;
12122 * @brief Scheduler invocation on logical channel addition.
12126 * Function : rgSCHCmnRgrLchRecfg
12128 * This functions does required processing when an existing
12129 * (dedicated) logical channel is reconfigured. Assumes lcg
12130 * pointer in ulLc is set to the old value.
12131 * Independent of whether new LCG is meant to be configured,
12132 * the new LCG scheduler information is accessed and possibly modified.
12134 * @param[in] RgSchCellCb *cell
12135 * @param[in] RgSchUeCb *ue
12136 * @param[in] RgSchDlLcCb *dlLc
12137 * @param[int] RgrLchRecfg *lcRecfg
12138 * @param[out] RgSchErrInfo *err
* @return S16 - err->errCause set to RGSCHERR_SCH_CFG on failure.
12144 PUBLIC S16 rgSCHCmnRgrLchRecfg
12149 RgrLchRecfg *lcRecfg,
12153 PUBLIC S16 rgSCHCmnRgrLchRecfg(cell, ue, dlLc, lcRecfg, err)
12157 RgrLchRecfg *lcRecfg;
12162 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12164 TRC2(rgSCHCmnRgrLchRecfg)
/* Non-DCCH: validate new QoS; QCI (and hence priority) changes are
 * rejected, only GBR/MBR budgets are refreshed. */
12166 if(dlLc->lcType != CM_LTE_LCH_DCCH)
12168 ret = rgSCHCmnValidateDlQos(&lcRecfg->dlRecfg.dlQos);
12172 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
12173 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12174 err->errCause = RGSCHERR_SCH_CFG;
12177 if (((RgSchCmnDlSvc *)(dlLc->sch))->qci != lcRecfg->dlRecfg.dlQos.qci)
12179 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Qci, hence lc Priority change "
12180 "not supported for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12181 err->errCause = RGSCHERR_SCH_CFG;
/* Rescale per-second rates to per-refresh-period budgets. */
12184 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcRecfg->dlRecfg.dlQos.gbr * \
12185 RG_SCH_CMN_REFRESH_TIME)/100;
12186 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcRecfg->dlRecfg.dlQos.mbr * \
12187 RG_SCH_CMN_REFRESH_TIME)/100;
12191 /*assigning highest priority to DCCH */
12192 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = RG_SCH_CMN_DCCH_PRIO;
/* Forward to the bound DL then UL scheduler APIs (EMTC-aware). */
12196 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12198 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12203 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
12212 ret = cellSch->apisDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12217 ret = cellSch->apisUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
12225 if (lcRecfg->recfgTypes & RGR_DL_LC_SPS_RECFG)
12227 /* Invoke SPS module if SPS is enabled for the service */
12228 if(lcRecfg->dlRecfg.dlSpsRecfg.isSpsEnabled)
12230 ret = rgSCHCmnSpsDlLcRecfg(cell, ue, dlLc, lcRecfg, err)
12233 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"SPS re-configuration not "
12234 "supported for dlLC Ignore this CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12245 * @brief Scheduler invocation on logical channel addition.
12249 * Function : rgSCHCmnRgrLcgCfg
12251 * This functions does required processing when a new
12252 * (dedicated) logical channel is added. Assumes lcg
12253 * pointer in ulLc is set.
* Stores the UL LCG's GBR and delta-MBR budgets (per refresh
* period) and registers GBR LCGs with MAC.
12255 * @param[in] RgSchCellCb *cell,
12256 * @param[in] RgSchUeCb *ue,
12257 * @param[in] RgSchLcgCb *lcg,
12258 * @param[in] RgrLcgCfg *lcgCfg,
12259 * @param[out] RgSchErrInfo *err
12265 PUBLIC S16 rgSCHCmnRgrLcgCfg
12274 PUBLIC S16 rgSCHCmnRgrLcgCfg(cell, ue, lcg, lcgCfg, err)
12283 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12284 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCfg->ulInfo.lcgId].sch));
12286 TRC2(rgSCHCmnRgrLcgCfg);
/* Convert per-second rates into per-refresh-period budgets; deltaMbr is
 * the MBR headroom above GBR. */
12288 ulLcg->cfgdGbr = (lcgCfg->ulInfo.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12289 ulLcg->effGbr = ulLcg->cfgdGbr;
12290 ulLcg->deltaMbr = ((lcgCfg->ulInfo.mbr - lcgCfg->ulInfo.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12291 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12294 if(TRUE == ue->isEmtcUe)
12296 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12305 ret = cellSch->apisUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12311 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12313 /* Indicate MAC that this LCG is GBR LCG */
12314 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcgCfg->ulInfo.lcgId, TRUE);
12320 * @brief Scheduler invocation on logical channel addition.
12324 * Function : rgSCHCmnRgrLcgRecfg
12326 * This functions does required processing when a new
12327 * (dedicated) logical channel is added. Assumes lcg
12328 * pointer in ulLc is set.
* Reconfiguration counterpart of rgSCHCmnRgrLcgCfg: refreshes
* the budgets and (de)registers the LCG's GBR status with MAC.
12330 * @param[in] RgSchCellCb *cell,
12331 * @param[in] RgSchUeCb *ue,
12332 * @param[in] RgSchLcgCb *lcg,
12333 * @param[in] RgrLcgRecfg *reCfg,
12334 * @param[out] RgSchErrInfo *err
12340 PUBLIC S16 rgSCHCmnRgrLcgRecfg
12345 RgrLcgRecfg *reCfg,
12349 PUBLIC S16 rgSCHCmnRgrLcgRecfg(cell, ue, lcg, reCfg, err)
12353 RgrLcgRecfg *reCfg;
12358 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12359 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[reCfg->ulRecfg.lcgId].sch));
12361 TRC2(rgSCHCmnRgrLcgRecfg);
/* Same per-refresh-period rescaling as in rgSCHCmnRgrLcgCfg. */
12363 ulLcg->cfgdGbr = (reCfg->ulRecfg.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12364 ulLcg->effGbr = ulLcg->cfgdGbr;
12365 ulLcg->deltaMbr = ((reCfg->ulRecfg.mbr - reCfg->ulRecfg.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12366 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12369 if(TRUE == ue->isEmtcUe)
12371 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12380 ret = cellSch->apisUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12386 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12388 /* Indicate MAC that this LCG is GBR LCG */
12389 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, TRUE);
12393 /* In case of RAB modification */
12394 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, FALSE);
12399 /***********************************************************
12401 * Func : rgSCHCmnRgrLchDel
12403 * Desc : Scheduler handling for a (dedicated)
12404 * uplink logical channel being deleted.
* Thin dispatcher: forwards the delete to the EMTC or common
* UL scheduler API table depending on the UE type.
12411 **********************************************************/
12413 PUBLIC S16 rgSCHCmnRgrLchDel
12421 PUBLIC S16 rgSCHCmnRgrLchDel(cell, ue, lcId, lcgId)
12428 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12429 TRC2(rgSCHCmnRgrLchDel);
12431 if(TRUE == ue->isEmtcUe)
12433 cellSch->apisEmtcUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12438 cellSch->apisUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12443 /***********************************************************
12445 * Func : rgSCHCmnLcgDel
12447 * Desc : Scheduler handling for a (dedicated)
12448 * uplink logical channel being deleted.
* Deregisters a GBR LCG from MAC, notifies SPS if enabled,
* zeroes the LCG's budgets/BS, and invokes the scheduler-
* specific free hook.  The control block itself is freed at
* UE deletion, not here.
12456 **********************************************************/
12458 PUBLIC Void rgSCHCmnLcgDel
12465 PUBLIC Void rgSCHCmnLcgDel(cell, ue, lcg)
12471 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12472 RgSchCmnLcg *lcgCmn = RG_SCH_CMN_GET_UL_LCG(lcg);
12473 TRC2(rgSCHCmnLcgDel);
12475 if (lcgCmn == NULLP)
12480 if (RGSCH_IS_GBR_BEARER(lcgCmn->cfgdGbr))
12482 /* Indicate MAC that this LCG is GBR LCG */
12483 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcg->lcgId, FALSE);
12487 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
12489 rgSCHCmnSpsUlLcgDel(cell, ue, lcg);
12491 #endif /* LTEMAC_SPS */
12493 lcgCmn->effGbr = 0;
12494 lcgCmn->reportedBs = 0;
12495 lcgCmn->cfgdGbr = 0;
12496 /* set lcg bs to 0. Deletion of control block happens
12497 * at the time of UE deletion. */
12500 if(TRUE == ue->isEmtcUe)
12502 cellSch->apisEmtcUl->rgSCHFreeUlLcg(cell, ue, lcg);
12507 cellSch->apisUl->rgSCHFreeUlLcg(cell, ue, lcg);
12514 * @brief This function deletes a service from scheduler.
12518 * Function: rgSCHCmnFreeDlLc
12519 * Purpose: This function is made available through a FP for
12520 * making scheduler aware of a service being deleted from UE.
12522 * Invoked by: BO and Scheduler
12524 * @param[in] RgSchCellCb* cell
12525 * @param[in] RgSchUeCb* ue
12526 * @param[in] RgSchDlLcCb* svc
/* Frees DL logical-channel (service) scheduler state: dispatches to the
 * eMTC/default DL API, removes the LC from secondary cells, informs DL
 * SPS if the service is SPS-enabled, releases the per-service scheduler
 * allocation (svc->sch), and de-initializes LAA LCH state. */
12531 PUBLIC Void rgSCHCmnFreeDlLc
12538 PUBLIC Void rgSCHCmnFreeDlLc(cell, ue, svc)
12544 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12545 TRC2(rgSCHCmnFreeDlLc);
/* No scheduler context was ever allocated for this service. */
12546 if (svc->sch == NULLP)
/* eMTC path requires both the cell to be eMTC-enabled and the UE to be
 * an eMTC UE. */
12551 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12553 cellSch->apisEmtcDl->rgSCHFreeDlLc(cell, ue, svc);
/* Non-eMTC path (else/braces elided in this extract). */
12558 cellSch->apisDl->rgSCHFreeDlLc(cell, ue, svc);
/* Remove the LC from SCell bookkeeping (carrier aggregation). */
12564 rgSCHSCellDlLcDel(cell, ue, svc);
12569 /* If SPS service, invoke SPS module */
12570 if (svc->dlLcSpsCfg.isSpsEnabled)
12572 rgSCHCmnSpsDlLcDel(cell, ue, svc);
12576 /* ccpu00117052 - MOD - Passing double pointer
12577 for proper NULLP assignment*/
12578 rgSCHUtlFreeSBuf(cell->instIdx,
12579 (Data**)(&(svc->sch)), (sizeof(RgSchCmnDlSvc)));
12582 rgSCHLaaDeInitDlLchCb(cell, svc);
12591 * @brief This function Processes the Final Allocations
12592 * made by the RB Allocator against the requested
12593 * CCCH SDURetx Allocations.
12597 * Function: rgSCHCmnDlCcchSduRetxFnlz
12598 * Purpose: This function Processes the Final Allocations
12599 * made by the RB Allocator against the requested
12600 * CCCH Retx Allocations.
12601 * Scans through the scheduled list of ccchSdu retrans
12602 * fills the corresponding pdcch, adds the hqProc to
12603 * the corresponding SubFrm and removes the hqP from
12606 * Invoked by: Common Scheduler
12608 * @param[in] RgSchCellCb *cell
12609 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
/* Finalizes CCCH-SDU retransmission allocations: for each scheduled
 * HARQ process, fills PDCCH/HARQ info, unlinks it from the cell's
 * ccchSduRetxLst, and resets the UE's temporary DL allocation state.
 * Non-scheduled entries only get their temp allocation state reset. */
12614 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz
12617 RgSchCmnDlRbAllocInfo *allocInfo
12620 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo)
12622 RgSchCmnDlRbAllocInfo *allocInfo;
12626 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12627 RgSchDlRbAlloc *rbAllocInfo;
12628 RgSchDlHqProcCb *hqP;
12630 TRC2(rgSCHCmnDlCcchSduRetxFnlz);
12632 /* Traverse through the Scheduled Retx List */
12633 node = allocInfo->ccchSduAlloc.schdCcchSduRetxLst.first;
12636 hqP = (RgSchDlHqProcCb *)(node->node);
/* Per-UE allocation CB for this cell (ue presumably derived from hqP;
 * the deriving line is elided in this extract). */
12638 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
12640 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12642 /* Remove the HqP from cell's ccchSduRetxLst */
12643 cmLListDelFrm(&cmnCellDl->ccchSduRetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12644 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12646 /* Fix: syed dlAllocCb reset should be performed.
12647 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12648 rgSCHCmnDlUeResetTemp(ue, hqP);
12650 /* Fix: syed dlAllocCb reset should be performed.
12651 * zombie info in dlAllocCb leading to crash rbNum wraparound */
/* Second pass: entries that the RB allocator could NOT schedule. */
12652 node = allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst.first;
12655 hqP = (RgSchDlHqProcCb *)(node->node);
12658 /* reset the UE allocation Information */
12659 rgSCHCmnDlUeResetTemp(ue, hqP);
12665 * @brief This function Processes the Final Allocations
12666 * made by the RB Allocator against the requested
12667 * CCCH Retx Allocations.
12671 * Function: rgSCHCmnDlCcchRetxFnlz
12672 * Purpose: This function Processes the Final Allocations
12673 * made by the RB Allocator against the requested
12674 * CCCH Retx Allocations.
12675 * Scans through the scheduled list of msg4 retrans
12676 * fills the corresponding pdcch, adds the hqProc to
12677 * the corresponding SubFrm and removes the hqP from
12680 * Invoked by: Common Scheduler
12682 * @param[in] RgSchCellCb *cell
12683 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
/* Finalizes Msg4 (contention resolution) retransmission allocations.
 * Scheduled entries: fill PDCCH/HARQ, unlink from the cell's
 * msg4RetxLst, and clear the raCb's RB-allocation CB plus the HARQ
 * process's temporary state. Non-scheduled entries: only clear the
 * allocation CB and temp HARQ state. */
12688 PRIVATE Void rgSCHCmnDlCcchRetxFnlz
12691 RgSchCmnDlRbAllocInfo *allocInfo
12694 PRIVATE Void rgSCHCmnDlCcchRetxFnlz(cell, allocInfo)
12696 RgSchCmnDlRbAllocInfo *allocInfo;
12700 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12701 RgSchDlRbAlloc *rbAllocInfo;
12702 RgSchDlHqProcCb *hqP;
12704 TRC2(rgSCHCmnDlCcchRetxFnlz);
12706 /* Traverse through the Scheduled Retx List */
12707 node = allocInfo->msg4Alloc.schdMsg4RetxLst.first;
12710 hqP = (RgSchDlHqProcCb *)(node->node);
/* For Msg4 the allocation CB lives in the RA control block, not a UE CB
 * (the UE does not exist yet at this point in the RACH procedure). */
12711 raCb = hqP->hqE->raCb;
12712 rbAllocInfo = &raCb->rbAllocInfo;
12714 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12716 /* Remove the HqP from cell's msg4RetxLst */
12717 cmLListDelFrm(&cmnCellDl->msg4RetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12718 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12719 /* Fix: syed dlAllocCb reset should be performed.
12720 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12721 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12722 rgSCHCmnDlHqPResetTemp(hqP);
12724 /* Fix: syed dlAllocCb reset should be performed.
12725 * zombie info in dlAllocCb leading to crash rbNum wraparound */
/* Second pass: Msg4 retx requests the RB allocator rejected. */
12726 node = allocInfo->msg4Alloc.nonSchdMsg4RetxLst.first;
12729 hqP = (RgSchDlHqProcCb *)(node->node);
12730 raCb = hqP->hqE->raCb;
12732 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12733 rgSCHCmnDlHqPResetTemp(hqP);
12740 * @brief This function Processes the Final Allocations
12741 * made by the RB Allocator against the requested
12742 * CCCH SDU tx Allocations.
12746 * Function: rgSCHCmnDlCcchSduTxFnlz
12747 * Purpose: This function Processes the Final Allocations
12748 * made by the RB Allocator against the requested
12749 * CCCH tx Allocations.
12750 * Scans through the scheduled list of CCCH SDU trans
12751 * fills the corresponding pdcch, adds the hqProc to
12752 * the corresponding SubFrm and removes the hqP from
12755 * Invoked by: Common Scheduler
12757 * @param[in] RgSchCellCb *cell
12758 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
/* Finalizes first-transmission CCCH-SDU allocations. Scheduled UEs:
 * fill PDCCH/HARQ, unlink the UE from cell->ccchSduUeLst, clear the
 * pending CCCH BO, report the scheduled LC bytes to DHM (minus the MAC
 * header), then reset temp allocation state. Non-scheduled UEs: release
 * the HARQ process (TB0 only) and reset temp allocation state. */
12763 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz
12766 RgSchCmnDlRbAllocInfo *allocInfo
12769 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo)
12771 RgSchCmnDlRbAllocInfo *allocInfo;
12776 RgSchDlRbAlloc *rbAllocInfo;
12777 RgSchDlHqProcCb *hqP;
12778 RgSchLchAllocInfo lchSchdData;
12779 TRC2(rgSCHCmnDlCcchSduTxFnlz);
12781 /* Traverse through the Scheduled Retx List */
12782 node = allocInfo->ccchSduAlloc.schdCcchSduTxLst.first;
12785 hqP = (RgSchDlHqProcCb *)(node->node);
12786 ueCb = hqP->hqE->ue;
12788 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
12790 /* fill the pdcch and HqProc */
12791 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12793 /* Remove the raCb from cell's toBeSchdLst */
12794 cmLListDelFrm(&cell->ccchSduUeLst, &ueCb->ccchSduLnk);
12795 ueCb->ccchSduLnk.node = (PTR)NULLP;
12797 /* Fix : Resetting this required to avoid complication
12798 * in reestablishment case */
12799 ueCb->dlCcchInfo.bo = 0;
12801 /* Indicate DHM of the CCCH LC scheduling */
/* No contention-resolution CE for a plain CCCH SDU transmission. */
12802 hqP->tbInfo[0].contResCe = NOTPRSNT;
/* lcId 0 == CCCH; payload excludes the MSG4 MAC header size. */
12803 lchSchdData.lcId = 0;
12804 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12805 (RGSCH_MSG4_HDRSIZE);
12806 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12808 /* Fix: syed dlAllocCb reset should be performed.
12809 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12810 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12812 /* Fix: syed dlAllocCb reset should be performed.
12813 * zombie info in dlAllocCb leading to crash rbNum wraparound */
/* Second pass: CCCH SDU tx requests that were not scheduled. */
12814 node = allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst.first;
12817 hqP = (RgSchDlHqProcCb *)(node->node);
12818 ueCb = hqP->hqE->ue;
12820 /* Release HqProc */
12821 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12822 /*Fix: Removing releasing of TB1 as it will not exist for CCCH SDU and hence caused a crash*/
12823 /*rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12824 /* reset the UE allocation Information */
12825 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12832 * @brief This function Processes the Final Allocations
12833 * made by the RB Allocator against the requested
12834 * CCCH tx Allocations.
12838 * Function: rgSCHCmnDlCcchTxFnlz
12839 * Purpose: This function Processes the Final Allocations
12840 * made by the RB Allocator against the requested
12841 * CCCH tx Allocations.
12842 * Scans through the scheduled list of msg4 trans
12843 * fills the corresponding pdcch, adds the hqProc to
12844 * the corresponding SubFrm and removes the hqP from
12847 * Invoked by: Common Scheduler
12849 * @param[in] RgSchCellCb *cell
12850 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
/* Finalizes first-transmission Msg4 allocations. Scheduled: fill
 * PDCCH/HARQ, drop the raCb from the RA scheduling list, mark the
 * contention-resolution CE present, report LC bytes to DHM only when
 * a CCCH SDU is actually piggybacked (bo != 0), then clear allocation
 * state. Non-scheduled: release the HARQ process (TB0 only) and clear
 * allocation state. */
12855 PRIVATE Void rgSCHCmnDlCcchTxFnlz
12858 RgSchCmnDlRbAllocInfo *allocInfo
12861 PRIVATE Void rgSCHCmnDlCcchTxFnlz(cell, allocInfo)
12863 RgSchCmnDlRbAllocInfo *allocInfo;
12868 RgSchDlRbAlloc *rbAllocInfo;
12869 RgSchDlHqProcCb *hqP;
12870 RgSchLchAllocInfo lchSchdData;
12871 TRC2(rgSCHCmnDlCcchTxFnlz);
12873 /* Traverse through the Scheduled Retx List */
12874 node = allocInfo->msg4Alloc.schdMsg4TxLst.first;
12877 hqP = (RgSchDlHqProcCb *)(node->node);
12878 raCb = hqP->hqE->raCb;
12880 rbAllocInfo = &raCb->rbAllocInfo;
12882 /* fill the pdcch and HqProc */
12883 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12884 /* MSG4 Fix Start */
12886 rgSCHRamRmvFrmRaInfoSchdLst(cell, raCb);
12889 /* Indicate DHM of the CCCH LC scheduling */
/* lcId 0 == CCCH; payload excludes MAC header and the 6-byte
 * contention-resolution CE. */
12890 lchSchdData.lcId = 0;
12891 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12892 (RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE);
12893 /* TRansmitting presence of cont Res CE across MAC-SCH interface to
12894 * identify CCCH SDU transmissions which need to be done
12896 * contention resolution CE*/
12897 hqP->tbInfo[0].contResCe = PRSNT_NODEF;
12898 /*Dont add lc if only cont res CE is being transmitted*/
12899 if(raCb->dlCcchInfo.bo)
12901 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12906 /* Fix: syed dlAllocCb reset should be performed.
12907 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12908 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12909 rgSCHCmnDlHqPResetTemp(hqP);
/* Second pass: Msg4 tx requests that were not scheduled. */
12911 node = allocInfo->msg4Alloc.nonSchdMsg4TxLst.first;
12914 hqP = (RgSchDlHqProcCb *)(node->node);
12915 raCb = hqP->hqE->raCb;
12917 rbAllocInfo = &raCb->rbAllocInfo;
12918 /* Release HqProc */
12919 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12920 /*Fix: Removing releasing of TB1 as it will not exist for MSG4 and hence caused a crash*/
12921 /* rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12922 /* reset the UE allocation Information */
12923 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12924 rgSCHCmnDlHqPResetTemp(hqP);
12931 * @brief This function calculates the BI Index to be sent in the Bi header
12935 * Function: rgSCHCmnGetBiIndex
12936 * Purpose: This function Processes utilizes the previous BI time value
12937 * calculated and the difference last BI sent time and current time. To
12938 * calculate the latest BI Index. It also considers the how many UE's
12939 * Unserved in this subframe.
12941 * Invoked by: Common Scheduler
12943 * @param[in] RgSchCellCb *cell
12944 * @param[in] U32 ueCount
/* Returns the Backoff Indicator (BI) table index for the RAR header:
 * decays the previously signalled BI by elapsed subframes, derives a
 * new BI value from the unserved-UE count, records the send time, and
 * maps the value to the smallest index in rgSchCmnBiTbl that covers it
 * (table per 36.321 Table 7.2-1). */
12949 PUBLIC U8 rgSCHCmnGetBiIndex
12955 PUBLIC U8 rgSCHCmnGetBiIndex(cell, ueCount)
12960 S16 prevVal = 0; /* To Store Intermediate Value */
12961 U16 newBiVal = 0; /* To store Bi Value in millisecond */
12965 TRC2(rgSCHCmnGetBiIndex)
12967 if (cell->biInfo.prevBiTime != 0)
/* Elapsed subframes since the BI was last sent; eMTC cells use the
 * extended SFN-aware difference macro. */
12970 if(cell->emtcEnable == TRUE)
12972 timeDiff =(RGSCH_CALC_SF_DIFF_EMTC(cell->crntTime, cell->biInfo.biTime));
12977 timeDiff =(RGSCH_CALC_SF_DIFF(cell->crntTime, cell->biInfo.biTime));
/* Residual backoff still "owed" from the previous BI (signed: may go
 * negative when more time elapsed than the previous BI value). */
12980 prevVal = cell->biInfo.prevBiTime - timeDiff;
12986 newBiVal = RG_SCH_CMN_GET_BI_VAL(prevVal,ueCount);
12987 /* To be used next time when BI is calculated */
12989 if(cell->emtcEnable == TRUE)
12991 RGSCHCPYTIMEINFO_EMTC(cell->crntTime, cell->biInfo.biTime)
12996 RGSCHCPYTIMEINFO(cell->crntTime, cell->biInfo.biTime)
12999 /* Search the actual BI Index from table Backoff Parameters Value and
13000 * return that Index */
13003 if (rgSchCmnBiTbl[idx] > newBiVal)
13008 }while(idx < RG_SCH_CMN_NUM_BI_VAL-1);
/* Remember the ms value actually signalled so the decay above works. */
13009 cell->biInfo.prevBiTime = rgSchCmnBiTbl[idx];
13010 /* For 16 Entries in Table 7.2.1 36.321.880 - 3 reserved so total 13 Entries */
13011 RETVALUE(idx); /* Returning reserved value from table UE treats it has 960 ms */
13012 } /* rgSCHCmnGetBiIndex */
13016 * @brief This function Processes the Final Allocations
13017 * made by the RB Allocator against the requested
13018 * RAR allocations. Assumption: The reuqested
13019 * allocations are always satisfied completely.
13020 * Hence no roll back.
13024 * Function: rgSCHCmnDlRaRspFnlz
13025 * Purpose: This function Processes the Final Allocations
13026 * made by the RB Allocator against the requested.
13027 * Takes care of PDCCH filling.
13029 * Invoked by: Common Scheduler
13031 * @param[in] RgSchCellCb *cell
13032 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
/* Finalizes RAR (Random Access Response) allocations: for each common
 * PDCCH granted to a RA-RNTI, walks that RA-RNTI's request list,
 * creates RA control blocks, requests Msg3 UL grants, queues the RARs
 * on the target DL subframe, fills the PDCCH, and attaches a Backoff
 * Indicator when overload control or unserved requests demand it.
 * PDCCHs that end up carrying no RAR are returned to the pool. */
13037 PRIVATE Void rgSCHCmnDlRaRspFnlz
13040 RgSchCmnDlRbAllocInfo *allocInfo
13043 PRIVATE Void rgSCHCmnDlRaRspFnlz(cell, allocInfo)
13045 RgSchCmnDlRbAllocInfo *allocInfo;
13049 RgSchDlRbAlloc *raRspAlloc;
13050 RgSchDlSf *subFrm = NULLP;
13054 RgSchRaReqInfo *raReq;
13056 RgSchUlAlloc *ulAllocRef=NULLP;
13057 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13058 U8 allocRapidCnt = 0;
13060 U32 msg3SchdIdx = 0;
13061 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
13065 TRC2(rgSCHCmnDlRaRspFnlz);
/* One iteration per common-channel PDCCH candidate reserved for RAR. */
13067 for (rarCnt=0; rarCnt<RG_SCH_CMN_MAX_CMN_PDCCH; rarCnt++)
13069 raRspAlloc = &allocInfo->raRspAlloc[rarCnt];
13070 /* Having likely condition first for optimization */
13071 if (!raRspAlloc->pdcch)
13077 subFrm = raRspAlloc->dlSf;
13078 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
13079 /* Corrected RACH handling for multiple RAPIDs per RARNTI */
13080 allocRapidCnt = raRspAlloc->numRapids;
13081 while (allocRapidCnt)
13083 raReq = (RgSchRaReqInfo *)(reqLst->first->node);
13084 /* RACHO: If dedicated preamble, then allocate UL Grant
13085 * (consequence of handover/pdcchOrder) and continue */
13086 if (RGSCH_IS_DEDPRM(cell, raReq->raReq.rapId))
13088 rgSCHCmnHdlHoPo(cell, &subFrm->raRsp[rarCnt].contFreeUeLst,
/* Dedicated-preamble request consumed: free it and move on. */
13090 cmLListDelFrm(reqLst, reqLst->first);
13092 /* ccpu00117052 - MOD - Passing double pointer
13093 for proper NULLP assignment*/
13094 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13095 sizeof(RgSchRaReqInfo));
13099 if(cell->overLoadBackOffEnab)
13100 {/* rach Overlaod conrol is triggerd, Skipping this rach */
13101 cmLListDelFrm(reqLst, reqLst->first);
13103 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13104 sizeof(RgSchRaReqInfo));
13107 /* Attempt to include each RA request into the RSP */
13108 /* Any failure in the procedure is considered to */
13109 /* affect futher allocations in the same TTI. When */
13110 /* a failure happens, we break out and complete */
13111 /* the processing for random access */
13112 if (rgSCHRamCreateRaCb(cell, &raCb, &err) != ROK)
13116 /* Msg3 allocation request to USM */
/* Preamble group A vs B is decided by the RAPID range. */
13117 if (raReq->raReq.rapId < cell->rachCfg.sizeRaPreambleGrpA)
13121 /*ccpu00128820 - MOD - Msg3 alloc double delete issue*/
13122 rgSCHCmnMsg3GrntReq(cell, raCb->tmpCrnti, preamGrpA, \
13123 &(raCb->msg3HqProc), &ulAllocRef, &raCb->msg3HqProcId);
/* No UL room for Msg3: undo the raCb and stop serving this RA-RNTI. */
13124 if (ulAllocRef == NULLP)
13126 rgSCHRamDelRaCb(cell, raCb, TRUE);
/* Use the CQI estimated from the preamble if present, else the cell
 * default CCCH CQI. */
13129 if (raReq->raReq.cqiPres)
13131 raCb->ccchCqi = raReq->raReq.cqiIdx;
13135 raCb->ccchCqi = cellDl->ccchCqi;
13137 raCb->rapId = raReq->raReq.rapId;
13138 raCb->ta.pres = TRUE;
13139 raCb->ta.val = raReq->raReq.ta;
13140 raCb->msg3Grnt = ulAllocRef->grnt;
13141 /* Populating the tpc value received */
13142 raCb->msg3Grnt.tpc = raReq->raReq.tpc;
13143 /* PHR handling for MSG3 */
13144 ulAllocRef->raCb = raCb;
13146 /* To the crntTime, add the MIN time at which UE will
13147 * actually send MSG3 i.e DL_DELTA+6 */
13148 raCb->msg3AllocTime = cell->crntTime;
13149 RGSCH_INCR_SUB_FRAME(raCb->msg3AllocTime, RG_SCH_CMN_MIN_MSG3_RECP_INTRVL);
/* TDD: map Msg3 to the proper UL subframe via the k-table for this
 * UL/DL configuration (code below is the TDD branch of the extract). */
13151 msg3SchdIdx = (cell->crntTime.slot+RG_SCH_CMN_DL_DELTA) %
13152 RGSCH_NUM_SUB_FRAMES;
13153 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
13154 special subframe */
13155 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][msg3SchdIdx] !=
13156 RG_SCH_TDD_UL_SUBFRAME)
13158 RGSCHCMNADDTOCRNTTIME(cell->crntTime,raCb->msg3AllocTime,
13159 RG_SCH_CMN_DL_DELTA)
13160 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][
13161 raCb->msg3AllocTime.slot];
13162 RGSCHCMNADDTOCRNTTIME(raCb->msg3AllocTime, raCb->msg3AllocTime,
/* Queue the RAR for this subframe and release the request. */
13166 cmLListAdd2Tail(&subFrm->raRsp[rarCnt].raRspLst, &raCb->rspLnk);
13167 raCb->rspLnk.node = (PTR)raCb;
13168 cmLListDelFrm(reqLst, reqLst->first);
13170 /* ccpu00117052 - MOD - Passing double pointer
13171 for proper NULLP assignment*/
13172 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13173 sizeof(RgSchRaReqInfo));
13175 /* SR_RACH_STATS : RAR scheduled */
13180 /* Fill subframe data members */
13181 subFrm->raRsp[rarCnt].raRnti = raRspAlloc->rnti;
13182 subFrm->raRsp[rarCnt].pdcch = raRspAlloc->pdcch;
13183 subFrm->raRsp[rarCnt].tbSz = raRspAlloc->tbInfo[0].bytesAlloc;
13184 /* Fill PDCCH data members */
13185 rgSCHCmnFillPdcch(cell, subFrm->raRsp[rarCnt].pdcch, raRspAlloc);
/* Overload control: signal the configured backoff instead of grants. */
13188 if(cell->overLoadBackOffEnab)
13189 {/* rach Overlaod conrol is triggerd, Skipping this rach */
13190 subFrm->raRsp[rarCnt].backOffInd.pres = PRSNT_NODEF;
13191 subFrm->raRsp[rarCnt].backOffInd.val = cell->overLoadBackOffval;
13196 subFrm->raRsp[rarCnt].backOffInd.pres = NOTPRSNT;
13199 /*[ccpu00125212] Avoiding sending of empty RAR in case of RAR window
13200 is short and UE is sending unauthorised preamble.*/
13201 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
/* BI is estimated for this RAR and there are still unserved requests:
 * compute and attach the backoff index. */
13202 if ((raRspAlloc->biEstmt) && (reqLst->count))
13204 subFrm->raRsp[0].backOffInd.pres = PRSNT_NODEF;
13205 /* Added as part of Upgrade */
13206 subFrm->raRsp[0].backOffInd.val =
13207 rgSCHCmnGetBiIndex(cell, reqLst->count);
13209 /* SR_RACH_STATS : Back Off Inds */
/* Nothing (no RAR, no contention-free UE) ended up on this PDCCH:
 * give it back so it is not wasted. */
13213 else if ((subFrm->raRsp[rarCnt].raRspLst.first == NULLP) &&
13214 (subFrm->raRsp[rarCnt].contFreeUeLst.first == NULLP))
13216 /* Return the grabbed PDCCH */
13217 rgSCHUtlPdcchPut(cell, &subFrm->pdcchInfo, raRspAlloc->pdcch);
13218 subFrm->raRsp[rarCnt].pdcch = NULLP;
13219 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRaRspAlloc(): "
13220 "Not even one RaReq.");
13224 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId,
13225 "RNTI:%d Scheduled RAR @ (%u,%u) ",
13227 cell->crntTime.sfn,
13228 cell->crntTime.slot);
13234 * @brief This function computes rv.
13238 * Function: rgSCHCmnDlCalcRvForBcch
13239 * Purpose: This function computes rv.
13241 * Invoked by: Common Scheduler
13243 * @param[in] RgSchCellCb *cell
13244 * @param[in] Bool si
/* Computes the redundancy version (RV) for a BCCH-on-DLSCH transport
 * block at the subframe DL_DELTA ahead of the current time, per
 * 36.321: RV = ceil(3k/2) mod 4 with k = (SFN/2) mod 4 for SIB1 (the
 * visible formula); parameters si/i select the SI-message variant
 * (that branch is elided in this extract). */
13250 PRIVATE U8 rgSCHCmnDlCalcRvForBcch
13257 PRIVATE U8 rgSCHCmnDlCalcRvForBcch(cell, si, i)
13264 CmLteTimingInfo frm;
13265 TRC2(rgSCHCmnDlCalcRvForBcch);
/* RV is tied to the transmission subframe, not the scheduling one. */
13267 frm = cell->crntTime;
13268 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
13276 k = (frm.sfn/2) % 4;
13278 rv = RGSCH_CEIL(3*k, 2) % 4;
13283 * @brief This function Processes the Final Allocations
13284 * made by the RB Allocator against the requested
13285 * BCCH/PCCH allocations. Assumption: The reuqested
13286 * allocations are always satisfied completely.
13287 * Hence no roll back.
13291 * Function: rgSCHCmnDlBcchPcchFnlz
13292 * Purpose: This function Processes the Final Allocations
13293 * made by the RB Allocator against the requested.
13294 * Takes care of PDCCH filling.
13296 * Invoked by: Common Scheduler
13298 * @param[in] RgSchCellCb *cell
13299 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
/* Finalizes BCCH and PCCH allocations for the subframe: fills the
 * paging (PCCH) PDCCH and consumes its BO report, then fills the BCCH
 * PDCCH — SIB1 on the first BCCH occasion, other SI messages
 * otherwise — copying the SI PDU to the MAC interface buffer (RGR SI
 * scheduling build), computing the RV, maintaining retx counters, and
 * updating per-channel Tx-power offsets and DL TB counters. */
13304 PRIVATE Void rgSCHCmnDlBcchPcchFnlz
13307 RgSchCmnDlRbAllocInfo *allocInfo
13310 PRIVATE Void rgSCHCmnDlBcchPcchFnlz(cell, allocInfo)
13312 RgSchCmnDlRbAllocInfo *allocInfo;
13315 RgSchDlRbAlloc *rbAllocInfo;
/* nextSfIdx selects the interface allocation slot; the three variants
 * below belong to different #if builds (guards elided in extract). */
13319 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
13321 #ifdef LTEMAC_HDFDD
13322 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13324 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13328 /* Moving variables to available scope for optimization */
13329 RgSchClcDlLcCb *pcch;
13332 RgSchClcDlLcCb *bcch;
13335 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13337 TRC2(rgSCHCmnDlBcchPcchFnlz);
/* ---- PCCH (paging) finalization ---- */
13340 rbAllocInfo = &allocInfo->pcchAlloc;
13341 if (rbAllocInfo->pdcch)
13343 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13345 /* Added sfIdx calculation for TDD as well */
13347 #ifdef LTEMAC_HDFDD
13348 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13350 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13353 subFrm = rbAllocInfo->dlSf;
13354 pcch = rgSCHDbmGetPcch(cell);
13357 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnDlBcchPcchFnlz( ): "
13358 "No Pcch Present");
13362 /* Added Dl TB count for paging message transmission*/
13364 cell->dlUlTbCnt.tbTransDlTotalCnt++;
/* Paging BO report is consumed once scheduled. */
13366 bo = (RgSchClcBoRpt *)pcch->boLst.first->node;
13367 cmLListDelFrm(&pcch->boLst, &bo->boLstEnt);
13368 /* ccpu00117052 - MOD - Passing double pointer
13369 for proper NULLP assignment*/
13370 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13371 /* Fill subframe data members */
13372 subFrm->pcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13373 subFrm->pcch.pdcch = rbAllocInfo->pdcch;
13374 /* Fill PDCCH data members */
13375 rgSCHCmnFillPdcch(cell, subFrm->pcch.pdcch, rbAllocInfo);
13376 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, pcch->lcId, TRUE);
13377 /* ccpu00132314-ADD-Update the tx power allocation info
13378 TODO-Need to add a check for max tx power per symbol */
13379 subfrmAlloc->cmnLcInfo.pcchInfo.txPwrOffset = cellDl->pcchTxPwrOffset;
/* ---- BCCH finalization ---- */
13383 rbAllocInfo = &allocInfo->bcchAlloc;
13384 if (rbAllocInfo->pdcch)
13386 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13388 #ifdef LTEMAC_HDFDD
13389 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13391 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13394 subFrm = rbAllocInfo->dlSf;
13396 /* Fill subframe data members */
13397 subFrm->bcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13398 subFrm->bcch.pdcch = rbAllocInfo->pdcch;
13399 /* Fill PDCCH data members */
13400 rgSCHCmnFillPdcch(cell, subFrm->bcch.pdcch, rbAllocInfo);
/* schdFirst => SIB1 occasion; otherwise some other SI message. */
13402 if(rbAllocInfo->schdFirst)
13405 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
13406 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
13408 /*Copy the SIB1 msg buff into interface buffer */
13409 SCpyMsgMsg(cell->siCb.crntSiInfo.sib1Info.sib1,
13410 rgSchCb[cell->instIdx].rgSchInit.region,
13411 rgSchCb[cell->instIdx].rgSchInit.pool,
13412 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13413 #endif/*RGR_SI_SCH*/
/* SIB1 RV: si=FALSE, i=0. */
13414 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13415 rgSCHCmnDlCalcRvForBcch(cell, FALSE, 0);
/* Non-SIB1 SI message: use the SI context's i and retx counter. */
13423 i = cell->siCb.siCtx.i;
13424 /*Decrement the retransmission count */
13425 cell->siCb.siCtx.retxCntRem--;
13427 /*Copy the SI msg buff into interface buffer */
/* Normal SI vs warning (ETWS/CMAS-style) SI PDU source. */
13428 if(cell->siCb.siCtx.warningSiFlag == FALSE)
13430 SCpyMsgMsg(cell->siCb.siArray[cell->siCb.siCtx.siId-1].si,
13431 rgSchCb[cell->instIdx].rgSchInit.region,
13432 rgSchCb[cell->instIdx].rgSchInit.pool,
13433 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13437 pdu = rgSCHUtlGetWarningSiPdu(cell);
13438 RGSCH_NULL_CHECK(cell->instIdx, pdu);
13440 rgSchCb[cell->instIdx].rgSchInit.region,
13441 rgSchCb[cell->instIdx].rgSchInit.pool,
13442 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
/* Last retransmission of the warning SI: release the PDU. */
13443 if(cell->siCb.siCtx.retxCntRem == 0)
13445 rgSCHUtlFreeWarningSiPdu(cell);
13446 cell->siCb.siCtx.warningSiFlag = FALSE;
13451 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
13452 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
13454 if(bo->retxCnt != cell->siCfg.retxCnt-1)
13459 #endif/*RGR_SI_SCH*/
13460 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13461 rgSCHCmnDlCalcRvForBcch(cell, TRUE, i);
13464 /* Added Dl TB count for SIB1 and SI messages transmission.
13465 * This counter will be incremented only for the first transmission
13466 * (with RV 0) of these messages*/
13468 if(subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv == 0)
13470 cell->dlUlTbCnt.tbTransDlTotalCnt++;
/* BO entry is freed only when its retransmission budget is spent. */
13474 if(bo->retxCnt == 0)
13476 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
13477 /* ccpu00117052 - MOD - Passing double pointer
13478 for proper NULLP assignment*/
13479 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13481 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, bcch->lcId, sendInd);
13483 /*Fill the interface info */
13484 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, NULLD, NULLD);
13486 /* ccpu00132314-ADD-Update the tx power allocation info
13487 TODO-Need to add a check for max tx power per symbol */
13488 subfrmAlloc->cmnLcInfo.bcchInfo.txPwrOffset = cellDl->bcchTxPwrOffset;
13490 /*mBuf has been already copied above */
13491 #endif/*RGR_SI_SCH*/
13504 * Function: rgSCHCmnUlSetAllUnSched
13507 * Invoked by: Common Scheduler
13509 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
/* Moves every pending UL scheduling candidate (contention-resolution
 * UEs, retransmission UEs, and new-transmission UEs) to its respective
 * non-scheduled list — used when nothing can be scheduled this TTI.
 * Each loop re-reads .first because the move unlinks the node. */
13514 PRIVATE Void rgSCHCmnUlSetAllUnSched
13516 RgSchCmnUlRbAllocInfo *allocInfo
13519 PRIVATE Void rgSCHCmnUlSetAllUnSched(allocInfo)
13520 RgSchCmnUlRbAllocInfo *allocInfo;
13525 TRC2(rgSCHCmnUlSetAllUnSched);
13527 node = allocInfo->contResLst.first;
13530 rgSCHCmnUlMov2NonSchdCntResLst(allocInfo, (RgSchUeCb *)node->node);
13531 node = allocInfo->contResLst.first;
13534 node = allocInfo->retxUeLst.first;
13537 rgSCHCmnUlMov2NonSchdRetxUeLst(allocInfo, (RgSchUeCb *)node->node);
13538 node = allocInfo->retxUeLst.first;
13541 node = allocInfo->ueLst.first;
13544 rgSCHCmnUlMov2NonSchdUeLst(allocInfo, (RgSchUeCb *)node->node);
13545 node = allocInfo->ueLst.first;
13557 * Function: rgSCHCmnUlAdd2CntResLst
13560 * Invoked by: Common Scheduler
13562 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13563 * @param[in] RgSchUeCb *ue
/* Appends a UE awaiting contention resolution to the UL allocation
 * info's contResLst, linking through the UE's per-cell UL alloc CB. */
13568 PUBLIC Void rgSCHCmnUlAdd2CntResLst
13570 RgSchCmnUlRbAllocInfo *allocInfo,
13574 PUBLIC Void rgSCHCmnUlAdd2CntResLst(allocInfo, ue)
13575 RgSchCmnUlRbAllocInfo *allocInfo;
13579 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,ue->cell))->alloc);
13580 TRC2(rgSCHCmnUlAdd2CntResLst);
/* reqLnk doubles as membership marker; node points back to the UE. */
13581 cmLListAdd2Tail(&allocInfo->contResLst, &ulAllocInfo->reqLnk);
13582 ulAllocInfo->reqLnk.node = (PTR)ue;
13591 * Function: rgSCHCmnUlAdd2UeLst
13594 * Invoked by: Common Scheduler
13596 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13597 * @param[in] RgSchUeCb *ue
/* Appends a UE to the UL new-transmission candidate list, but only if
 * it is not already linked anywhere (reqLnk.node == NULLP guards
 * against double insertion). */
13602 PUBLIC Void rgSCHCmnUlAdd2UeLst
13605 RgSchCmnUlRbAllocInfo *allocInfo,
13609 PUBLIC Void rgSCHCmnUlAdd2UeLst(cell, allocInfo, ue)
13611 RgSchCmnUlRbAllocInfo *allocInfo;
13615 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,cell))->alloc);
13616 TRC2(rgSCHCmnUlAdd2UeLst);
13617 if (ulAllocInfo->reqLnk.node == NULLP)
13619 cmLListAdd2Tail(&allocInfo->ueLst, &ulAllocInfo->reqLnk);
13620 ulAllocInfo->reqLnk.node = (PTR)ue;
13630 * Function: rgSCHCmnAllocUlRb
13631 * Purpose: To do RB allocations for uplink
13633 * Invoked by: Common Scheduler
13635 * @param[in] RgSchCellCb *cell
13636 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
/* Entry point for UL RB allocation in a TTI: runs the list-based
 * allocator over the new-transmission UE list for the UL subframe in
 * allocInfo->sf, splitting results into schdUeLst / nonSchdUeLst. */
13640 PUBLIC Void rgSCHCmnAllocUlRb
13643 RgSchCmnUlRbAllocInfo *allocInfo
13646 PUBLIC Void rgSCHCmnAllocUlRb(cell, allocInfo)
13648 RgSchCmnUlRbAllocInfo *allocInfo;
13651 RgSchUlSf *sf = allocInfo->sf;
13652 TRC2(rgSCHCmnAllocUlRb);
13654 /* Schedule for new transmissions */
13655 rgSCHCmnUlRbAllocForLst(cell, sf, allocInfo->ueLst.count,
13656 &allocInfo->ueLst, &allocInfo->schdUeLst,
13657 &allocInfo->nonSchdUeLst, (Bool)TRUE);
13661 /***********************************************************
13663 * Func : rgSCHCmnUlRbAllocForLst
13665 * Desc : Allocate for a list in cmn rb alloc information passed
13674 **********************************************************/
/* Walks up to `count` UEs from reqLst and tries an UL RB allocation for
 * each: UEs that get PDCCH+RBs go to schdLst (and their alloc info is
 * filled for MAC), failures go to nonSchdLst. For new transmissions
 * (isNewTx) it also records per-UE RNTI/nPrb into the current
 * sfAllocArr entry and computes the UL timing (TDD k-table / FDD
 * delta). Once UL holes are exhausted, all remaining UEs are swept
 * into nonSchdLst by the trailing loop. */
13676 PRIVATE Void rgSCHCmnUlRbAllocForLst
13682 CmLListCp *schdLst,
13683 CmLListCp *nonSchdLst,
13687 PRIVATE Void rgSCHCmnUlRbAllocForLst(cell, sf, count, reqLst, schdLst,
13688 nonSchdLst, isNewTx)
13693 CmLListCp *schdLst;
13694 CmLListCp *nonSchdLst;
13703 CmLteTimingInfo timeInfo;
13706 TRC2(rgSCHCmnUlRbAllocForLst);
/* Output lists are (re)initialized only when empty. */
13708 if(schdLst->count == 0)
13710 cmLListInit(schdLst);
13713 cmLListInit(nonSchdLst);
13715 if(isNewTx == TRUE)
13717 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.numUes = (U8) count;
/* TDD branch: PUSCH timing derived from the k-table for this cfg. */
13719 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime, timeInfo, TFU_ULCNTRL_DLDELTA);
13720 k = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][timeInfo.subframe];
13721 RG_SCH_ADD_TO_CRNT_TIME(timeInfo,
13722 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo, k);
/* FDD branch: fixed DL-control + PDCCH->PUSCH delta. */
13724 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime,cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo,
13725 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
13730 for (lnk = reqLst->first; count; lnk = lnk->next, --count)
13732 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13733 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
/* No free UL hole left: stop allocating and sweep the rest below. */
13738 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
13743 ueUl->subbandShare = ueUl->subbandRequired;
13744 if(isNewTx == TRUE)
/* Cap the grant at the UE's configured max PRB (5GTF). */
13746 maxRb = RGSCH_MIN((ueUl->subbandRequired * MAX_5GTF_VRBG_SIZE), ue->ue5gtfCb.maxPrb);
13748 ret = rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole);
13751 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, schdLst);
13752 rgSCHCmnUlUeFillAllocInfo(cell, ue);
/* Allocation failed: count it and park the UE as non-scheduled. */
13756 gUl5gtfRbAllocFail++;
13757 #if defined (TENB_STATS) && defined (RG_5GTF)
13758 cell->tenbStats->sch.ul5gtfRbAllocFail++;
13760 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13761 ue->isMsg4PdcchWithCrnti = FALSE;
13762 ue->isSrGrant = FALSE;
13765 if(isNewTx == TRUE)
13767 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13768 ulAllocInfo[count - 1].rnti = ue->ueId;
13769 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13770 ulAllocInfo[count - 1].numPrb = ue->ul.nPrb;
13773 ueUl->subbandShare = 0; /* This reset will take care of
13774 * all scheduler types */
/* Sweep: everything not reached above becomes non-scheduled. */
13776 for (; count; lnk = lnk->next, --count)
13778 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13779 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13780 ue->isMsg4PdcchWithCrnti = FALSE;
13786 /***********************************************************
13788 * Func : rgSCHCmnUlMdfyGrntForCqi
13790 * Desc : Modify UL Grant to consider presence of
13791 * CQI along with PUSCH Data.
13796 * - Scale down iTbs based on betaOffset and
13797 * size of Acqi Size.
13798 * - Optionally attempt to increase numSb by 1
13799 * if input payload size does not fit in due
13800 * to reduced tbSz as a result of iTbsNew.
13804 **********************************************************/
/* Adjusts an UL grant (in/out: *numSb, *iTbs) so PUSCH data still
 * meets the target coding efficiency `effTgt` after REs are stolen by
 * aperiodic CQI/RI and HARQ-ACK multiplexing. Iterates: estimate REs
 * consumed by CQI/RI and HARQ (beta-offset weighted), compute the
 * resulting bits-per-RE / modulation-order efficiency, and if above
 * target alternately add one subband or step iTbs down by
 * stepDownItbs. Fails when nPrb hits maxRb with iTbs already <= 10. */
13806 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi
13818 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi(cell, ue, maxRb, numSb, iTbs, hqSz, stepDownItbs, effTgt)
13829 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(ue->cell);
13834 U32 remREsForPusch;
13837 U32 betaOffVal = ue->ul.betaOffstVal;
13838 U32 cqiRiRptSz = ue->ul.cqiRiSz;
13839 U32 betaOffHqVal = rgSchCmnBetaHqOffstTbl[ue->ul.betaHqOffst];
13840 U32 resNumSb = *numSb;
13841 U32 puschEff = 1000;
13844 Bool mdfyiTbsFlg = FALSE;
13845 U8 resiTbs = *iTbs;
13847 TRC2(rgSCHCmnUlMdfyGrntForCqi)
/* Modulation order implied by the current iTbs; clamp by UE category
 * (only CAT-5 may exceed QPSK here, up to 64QAM). */
13852 iMcs = rgSCHCmnUlGetIMcsFrmITbs(resiTbs, RG_SCH_CMN_GET_UE_CTGY(ue));
13853 RG_SCH_UL_MCS_TO_MODODR(iMcs, modOdr);
13854 if (RG_SCH_CMN_GET_UE_CTGY(ue) != CM_LTE_UE_CAT_5)
13856 modOdr = RGSCH_MIN(RGSCH_QM_QPSK, modOdr);
13860 modOdr = RGSCH_MIN(RGSCH_QM_64QAM, modOdr);
13862 nPrb = resNumSb * cellUl->sbSize;
13863 /* Restricting the minumum iTbs requried to modify to 10 */
13864 if ((nPrb >= maxRb) && (resiTbs <= 10))
13866 /* Could not accomodate ACQI */
13869 totREs = nPrb * RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
13870 tbSz = rgTbSzTbl[0][resiTbs][nPrb-1];
13871 /* totalREs/tbSz = num of bits perRE. */
13872 cqiRiREs = (totREs * betaOffVal * cqiRiRptSz)/(1000 * tbSz); /* betaOffVal is represented
13873 as parts per 1000 */
13874 hqREs = (totREs * betaOffHqVal * hqSz)/(1000 * tbSz);
/* Only meaningful if control REs leave room for data. */
13876 if ((cqiRiREs + hqREs) < totREs)
13877 remREsForPusch = totREs - cqiRiREs - hqREs;
13878 bitsPerRe = (tbSz * 1000)/remREsForPusch; /* Multiplying by 1000 for Interger Oper */
13879 puschEff = bitsPerRe/modOdr;
13881 if (puschEff < effTgt)
13883 /* ensure resultant efficiency for PUSCH Data is within 0.93*/
13888 /* Alternate between increasing SB or decreasing iTbs until eff is met */
13889 if (mdfyiTbsFlg == FALSE)
13893 resNumSb = resNumSb + 1;
13895 mdfyiTbsFlg = TRUE;
13901 resiTbs-= stepDownItbs;
13903 mdfyiTbsFlg = FALSE;
13906 }while (1); /* Loop breaks if efficency is met
13907 or returns RFAILED if not able to meet the efficiency */
13915 /***********************************************************
13917 * Func : rgSCHCmnUlRbAllocForUe
13919 * Desc : Do uplink RB allocation for a UE.
13923 * Notes: Note that as of now, for retx, maxRb
13924 * is not considered. Alternatives, such
13925 * as dropping retx if it crosses maxRb
13926 * could be considered.
13930 **********************************************************/
/* Perform uplink RB allocation for one UE (5GTF path): obtain an UL
 * HARQ process and a PDCCH, carve VRBGs out of the subframe's per-beam
 * bookkeeping, and fill in the resulting RgSchUlAlloc and grant.
 * Note: per the header comment, maxRb is not honoured for retx. */
13932 PRIVATE S16 rgSCHCmnUlRbAllocForUe
13941 PRIVATE S16 rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole)
13949 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
13950 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
13951 RgSchUlAlloc *alloc = NULLP;
13957 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->schdHqProcIdx];
13959 RgSchUlHqProcCb *proc = NULLP;
13965 TfuDciFormat dciFrmt;
13969 TRC2(rgSCHCmnUlRbAllocForUe);
/* Fetch an available UL HARQ process into proc */
13971 rgSCHUhmGetAvlHqProc(cell, ue, &proc);
13974 //printf("UE [%d] HQ Proc unavailable\n", ue->ueId);
/* DCI format A2 for rank-2 transmission, A1 otherwise */
13979 if (ue->ue5gtfCb.rank == 2)
13981 dciFrmt = TFU_DCI_FORMAT_A2;
13986 dciFrmt = TFU_DCI_FORMAT_A1;
13989 /* 5gtf TODO : To pass dci frmt to this function */
13990 pdcch = rgSCHCmnPdcchAllocCrntSf(cell, ue);
13993 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
13994 "rgSCHCmnUlRbAllocForUe(): Could not get PDCCH for CRNTI:%d",ue->ueId);
13997 gUl5gtfPdcchSchd++;
13998 #if defined (TENB_STATS) && defined (RG_5GTF)
13999 cell->tenbStats->sch.ul5gtfPdcchSchd++;
/* Grant sizing: PRBs and MCS come from the UE's static 5GTF config */
14002 //TODO_SID using configured prb as of now
14003 nPrb = ue->ue5gtfCb.maxPrb;
14004 reqVrbg = nPrb/MAX_5GTF_VRBG_SIZE;
14005 iMcs = ue->ue5gtfCb.mcs; //gSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
/* Sanity check on the per-beam VRBG accounting before allocating */
14009 if((sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart > MAX_5GTF_VRBG)
14010 || (sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated > MAX_5GTF_VRBG))
14012 printf("5GTF_ERROR vrbg > 25 valstart = %d valalloc %d\n", sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart
14013 , sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated);
14018 /*TODO_SID: Workaround for alloc. Currently alloc is ulsf based. To handle multiple beams, we need a different
14019 design. Now alloc are formed based on MAX_5GTF_UE_SCH macro. */
14020 numVrbgTemp = MAX_5GTF_VRBG/MAX_5GTF_UE_SCH;
14023 alloc = rgSCHCmnUlSbAlloc(sf, numVrbgTemp,\
/* On allocation failure release the PDCCH acquired above */
14026 if (alloc == NULLP)
14028 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
14029 "rgSCHCmnUlRbAllocForUe(): Could not get UlAlloc %d CRNTI:%d",numVrbg,ue->ueId);
14030 rgSCHCmnPdcchRlsCrntSf(cell, pdcch);
14033 gUl5gtfAllocAllocated++;
14034 #if defined (TENB_STATS) && defined (RG_5GTF)
14035 cell->tenbStats->sch.ul5gtfAllocAllocated++;
/* Fill the grant and advance this beam's VRBG cursor/accounting */
14037 alloc->grnt.vrbgStart = sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart;
14038 alloc->grnt.numVrbg = numVrbg;
14039 alloc->grnt.numLyr = numLyr;
14040 alloc->grnt.dciFrmt = dciFrmt;
14042 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart += numVrbg;
14043 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated += numVrbg;
14045 //rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
14047 sf->totPrb += alloc->grnt.numRb;
14048 ue->ul.nPrb = alloc->grnt.numRb;
/* PRB counters are split by CSG (closed subscriber group) membership */
14050 if (ue->csgMmbrSta != TRUE)
14052 cellUl->ncsgPrbCnt += alloc->grnt.numRb;
14054 cellUl->totPrbCnt += (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14055 alloc->pdcch = pdcch;
14056 alloc->grnt.iMcs = iMcs;
14057 alloc->grnt.iMcsCrnt = iMcsCrnt;
14058 alloc->grnt.hop = 0;
14059 /* Initial Num RBs support for UCI on PUSCH */
14061 ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14063 alloc->forMsg3 = FALSE;
14064 //RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTb5gtfSzTbl[0], (iTbs));
14066 //ueUl->alloc.allocdBytes = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
14067 /* TODO_SID Allocating based on configured MCS as of now.
14068 Currently for format A2. When doing multi grp per tti, need to update this. */
14069 ueUl->alloc.allocdBytes = (rgSch5gtfTbSzTbl[iMcs]/8) * ue->ue5gtfCb.rank;
14071 alloc->grnt.datSz = ueUl->alloc.allocdBytes;
14072 //TODO_SID Need to check mod order.
14073 RG_SCH_CMN_TBS_TO_MODODR(iMcs, alloc->grnt.modOdr);
14074 //alloc->grnt.modOdr = 6;
14075 alloc->grnt.isRtx = FALSE;
/* RIV encoding of the VRBG assignment, carried in the DCI */
14077 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
14078 alloc->grnt.SCID = 0;
14079 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
14080 alloc->grnt.PMI = 0;
14081 alloc->grnt.uciOnxPUSCH = 0;
14082 alloc->grnt.hqProcId = proc->procId;
14084 alloc->hqProc = proc;
14085 alloc->hqProc->ulSfIdx = cellUl->schdIdx;
14087 /*commenting to retain the rnti used for transmission SPS/c-rnti */
14088 alloc->rnti = ue->ueId;
14089 ueUl->alloc.alloc = alloc;
14090 /*rntiwari-Adding the debug for generating the graph.*/
14091 /* No grant attr recorded now */
14095 /***********************************************************
14097 * Func : rgSCHCmnUlRbAllocAddUeToLst
14099 * Desc : Add UE to list (scheduled/non-scheduled list)
14100 * for UL RB allocation information.
14108 **********************************************************/
/* Append the UE to the given UL RB-allocation list (scheduled or
 * non-scheduled) via its schdLstLnk link and bump 5GTF counters. */
14110 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst
14117 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst(cell, ue, lst)
14123 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
14124 TRC2(rgSCHCmnUlRbAllocAddUeToLst);
14127 gUl5gtfUeRbAllocDone++;
14128 #if defined (TENB_STATS) && defined (RG_5GTF)
14129 cell->tenbStats->sch.ul5gtfUeRbAllocDone++;
/* The list node's payload points back at the UE control block */
14131 cmLListAdd2Tail(lst, &ueUl->alloc.schdLstLnk);
14132 ueUl->alloc.schdLstLnk.node = (PTR)ue;
14137 * @brief This function Processes the Final Allocations
14138 * made by the RB Allocator against the requested.
14142 * Function: rgSCHCmnUlAllocFnlz
14143 * Purpose: This function Processes the Final Allocations
14144 * made by the RB Allocator against the requested.
14146 * Invoked by: Common Scheduler
14148 * @param[in] RgSchCellCb *cell
14149 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
/* Finalize the UL allocations of this TTI against what was requested,
 * by delegating to the UL-scheduler-specific finalization callback. */
14154 PRIVATE Void rgSCHCmnUlAllocFnlz
14157 RgSchCmnUlRbAllocInfo *allocInfo
14160 PRIVATE Void rgSCHCmnUlAllocFnlz(cell, allocInfo)
14162 RgSchCmnUlRbAllocInfo *allocInfo;
14165 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14166 TRC2(rgSCHCmnUlAllocFnlz);
14168 /* call scheduler specific Finalization */
14169 cellSch->apisUl->rgSCHUlAllocFnlz(cell, allocInfo);
14175 * @brief This function Processes the Final Allocations
14176 * made by the RB Allocator against the requested.
14180 * Function: rgSCHCmnDlAllocFnlz
14181 * Purpose: This function Processes the Final Allocations
14182 * made by the RB Allocator against the requested.
14184 * Invoked by: Common Scheduler
14186 * @param[in] RgSchCellCb *cell
/* Finalize all DL allocations for this TTI: common channels first
 * (CCCH tx/retx, CCCH SDU tx/retx, RA response), then the
 * DL-scheduler-specific finalization for dedicated traffic. */
14191 PUBLIC Void rgSCHCmnDlAllocFnlz
14196 PUBLIC Void rgSCHCmnDlAllocFnlz(cell)
14200 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14201 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
14203 TRC2(rgSCHCmnDlAllocFnlz);
14205 rgSCHCmnDlCcchRetxFnlz(cell, allocInfo);
14206 rgSCHCmnDlCcchTxFnlz(cell, allocInfo);
14208 /* Added below functions for handling CCCH SDU transmission received
14210 * * guard timer expiry*/
14211 rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo);
14212 rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo);
14214 rgSCHCmnDlRaRspFnlz(cell, allocInfo);
14215 /* call scheduler specific Finalization */
14216 cellSch->apisDl->rgSCHDlAllocFnlz(cell, allocInfo);
14218 /* Stack Crash problem for TRACE5 Changes. Added the return below */
14225 * @brief Update an uplink subframe.
14229 * Function : rgSCHCmnUlUpdSf
14231 * For each allocation
14232 * - if no more tx needed
14233 * - Release allocation
14235 * - Perform retransmission
14237 * @param[in] RgSchUlSf *sf
/* Update an uplink subframe after HARQ feedback: walk all of its
 * allocations, queueing an adaptive retransmission for each alloc that
 * still needs one and releasing the rest, then reset the subframe so
 * it is ready for fresh allocations. */
14241 PRIVATE Void rgSCHCmnUlUpdSf
14244 RgSchCmnUlRbAllocInfo *allocInfo,
14248 PRIVATE Void rgSCHCmnUlUpdSf(cell, allocInfo, sf)
14250 RgSchCmnUlRbAllocInfo *allocInfo;
14255 TRC2(rgSCHCmnUlUpdSf);
/* Drain the subframe's allocation list from the head */
14257 while ((lnk = sf->allocs.first))
14259 RgSchUlAlloc *alloc = (RgSchUlAlloc *)lnk->node;
/* Alloc is done if its CRC was received or the HARQ process has
 * exhausted its remaining transmissions */
14262 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
14267 /* If need to handle all retx together, run another loop separately */
14268 rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc);
14270 rgSCHCmnUlRlsUlAlloc(cell, sf, alloc);
14273 /* By this time, all allocs would have been cleared and
14274 * SF is reset to be made ready for new allocations. */
14275 rgSCHCmnUlSfReset(cell, sf);
14276 /* In case there are timing problems due to msg3
14277 * allocations being done in advance, (which will
14278 * probably happen with the current FDD code that
14279 * handles 8 subframes) one solution
14280 * could be to hold the (recent) msg3 allocs in a separate
14281 * list, and then possibly add that to the actual
14282 * list later. So at this time while allocations are
14283 * traversed, the recent msg3 ones are not seen. Anytime after
14284 * this (a good time is when the usual allocations
14285 * are made), msg3 allocations could be transferred to the
14286 * normal list. Not doing this now as it is assumed
14287 * that incorporation of TDD shall take care of this.
14295 * @brief Handle uplink allocation for retransmission.
14299 * Function : rgSCHCmnUlHndlAllocRetx
14301 * Processing Steps:
14302 * - Add to queue for retx.
14303 * - Do not release here, release happens as part
14304 * of the loop that calls this function.
14306 * @param[in] RgSchCellCb *cell
14307 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
14308 * @param[in] RgSchUlSf *sf
14309 * @param[in] RgSchUlAlloc *alloc
/* Queue an uplink allocation for adaptive retransmission. Regular UE
 * allocs are added to the retx UE list; msg3 (RACH) allocs get a new
 * UlAlloc carved out of the subframe, cloned from the original. The
 * original alloc is released by the caller's loop, not here. */
14313 PRIVATE Void rgSCHCmnUlHndlAllocRetx
14316 RgSchCmnUlRbAllocInfo *allocInfo,
14318 RgSchUlAlloc *alloc
14321 PRIVATE Void rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc)
14323 RgSchCmnUlRbAllocInfo *allocInfo;
14325 RgSchUlAlloc *alloc;
14329 RgSchCmnUlUe *ueUl;
14330 TRC2(rgSCHCmnUlHndlAllocRetx);
/* Transport-block size in bytes for this grant, looked up from the
 * TB-size table via iTbs (derived from iMcs) and numRb */
14332 rgTbSzTbl[0][rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs)]\
14333 [alloc->grnt.numRb-1]/8;
14334 if (!alloc->forMsg3)
14336 ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue);
14337 ueUl->alloc.reqBytes = bytes;
14338 rgSCHUhmRetx(alloc->hqProc);
14339 rgSCHCmnUlAdd2RetxUeLst(allocInfo, alloc->ue);
14343 /* RACHO msg3 retx handling. Part of RACH procedure changes. */
14344 retxAlloc = rgSCHCmnUlGetUlAlloc(cell, sf, alloc->numSb);
14345 if (retxAlloc == NULLP)
14347 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
14348 "rgSCHCmnUlRbAllocForUe():Could not get UlAlloc for msg3Retx RNTI:%d",
/* Clone grant parameters from the original msg3 alloc; iMcsCrnt is
 * derived from the HARQ process's redundancy version index */
14352 retxAlloc->grnt.iMcs = alloc->grnt.iMcs;
14353 retxAlloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl\
14354 [alloc->hqProc->rvIdx];
14355 retxAlloc->grnt.nDmrs = 0;
14356 retxAlloc->grnt.hop = 0;
14357 retxAlloc->grnt.delayBit = 0;
14358 retxAlloc->rnti = alloc->rnti;
14359 retxAlloc->ue = NULLP;
14360 retxAlloc->pdcch = FALSE;
14361 retxAlloc->forMsg3 = TRUE;
14362 retxAlloc->raCb = alloc->raCb;
14363 retxAlloc->hqProc = alloc->hqProc;
14364 rgSCHUhmRetx(retxAlloc->hqProc);
14371 * @brief Uplink Scheduling Handler.
14375 * Function: rgSCHCmnUlAlloc
14376 * Purpose: This function Handles Uplink Scheduling.
14378 * Invoked by: Common Scheduler
14380 * @param[in] RgSchCellCb *cell
14383 /* ccpu00132653- The definition of this function made common for TDD and FDD*/
/* Uplink scheduling entry point for one TTI: prepare the UL subframe,
 * prioritise adaptive retransmissions, run the specific UL scheduler
 * and the RB allocator, apply PUSCH group power control, and finalize
 * the allocations. Common for TDD and FDD (see ccpu00132653). */
14385 PRIVATE Void rgSCHCmnUlAlloc
14390 PRIVATE Void rgSCHCmnUlAlloc(cell)
14394 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14395 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
14396 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
14397 RgSchCmnUlRbAllocInfo allocInfo;
14398 RgSchCmnUlRbAllocInfo *allocInfoRef = &allocInfo;
14404 TRC2(rgSCHCmnUlAlloc);
14406 /* Initializing RgSchCmnUlRbAllocInfo structure */
14407 rgSCHCmnInitUlRbAllocInfo(allocInfoRef);
14409 /* Get Uplink Subframe */
14410 allocInfoRef->sf = &cellUl->ulSfArr[cellUl->schdIdx];
14412 /* initializing the UL PRB count */
14413 allocInfoRef->sf->totPrb = 0;
14417 rgSCHCmnSpsUlTti(cell, allocInfoRef);
/* Fresh subframe (no allocations yet): re-seed the hole database and
 * the 5GTF per-beam bookkeeping from the current-CFI bandwidth */
14420 if(*allocInfoRef->sf->allocCountRef == 0)
14424 if ((hole = rgSCHUtlUlHoleFirst(allocInfoRef->sf)) != NULLP)
14426 /* Sanity check of holeDb */
14427 if (allocInfoRef->sf->holeDb->count == 1 && hole->start == 0)
14429 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
14430 /* Re-Initialize available subbands because of CFI change*/
14431 allocInfoRef->sf->availSubbands = cell->dynCfiCb.\
14432 bwInfo[cellDl->currCfi].numSb;
14433 /*Currently initializing 5gtf ulsf specific initialization here.
14434 need to do at proper place */
14436 allocInfoRef->sf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
14437 allocInfoRef->sf->numUePerGrp = cell->cell5gtfCb.uePerGrpPerTti;
14438 for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
14440 allocInfoRef->sf->sfBeamInfo[idx].totVrbgAllocated = 0;
14441 allocInfoRef->sf->sfBeamInfo[idx].totVrbgRequired = 0;
14442 allocInfoRef->sf->sfBeamInfo[idx].vrbgStart = 0;
14448 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
14449 "Error! holeDb sanity check failed");
14454 /* Fix: Adaptive re-transmissions prioritised over other transmissions */
14455 /* perform adaptive retransmissions */
14456 rgSCHCmnUlSfReTxAllocs(cell, allocInfoRef->sf);
14460 /* Fix: syed Adaptive Msg3 Retx crash. Release all
14461 Harq processes for which adap Retx failed, to avoid
14462 blocking. This step should be done before New TX
14463 scheduling to make hqProc available. Right now we
14464 dont check if proc is in adap Retx list for considering
14465 it to be available. But now with this release that
14466 functionality would be correct. */
14468 rgSCHCmnUlSfRlsRetxProcs(cell, allocInfoRef->sf);
14471 /* Specific UL scheduler to perform UE scheduling */
14472 cellSch->apisUl->rgSCHUlSched(cell, allocInfoRef);
14474 /* Call UL RB allocator module */
14475 rgSCHCmnAllocUlRb(cell, allocInfoRef);
14477 /* Do group power control for PUSCH */
14478 rgSCHCmnGrpPwrCntrlPusch(cell, allocInfoRef->sf);
14480 cell->sc.apis->rgSCHDrxStrtInActvTmrInUl(cell);
14482 rgSCHCmnUlAllocFnlz(cell, allocInfoRef);
/* Periodic reset of 5GTF debug counters */
14483 if(5000 == g5gtfTtiCnt)
14485 ul5gtfsidDlAlreadyMarkUl = 0;
14486 ul5gtfsidDlSchdPass = 0;
14487 ul5gtfsidUlMarkUl = 0;
14488 ul5gtfTotSchdCnt = 0;
14496 * @brief send Subframe Allocations.
14500 * Function: rgSCHCmnSndCnsldtInfo
14501 * Purpose: Send the scheduled
14502 * allocations to MAC for StaInd generation to Higher layers and
14503 * for MUXing. PST's RgInfSfAlloc to MAC instance.
14505 * Invoked by: Common Scheduler
14507 * @param[in] RgSchCellCb *cell
/* Send the consolidated subframe allocation (RgInfSfAlloc) at the
 * current index to the MAC instance, for MUXing and StaInd generation
 * towards higher layers, then advance the circular allocation index. */
14511 PUBLIC Void rgSCHCmnSndCnsldtInfo
14516 PUBLIC Void rgSCHCmnSndCnsldtInfo(cell)
14520 RgInfSfAlloc *subfrmAlloc;
14522 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14524 TRC2(rgSCHCmnSndCnsldtInfo);
14526 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14528 /* Send the allocations to MAC for MUXing */
14529 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
14530 subfrmAlloc->cellId = cell->cellId;
14531 /* Populate the List of UEs needing PDB-based Flow control */
14532 cellSch->apisDl->rgSCHDlFillFlwCtrlInfo(cell, subfrmAlloc);
/* Post to MAC only when there is something to convey (RAR, common
 * channel, UE DL/UL or flow-control info). Two variants of the check
 * exist because the eMTC fields are compiled in conditionally. */
14534 if((subfrmAlloc->rarInfo.numRaRntis) ||
14536 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14537 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14538 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14540 (subfrmAlloc->ueInfo.numUes) ||
14541 (subfrmAlloc->cmnLcInfo.bitMask) ||
14542 (subfrmAlloc->ulUeInfo.numUes) ||
14543 (subfrmAlloc->flowCntrlInfo.numUes))
14545 if((subfrmAlloc->rarInfo.numRaRntis) ||
14547 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14548 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14549 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14551 (subfrmAlloc->ueInfo.numUes) ||
14552 (subfrmAlloc->cmnLcInfo.bitMask) ||
14553 (subfrmAlloc->flowCntrlInfo.numUes))
14556 RgSchMacSfAlloc(&pst, subfrmAlloc);
/* Advance the circular index; the modulus differs per build config */
14559 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_NUM_SUB_FRAMES;
14561 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_SF_ALLOC_SIZE;
14567 * @brief Consolidate Subframe Allocations.
14571 * Function: rgSCHCmnCnsldtSfAlloc
14572 * Purpose: Consolidate Subframe Allocations.
14574 * Invoked by: Common Scheduler
14576 * @param[in] RgSchCellCb *cell
/* Consolidate the allocations of the DL subframe RG_SCH_CMN_DL_DELTA
 * ahead of current time into the RgInfSfAlloc, harvest the DRX
 * inactivity lists, mark inactive UEs for both directions via the
 * specific schedulers, and (re)start DRX inactivity timers. */
14580 PUBLIC Void rgSCHCmnCnsldtSfAlloc
14585 PUBLIC Void rgSCHCmnCnsldtSfAlloc(cell)
14589 RgInfSfAlloc *subfrmAlloc;
14590 CmLteTimingInfo frm;
14592 CmLListCp dlDrxInactvTmrLst;
14593 CmLListCp dlInActvLst;
14594 CmLListCp ulInActvLst;
14595 RgSchCmnCell *cellSch = NULLP;
14597 TRC2(rgSCHCmnCnsldtSfAlloc);
14599 cmLListInit(&dlDrxInactvTmrLst);
14600 cmLListInit(&dlInActvLst);
14601 cmLListInit(&ulInActvLst);
14603 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14605 /* Get Downlink Subframe */
14606 frm = cell->crntTime;
14607 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
14608 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14610 /* Fill the allocation Info */
14611 rgSCHUtlFillRgInfRarInfo(dlSf, subfrmAlloc, cell);
/* Also fills the three DRX/inactivity lists as outputs */
14614 rgSCHUtlFillRgInfUeInfo(dlSf, cell, &dlDrxInactvTmrLst,
14615 &dlInActvLst, &ulInActvLst);
14616 #ifdef RG_PFS_STATS
14617 cell->totalPrb += dlSf->bwAssigned;
14619 /* Mark the following Ues inactive for UL*/
14620 cellSch = RG_SCH_CMN_GET_CELL(cell);
14622 /* Calling Scheduler specific function with DRX inactive UE list*/
14623 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInActvLst);
14624 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInActvLst);
14627 /*re/start DRX inactivity timer for the UEs*/
14628 (Void)rgSCHDrxStrtInActvTmr(cell,&dlDrxInactvTmrLst,RG_SCH_DRX_DL);
14634 * @brief Initialize the DL Allocation Information Structure.
14638 * Function: rgSCHCmnInitDlRbAllocInfo
14639 * Purpose: Initialize the DL Allocation Information Structure.
14641 * Invoked by: Common Scheduler
14643 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
/* Reset the DL RB-allocation scratch structure for a new TTI: zero
 * the common-channel allocs, then initialize the msg4, CCCH SDU and
 * dedicated HARQ lists (plus SPS / LAA / error-indication lists when
 * the corresponding features are compiled in). */
14647 PRIVATE Void rgSCHCmnInitDlRbAllocInfo
14649 RgSchCmnDlRbAllocInfo *allocInfo
14652 PRIVATE Void rgSCHCmnInitDlRbAllocInfo(allocInfo)
14653 RgSchCmnDlRbAllocInfo *allocInfo;
14656 TRC2(rgSCHCmnInitDlRbAllocInfo);
/* Zero the PCCH/BCCH and RA-response allocation records */
14657 cmMemset((U8 *)&allocInfo->pcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14658 cmMemset((U8 *)&allocInfo->bcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14659 cmMemset((U8 *)allocInfo->raRspAlloc, (U8)0,
14660 RG_SCH_CMN_MAX_CMN_PDCCH*sizeof(RgSchDlRbAlloc));
/* Msg4 lists: pending tx/retx and their scheduled/non-scheduled splits */
14662 allocInfo->msg4Alloc.msg4DlSf = NULLP;
14663 cmLListInit(&allocInfo->msg4Alloc.msg4TxLst);
14664 cmLListInit(&allocInfo->msg4Alloc.msg4RetxLst);
14665 cmLListInit(&allocInfo->msg4Alloc.schdMsg4TxLst);
14666 cmLListInit(&allocInfo->msg4Alloc.schdMsg4RetxLst);
14667 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4TxLst);
14668 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4RetxLst);
/* CCCH SDU lists, same pattern as msg4 */
14670 allocInfo->ccchSduAlloc.ccchSduDlSf = NULLP;
14671 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduTxLst);
14672 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduRetxLst);
14673 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduTxLst);
14674 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduRetxLst);
14675 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst);
14676 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst);
/* Dedicated (per-UE HARQ process) lists */
14679 allocInfo->dedAlloc.dedDlSf = NULLP;
14680 cmLListInit(&allocInfo->dedAlloc.txHqPLst);
14681 cmLListInit(&allocInfo->dedAlloc.retxHqPLst);
14682 cmLListInit(&allocInfo->dedAlloc.schdTxHqPLst);
14683 cmLListInit(&allocInfo->dedAlloc.schdRetxHqPLst);
14684 cmLListInit(&allocInfo->dedAlloc.nonSchdTxHqPLst);
14685 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxHqPLst);
14687 cmLListInit(&allocInfo->dedAlloc.txRetxHqPLst);
14688 cmLListInit(&allocInfo->dedAlloc.schdTxRetxHqPLst);
14689 cmLListInit(&allocInfo->dedAlloc.nonSchdTxRetxHqPLst);
14691 cmLListInit(&allocInfo->dedAlloc.txSpsHqPLst);
14692 cmLListInit(&allocInfo->dedAlloc.retxSpsHqPLst);
14693 cmLListInit(&allocInfo->dedAlloc.schdTxSpsHqPLst);
14694 cmLListInit(&allocInfo->dedAlloc.schdRetxSpsHqPLst);
14695 cmLListInit(&allocInfo->dedAlloc.nonSchdTxSpsHqPLst);
14696 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxSpsHqPLst);
14700 rgSCHLaaCmnInitDlRbAllocInfo (allocInfo);
14703 cmLListInit(&allocInfo->dedAlloc.errIndTxHqPLst);
14704 cmLListInit(&allocInfo->dedAlloc.schdErrIndTxHqPLst);
14705 cmLListInit(&allocInfo->dedAlloc.nonSchdErrIndTxHqPLst);
14710 * @brief Initialize the UL Allocation Information Structure.
14714 * Function: rgSCHCmnInitUlRbAllocInfo
14715 * Purpose: Initialize the UL Allocation Information Structure.
14717 * Invoked by: Common Scheduler
14719 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
/* Reset the UL RB-allocation scratch structure: clear the target
 * subframe pointer and initialize the contention-resolution and UE
 * lists (each with scheduled / non-scheduled counterparts). */
14723 PUBLIC Void rgSCHCmnInitUlRbAllocInfo
14725 RgSchCmnUlRbAllocInfo *allocInfo
14728 PUBLIC Void rgSCHCmnInitUlRbAllocInfo(allocInfo)
14729 RgSchCmnUlRbAllocInfo *allocInfo;
14732 TRC2(rgSCHCmnInitUlRbAllocInfo);
14733 allocInfo->sf = NULLP;
14734 cmLListInit(&allocInfo->contResLst);
14735 cmLListInit(&allocInfo->schdContResLst);
14736 cmLListInit(&allocInfo->nonSchdContResLst);
14737 cmLListInit(&allocInfo->ueLst);
14738 cmLListInit(&allocInfo->schdUeLst);
14739 cmLListInit(&allocInfo->nonSchdUeLst);
14745 * @brief Scheduling for PUCCH group power control.
14749 * Function: rgSCHCmnGrpPwrCntrlPucch
14750 * Purpose: This function does group power control for PUCCH
14751 * corresponding to the subframe for which DL UE allocations
14754 * Invoked by: Common Scheduler
14756 * @param[in] RgSchCellCb *cell
/* Thin wrapper: run PUCCH group power control for the DL subframe
 * whose UE allocations were just made. */
14760 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch
14766 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch(cell, dlSf)
14771 TRC2(rgSCHCmnGrpPwrCntrlPucch);
14773 rgSCHPwrGrpCntrlPusch-like delegation below */
14779 * @brief Scheduling for PUSCH group power control.
14783 * Function: rgSCHCmnGrpPwrCntrlPusch
14784 * Purpose: This function does group power control, for
14785 * the subframe for which UL allocation has (just) happened.
14787 * Invoked by: Common Scheduler
14789 * @param[in] RgSchCellCb *cell
14790 * @param[in] RgSchUlSf *ulSf
/* Run PUSCH group power control for the UL subframe that was just
 * allocated. The power module also needs the corresponding DL
 * subframe, which is found via the DL-control delta from current
 * time (eMTC cells use the eMTC subframe-increment variant). */
14794 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch
14800 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch(cell, ulSf)
14805 /*removed unused variable *cellSch*/
14806 CmLteTimingInfo frm;
14809 TRC2(rgSCHCmnGrpPwrCntrlPusch);
14811 /* Got to pass DL SF corresponding to UL SF, so get that first.
14812 * There is no easy way of getting dlSf by having the RgSchUlSf*,
14813 * so use the UL delta from current time to get the DL SF. */
14814 frm = cell->crntTime;
14817 if(cell->emtcEnable == TRUE)
14819 RGSCH_INCR_SUB_FRAME_EMTC(frm, TFU_DLCNTRL_DLDELTA);
14824 RGSCH_INCR_SUB_FRAME(frm, TFU_DLCNTRL_DLDELTA);
14826 /* Del filling of dl.time */
14827 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14829 rgSCHPwrGrpCntrlPusch(cell, dlSf, ulSf);
14834 /* Fix: syed align multiple UEs to refresh at same time */
14835 /***********************************************************
14837 * Func : rgSCHCmnApplyUeRefresh
14839 * Desc : Apply UE refresh in CMN and Specific
14840 * schedulers. Data rates and corresponding
14841 * scratchpad variables are updated.
14849 **********************************************************/
/* Periodic (refresh-timer) re-arming of a UE's rate-limiting state:
 * restore the effective AMBR and per-LCG GBR/MBR budgets from
 * configuration, recompute buffer-status caps, and notify both the
 * specific UL and DL schedulers of the refresh. */
14851 PRIVATE S16 rgSCHCmnApplyUeRefresh
14857 PRIVATE S16 rgSCHCmnApplyUeRefresh(cell, ue)
14862 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14864 U32 effNonGbrBsr = 0;
14867 TRC2(rgSCHCmnApplyUeRefresh);
14869 /* Reset the refresh cycle variableCAP */
14870 ue->ul.effAmbr = ue->ul.cfgdAmbr;
/* LCG 0 is skipped here; its buffer status is added uncapped to
 * effBsr at the end of the function */
14872 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
14874 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
14876 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
14878 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
14880 cmnLcg->effGbr = cmnLcg->cfgdGbr;
14881 cmnLcg->effDeltaMbr = cmnLcg->deltaMbr;
/* Cap the GBR LCG's buffer status by its refreshed GBR+MBR budget */
14882 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
14883 /* Considering GBR LCG will be prioritised by UE */
14884 effGbrBsr += cmnLcg->bs;
14885 }/* Else no remaing BS so nonLcg0 will be updated when BSR will be received */
/* Non-GBR LCGs are individually and collectively capped by AMBR */
14888 effNonGbrBsr += cmnLcg->reportedBs;
14889 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
14893 effNonGbrBsr = RGSCH_MIN(effNonGbrBsr,ue->ul.effAmbr);
14894 ue->ul.nonGbrLcgBs = effNonGbrBsr;
14896 ue->ul.nonLcg0Bs = effGbrBsr + effNonGbrBsr;
/* Total effective BSR = capped non-LCG0 total + LCG 0's own BS */
14897 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
14898 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
14901 /* call scheduler specific event handlers
14902 * for refresh timer expiry */
14903 cellSch->apisUl->rgSCHUlUeRefresh(cell, ue);
14904 cellSch->apisDl->rgSCHDlUeRefresh(cell, ue);
14909 /***********************************************************
14911 * Func : rgSCHCmnTmrExpiry
14913 * Desc : Adds an UE to refresh queue, so that the UE is
14914 * periodically triggered to refresh it's GBR and
14923 **********************************************************/
/* Refresh-timer expiry callback for a UE: validate the event (debug
 * builds only), apply the GBR/AMBR refresh, and re-queue the UE for
 * the next refresh cycle. cb is the UE control block. */
14925 PRIVATE S16 rgSCHCmnTmrExpiry
14927 PTR cb, /* Pointer to timer control block */
14928 S16 tmrEvnt /* Timer Event */
14931 PRIVATE S16 rgSCHCmnTmrExpiry(cb, tmrEvnt)
14932 PTR cb; /* Pointer to timer control block */
14933 S16 tmrEvnt; /* Timer Event */
14936 RgSchUeCb *ue = (RgSchUeCb *)cb;
14937 RgSchCellCb *cell = ue->cell;
14938 #if (ERRCLASS & ERRCLS_DEBUG)
14941 TRC2(rgSCHCmnTmrExpiry);
/* Debug-only sanity: the only expected event is UE refresh */
14943 #if (ERRCLASS & ERRCLS_DEBUG)
14944 if (tmrEvnt != RG_SCH_CMN_EVNT_UE_REFRESH)
14946 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnTmrExpiry(): Invalid "
14947 "timer event CRNTI:%d",ue->ueId);
14954 rgSCHCmnApplyUeRefresh(cell, ue);
14956 rgSCHCmnAddUeToRefreshQ(cell, ue, RG_SCH_CMN_REFRESH_TIME);
14961 /***********************************************************
14963 * Func : rgSCHCmnTmrProc
14965 * Desc : Timer entry point per cell. Timer
14966 * processing is triggered at every frame boundary
14975 **********************************************************/
/* Per-cell timer tick: at each frame boundary, periodically reset the
 * CSG PRB counters, smooth the cell throughput measurements with a
 * 95/5 exponential moving average and adjust the iTbs cap for CPU
 * overload, then process the common timer queue (UE refresh timers). */
14977 PRIVATE S16 rgSCHCmnTmrProc
14982 PRIVATE S16 rgSCHCmnTmrProc(cell)
14986 RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);
14987 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
14988 /* Moving the assignment of scheduler pointer
14989 to available scope for optimization */
14990 TRC2(rgSCHCmnTmrProc);
/* Frame boundary: first slot of the radio frame */
14992 if ((cell->crntTime.slot % RGSCH_NUM_SUB_FRAMES_5G) == 0)
14994 /* Reset the counters periodically */
14995 if ((cell->crntTime.sfn % RG_SCH_CMN_CSG_REFRESH_TIME) == 0)
14997 RG_SCH_RESET_HCSG_DL_PRB_CNTR(cmnDlCell);
14998 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cmnUlCell);
15000 if ((cell->crntTime.sfn % RG_SCH_CMN_OVRLDCTRL_REFRESH_TIME) == 0)
/* EWMA: 95% previous throughput + 5% bytes seen this interval */
15003 cell->measurements.ulTpt = ((cell->measurements.ulTpt * 95) + ( cell->measurements.ulBytesCnt * 5))/100;
15004 cell->measurements.dlTpt = ((cell->measurements.dlTpt * 95) + ( cell->measurements.dlBytesCnt * 5))/100;
15006 rgSCHUtlCpuOvrLdAdjItbsCap(cell);
15007 /* reset cell level tpt measurements for next cycle */
15008 cell->measurements.ulBytesCnt = 0;
15009 cell->measurements.dlBytesCnt = 0;
15011 /* Comparing with Zero instead of % is being done for efficiency.
15012 * If Timer resolution changes then accordingly update the
15013 * macro RG_SCH_CMN_REFRESH_TIMERES */
15014 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
15015 cmPrcTmr(&sched->tmrTqCp, sched->tmrTq, (PFV)rgSCHCmnTmrExpiry);
15022 /***********************************************************
15024 * Func : rgSchCmnUpdCfiVal
15026 * Desc : Update the CFI value if CFI switch was done
15034 **********************************************************/
/* Apply a pending dynamic-CFI switch for the subframe 'delta' ahead
 * of current time: propagate the cell's current CFI into that DL
 * subframe, and if this subframe is the designated switch point,
 * adopt the new CFI and recompute nCce (per-subframe for TDD, at
 * cell level for FDD) plus the CFI switch-over window length. */
15036 PRIVATE Void rgSchCmnUpdCfiVal
15042 PRIVATE Void rgSchCmnUpdCfiVal(cell, delta)
15048 CmLteTimingInfo pdsch;
15049 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15058 TRC2(rgSchCmnUpdCfiVal);
15060 pdsch = cell->crntTime;
15061 RGSCH_INCR_SUB_FRAME(pdsch, delta);
15062 dlSf = rgSCHUtlSubFrmGet(cell, pdsch);
15063 /* Fix for DCFI FLE issue: when DL delta is 1 and UL delta is 0 and CFI
15064 *change happens in that SF then UL PDCCH allocation happens with old CFI
15065 *but CFI in control Req goes updated one since it was stored in the CELL
15067 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
/* pdcchSfIdx == 0xFF means no CFI switch is pending */
15068 if(cell->dynCfiCb.pdcchSfIdx != 0xFF)
15071 dlIdx = rgSCHUtlGetDlSfIdx(cell, &pdsch);
/* FDD subframe index: (sfn parity * frames-per-radio-frame) + sf no. */
15073 dlIdx = (((pdsch.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (pdsch.slot % RGSCH_NUM_SUB_FRAMES));
15074 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
15076 /* If current downlink subframe index is same as pdcch SF index,
15077 * perform the switching of CFI in this subframe */
15078 if(cell->dynCfiCb.pdcchSfIdx == dlIdx)
15080 cellCmnDl->currCfi = cellCmnDl->newCfi;
15081 cell->dynCfiCb.pdcchSfIdx = 0xFF;
15083 /* Updating the nCce value based on the new CFI */
15085 splSfCfi = cellCmnDl->newCfi;
/* TDD: recompute nCce for every DL subframe; special subframes that
 * carry data use a CFI clamped for the special-subframe config */
15086 for(idx = 0; idx < cell->numDlSubfrms; idx++)
15088 tddSf = cell->subFrms[idx];
15090 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][tddSf->sfNum];
15092 if(tddSf->sfType == RG_SCH_SPL_SF_DATA)
15094 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
15096 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
15100 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][cellCmnDl->currCfi];
15103 /* Setting the switch over window length based on config index.
15104 * During switch over period all the UL transmissions are Acked
15106 cell->dynCfiCb.switchOvrWinLen =
15107 rgSchCfiSwitchOvrWinLen[cell->ulDlCfgIdx];
/* FDD: single cell-level nCce and fixed switch-over window entry */
15109 cell->nCce = cell->dynCfiCb.cfi2NCceTbl[0][cellCmnDl->currCfi];
15110 /* Fix for DCFI FLE issue: when DL delta is 1 and UL delta is 0 and CFI
15111 *change happens in that SF then UL PDCCH allocation happens with old CFI
15112 *but CFI in control Req goes updated one since it was stored in the CELL
15114 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
15115 cell->dynCfiCb.switchOvrWinLen = rgSchCfiSwitchOvrWinLen[7];
15123 /***********************************************************
15125 * Func : rgSchCmnUpdtPdcchSfIdx
15127 * Desc : Update the switch over window length
15135 **********************************************************/
/* On a CFI switch decision: reset the dynamic-CFI statistics (CCE
 * usage, failure counters and samples, TTI count), mark the
 * switch-over as in progress, and compute the DL subframe index at
 * which the new CFI takes effect (TDD via a cfg-dependent increment
 * table, FDD via a fixed apply delta). */
15138 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
15145 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, sfNum)
15152 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
15158 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx)
15166 TRC2(rgSchCmnUpdtPdcchSfIdx);
15168 /* Resetting the parameters on CFI switching */
15169 cell->dynCfiCb.cceUsed = 0;
15170 cell->dynCfiCb.lowCceCnt = 0;
15172 cell->dynCfiCb.cceFailSum = 0;
15173 cell->dynCfiCb.cceFailCnt = 0;
15174 cell->dynCfiCb.prevCceFailIdx = 0;
15176 cell->dynCfiCb.switchOvrInProgress = TRUE;
15178 for(idx = 0; idx < cell->dynCfiCb.numFailSamples; idx++)
15180 cell->dynCfiCb.cceFailSamples[idx] = 0;
15183 cell->dynCfiCb.ttiCnt = 0;
15185 cell->dynCfiCb.cfiSwitches++;
15186 cfiSwitchCnt = cell->dynCfiCb.cfiSwitches;
/* Subframe at which the switched CFI will be applied */
15189 cell->dynCfiCb.pdcchSfIdx = (dlIdx +
15190 rgSchTddPdcchSfIncTbl[cell->ulDlCfgIdx][sfNum]) % cell->numDlSubfrms;
15192 cell->dynCfiCb.pdcchSfIdx = (dlIdx + RG_SCH_CFI_APPLY_DELTA) % \
15193 RGSCH_NUM_DL_slotS;
15197 /***********************************************************
15199 * Func : rgSchCmnUpdCfiDb
15201 * Desc : Update the counters related to dynamic
15202 * CFI feature in cellCb.
/*
 * NOTE(review): this extraction has dropped braces and #ifdef ANSI /
 * #else / #endif / TDD-conditional lines, so the exact branch structure
 * below is partial. Comments describe only what the visible lines show.
 *
 * Purpose (from visible code): periodically evaluates PDCCH CCE usage /
 * CCE-allocation failures for the DL subframe at (crntTime + delta) and
 * steps the CFI up or down via RG_SCH_CFI_STEP_UP / RG_SCH_CFI_STEP_DOWN.
 * When dynamic CFI is disabled it instead converges currCfi toward the
 * configured cellSch->cfiCfg.cfi. On a CFI change it schedules the
 * switchover point through rgSchCmnUpdtPdcchSfIdx().
 *
 * Param cell  : scheduler cell control block (read/written: dynCfiCb).
 * Param delta : subframe offset added to cell->crntTime to pick the DL
 *               subframe being accounted (caller passes
 *               TFU_ULCNTRL_DLDELTA from rgSCHCmnUlSch).
 */
15210 **********************************************************/
15212 PUBLIC Void rgSchCmnUpdCfiDb
15218 PUBLIC Void rgSchCmnUpdCfiDb(cell, delta)
15223 CmLteTimingInfo frm;
15229 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15230 U8 nCceLowerCfi = 0;
15237 TRC2(rgSchCmnUpdCfiDb);
15239 /* Get Downlink Subframe */
15240 frm = cell->crntTime;
15241 RGSCH_INCR_SUB_FRAME(frm, delta);
/* TDD path (presumably #ifdef LTE_TDD in the original): index via the
 * UL/DL-config-aware helper, and look up whether this subframe carries
 * DCI0/HI (isHiDci0) from the PUSCH k-table. */
15244 dlIdx = rgSCHUtlGetDlSfIdx(cell, &frm);
15245 dlSf = cell->subFrms[dlIdx];
15246 isHiDci0 = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15248 /* Changing the idexing
15249 so that proper subframe is selected */
/* FDD path: 2-radio-frame ring of subframes; sfn parity selects the half. */
15250 dlIdx = (((frm.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (frm.slot % RGSCH_NUM_SUB_FRAMES));
15251 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
15252 dlSf = cell->subFrms[dlIdx];
15255 currCfi = cellSch->dl.currCfi;
/* No accounting while a previous CFI switchover is still in progress. */
15257 if(!cell->dynCfiCb.switchOvrInProgress)
15260 if(!cell->dynCfiCb.isDynCfiEnb)
/* Dynamic CFI disabled: step currCfi toward the statically configured CFI. */
15262 if(currCfi != cellSch->cfiCfg.cfi)
15264 if(currCfi < cellSch->cfiCfg.cfi)
15266 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15267 cfiIncr = cell->dynCfiCb.cfiIncr;
15271 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15272 cfiDecr = cell->dynCfiCb.cfiDecr;
15279 /* Setting ttiMod to 0 for ttiCnt > 1000 in case if this
15280 * function was not called in UL subframe*/
15281 if(cell->dynCfiCb.ttiCnt > RGSCH_CFI_TTI_MON_INTRVL)
15288 ttiMod = cell->dynCfiCb.ttiCnt % RGSCH_CFI_TTI_MON_INTRVL;
15291 dlSf->dlUlBothCmplt++;
/* Account this subframe once both DL and UL processing have seen it
 * (dlUlBothCmplt == 2), or immediately when it carries no DCI0/HI. */
15293 if((dlSf->dlUlBothCmplt == 2) || (!isHiDci0))
15295 if(dlSf->dlUlBothCmplt == 2)
15298 /********************STEP UP CRITERIA********************/
15299 /* Updating the CCE failure count parameter */
15300 cell->dynCfiCb.cceFailCnt += dlSf->isCceFailure;
15301 cell->dynCfiCb.cceFailSum += dlSf->isCceFailure;
15303 /* Check if cfi step up can be performed */
15304 if(currCfi < cell->dynCfiCb.maxCfi)
15306 if(cell->dynCfiCb.cceFailSum >= cell->dynCfiCb.cfiStepUpTtiCnt)
15308 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15309 cfiIncr = cell->dynCfiCb.cfiIncr;
15314 /********************STEP DOWN CRITERIA********************/
15316 /* Updating the no. of CCE used in this dl subframe */
15317 cell->dynCfiCb.cceUsed += dlSf->cceCnt;
15319 if(currCfi > RGSCH_MIN_CFI_VAL)
15321 /* calculating the number of CCE for next lower CFI */
/* TDD uses the PHICH m-value for the nCCE table row; FDD row 0. */
15323 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15324 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[mPhich][currCfi-1];
15326 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[0][currCfi-1];
15328 if(dlSf->cceCnt < nCceLowerCfi)
15330 /* Updating the count of TTIs in which no. of CCEs
15331 * used were less than the CCEs of next lower CFI */
15332 cell->dynCfiCb.lowCceCnt++;
/* Step down only if: no CCE failures in the window, enough low-usage
 * TTIs, and aggregate CCE usage below RGSCH_CFI_CCE_PERCNTG percent of
 * what the lower CFI could carry over the step-down window. */
15337 totalCce = (nCceLowerCfi * cell->dynCfiCb.cfiStepDownTtiCnt *
15338 RGSCH_CFI_CCE_PERCNTG)/100;
15340 if((!cell->dynCfiCb.cceFailSum) &&
15341 (cell->dynCfiCb.lowCceCnt >=
15342 cell->dynCfiCb.cfiStepDownTtiCnt) &&
15343 (cell->dynCfiCb.cceUsed < totalCce))
15345 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15346 cfiDecr = cell->dynCfiCb.cfiDecr;
/* Sliding-window bookkeeping of CCE failures per sample period. */
15352 cceFailIdx = ttiMod/cell->dynCfiCb.failSamplePrd;
15354 if(cceFailIdx != cell->dynCfiCb.prevCceFailIdx)
15356 /* New sample period has started. Subtract the old count
15357 * from the new sample period */
15358 cell->dynCfiCb.cceFailSum -= cell->dynCfiCb.cceFailSamples[cceFailIdx];
15360 /* Store the previous sample period data */
15361 cell->dynCfiCb.cceFailSamples[cell->dynCfiCb.prevCceFailIdx]
15362 = cell->dynCfiCb.cceFailCnt;
15364 cell->dynCfiCb.prevCceFailIdx = cceFailIdx;
15366 /* Resetting the CCE failure count as zero for next sample period */
15367 cell->dynCfiCb.cceFailCnt = 0;
15372 /* Restting the parametrs after Monitoring Interval expired */
15373 cell->dynCfiCb.cceUsed = 0;
15374 cell->dynCfiCb.lowCceCnt = 0;
15375 cell->dynCfiCb.ttiCnt = 0;
15378 cell->dynCfiCb.ttiCnt++;
/* CFI changed this TTI: compute the subframe index at which the new CFI
 * takes effect (TDD variant passes sfNum; FDD variant does not). */
15382 if(cellSch->dl.newCfi != cellSch->dl.currCfi)
15385 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, dlSf->sfNum);
15387 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx);
15394 * @brief Dl Scheduler for Broadcast and Common channel scheduling.
15398 * Function: rgSCHCmnDlCommonChSch
15399 * Purpose: This function schedules DL Common channels for LTE.
15400 * Invoked by TTI processing in TOM. Scheduling is done for
15401 * BCCH, PCCH, Msg4, CCCH SDU, RAR in that order
15403 * Invoked by: TOM (TTI processing)
15405 * @param[in] RgSchCellCb *cell
/*
 * NOTE(review): braces and conditional-compile lines were dropped by the
 * extraction; only the statement sequence below is authoritative.
 * Per-TTI pipeline visible here: PDB tracking tick -> CFI update for the
 * DL delta -> DL-inactive UE handling -> refresh-timer tick -> (if DL
 * data allowed and SI scheduling not stopped) RB-alloc init + BCCH/PCCH
 * allocation -> SI-window decrement -> (if DL data allowed and DL sched
 * not stopped) CCCH/RAR allocation.
 */
15409 PUBLIC Void rgSCHCmnDlCommonChSch
15414 PUBLIC Void rgSCHCmnDlCommonChSch(cell)
15418 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15420 TRC2(rgSCHCmnDlCommonChSch);
15422 cellSch->apisDl->rgSCHDlTickForPdbTrkng(cell);
15423 rgSchCmnUpdCfiVal(cell, RG_SCH_CMN_DL_DELTA);
15425 /* handle Inactive UEs for DL */
15426 rgSCHCmnHdlDlInactUes(cell);
15428 /* Send a Tick to Refresh Timer */
15429 rgSCHCmnTmrProc(cell);
15431 if (cell->isDlDataAllwd && (cell->stopSiSch == FALSE))
15433 rgSCHCmnInitRbAlloc(cell);
15434 /* Perform DL scheduling of BCCH, PCCH */
15435 rgSCHCmnDlBcchPcchAlloc(cell);
/* Count down the SI window even when BCCH/PCCH scheduling is skipped. */
15439 if(cell->siCb.inWindow != 0)
15441 cell->siCb.inWindow--;
15444 if (cell->isDlDataAllwd && (cell->stopDlSch == FALSE))
15446 rgSCHCmnDlCcchRarAlloc(cell);
15452 * @brief Scheduler invocation per TTI.
15456 * Function: rgSCHCmnUlSch
15457 * Purpose: This function implements UL scheduler alone. This is to
15458 * be able to perform scheduling with more flexibility.
15460 * Invoked by: TOM (TTI processing)
15462 * @param[in] RgSchCellCb *cell
/*
 * NOTE(review): extraction dropped braces/#ifdef lines (e.g. the LAA and
 * SPS parts are presumably feature-gated); comments cover only what is
 * visible. Per-TTI UL pipeline: optional LAA SCell early-out -> (when a
 * valid UL schedule index exists) CFI update for the UL-control delta,
 * UL-inactive UE handling, UL allocation -> dynamic-CFI bookkeeping via
 * rgSchCmnUpdCfiDb -> CFI switchover-window countdown, applying any
 * pending dynamic-CFI reconfiguration when the window expires ->
 * SPS UL TTI processing.
 */
15466 PUBLIC Void rgSCHCmnUlSch
15471 PUBLIC Void rgSCHCmnUlSch(cell)
15475 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15477 TRC2(rgSCHCmnUlSch);
15481 if(TRUE == rgSCHLaaSCellEnabled(cell))
15487 if(cellSch->ul.schdIdx != RGSCH_INVALID_INFO)
15489 rgSchCmnUpdCfiVal(cell, TFU_ULCNTRL_DLDELTA);
15491 /* Handle Inactive UEs for UL */
15492 rgSCHCmnHdlUlInactUes(cell);
15493 /* Perform UL Scheduling EVERY TTI */
15494 rgSCHCmnUlAlloc(cell);
15496 /* Calling function to update CFI parameters*/
15497 rgSchCmnUpdCfiDb(cell, TFU_ULCNTRL_DLDELTA);
15499 if(cell->dynCfiCb.switchOvrWinLen > 0)
15501 /* Decrementing the switchover window length */
15502 cell->dynCfiCb.switchOvrWinLen--;
15504 if(!cell->dynCfiCb.switchOvrWinLen)
15506 if(cell->dynCfiCb.dynCfiRecfgPend)
15508 /* Toggling the Dynamic CFI enabling */
15509 cell->dynCfiCb.isDynCfiEnb ^= 1;
15510 rgSCHDynCfiReCfg(cell, cell->dynCfiCb.isDynCfiEnb);
15511 cell->dynCfiCb.dynCfiRecfgPend = FALSE;
15513 cell->dynCfiCb.switchOvrInProgress = FALSE;
15521 rgSCHCmnSpsUlTti(cell, NULLP);
15531 * @brief This function updates the scheduler with service for an UE.
15535 * Function: rgSCHCmnDlDedBoUpd
15536 * Purpose: This function should be called whenever there is a
15537 * change BO for a service.
15539 * Invoked by: BO and Scheduler
15541 * @param[in] RgSchCellCb* cell
15542 * @param[in] RgSchUeCb* ue
15543 * @param[in] RgSchDlLcCb* svc
/*
 * NOTE(review): braces/#ifdef/#endif and the early returns between the
 * SPS/EMTC branches were stripped by the extraction; from the surviving
 * comment at 15583 the SPS branch returns without invoking the other
 * schedulers. Flow: (1) re-activate a PDCCH-order-inactive UE unless a
 * RACH response is already awaited; (2) SPS-enabled LC -> SPS module;
 * (3) EMTC UE -> EMTC DL API, else common DL API; (4) SCell BO update
 * (presumably under an SCell feature flag — confirm in original file).
 */
15548 PUBLIC Void rgSCHCmnDlDedBoUpd
15555 PUBLIC Void rgSCHCmnDlDedBoUpd(cell, ue, svc)
15561 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15562 TRC2(rgSCHCmnDlDedBoUpd);
15564 /* RACHO : if UEs idle time exceeded and a BO update
15565 * is received, then add UE to the pdcch Order Q */
15566 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
15568 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue, cell);
15569 /* If PDCCH order is already triggered and we are waiting for
15570 * RACH from UE then do not add to PdcchOdrQ. */
15571 if (ueDl->rachInfo.rapIdLnk.node == NULLP)
15573 rgSCHCmnDlAdd2PdcchOdrQ(cell, ue);
15579 /* If SPS service, invoke SPS module */
15580 if (svc->dlLcSpsCfg.isSpsEnabled)
15582 rgSCHCmnSpsDlDedBoUpd(cell, ue, svc);
15583 /* Note: Retrun from here, no update needed in other schedulers */
15588 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
15590 cellSch->apisEmtcDl->rgSCHDlDedBoUpd(cell, ue, svc);
15591 //printf("rgSCHEMTCDlDedBoUpd\n");
15596 cellSch->apisDl->rgSCHDlDedBoUpd(cell, ue, svc);
15601 rgSCHSCellDlDedBoUpd(cell, ue, svc);
15609 * @brief Removes an UE from Cell's TA List.
15613 * Function: rgSCHCmnRmvFrmTaLst
15614 * Purpose: Removes an UE from Cell's TA List.
15616 * Invoked by: Specific Scheduler
15618 * @param[in] RgSchCellCb* cell
15619 * @param[in] RgSchUeCb* ue
/*
 * NOTE(review): extraction dropped braces and the presumed else/#endif
 * between the EMTC and common paths. EMTC UEs are delegated to
 * rgSCHEmtcRmvFrmTaLst; otherwise the UE's dlTaLnk node is unlinked from
 * the common DL cell's timing-advance list and its node pointer cleared.
 */
15624 PUBLIC Void rgSCHCmnRmvFrmTaLst
15630 PUBLIC Void rgSCHCmnRmvFrmTaLst(cell, ue)
15635 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15636 TRC2(rgSCHCmnRmvFrmTaLst);
15639 if(cell->emtcEnable && ue->isEmtcUe)
15641 rgSCHEmtcRmvFrmTaLst(cellCmnDl,ue);
15646 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
15647 ue->dlTaLnk.node = (PTR)NULLP;
15652 /* Fix: syed Remove the msg4Proc from cell
15653 * msg4Retx Queue. I have used CMN scheduler function
15654 * directly. Please define a new API and call this
15655 * function through that. */
15658 * @brief This function removes MSG4 HARQ process from cell RETX Queues.
15662 * Function: rgSCHCmnDlMsg4ProcRmvFrmRetx
15663 * Purpose: This function removes MSG4 HARQ process from cell RETX Queues.
15665 * Invoked by: UE/RACB deletion.
15667 * @param[in] RgSchCellCb* cell
15668 * @param[in] RgSchDlHqProc* hqP
/*
 * NOTE(review): braces were dropped by the extraction. Logic: if the
 * HARQ process's TB0 retx link is queued (node != NULL), unlink it from
 * the Msg4 retx list when hqP is the entity's msg4Proc, or from the
 * CCCH-SDU retx list when it is the ccchSduProc, then NULL the link node
 * to mark it dequeued. Mirrors rgSCHCmnDlProcAddToRetx's enqueue side.
 */
15673 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx
15676 RgSchDlHqProcCb *hqP
15679 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx(cell, hqP)
15681 RgSchDlHqProcCb *hqP;
15684 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15685 TRC2(rgSCHCmnDlMsg4ProcRmvFrmRetx);
15687 if (hqP->tbInfo[0].ccchSchdInfo.retxLnk.node)
15689 if (hqP->hqE->msg4Proc == hqP)
15691 cmLListDelFrm(&cellSch->dl.msg4RetxLst, \
15692 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15693 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15696 else if(hqP->hqE->ccchSduProc == hqP)
15698 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
15699 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15700 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15709 * @brief This function adds a HARQ process for retx.
15713 * Function: rgSCHCmnDlProcAddToRetx
15714 * Purpose: This function adds a HARQ process to retransmission
15715 * queue. This may be performed when a HARQ ack is
15718 * Invoked by: HARQ feedback processing
15720 * @param[in] RgSchCellCb* cell
15721 * @param[in] RgSchDlHqProc* hqP
/*
 * NOTE(review): braces/#ifdef lines dropped by extraction (the SPS part
 * is inside LTEMAC_SPS per the surviving #endif at 15764). Dispatch, in
 * priority order: Msg4 proc -> cell msg4RetxLst; CCCH-SDU proc -> cell
 * ccchSduRetxLst (link node set to hqP to mark queued); SPS HARQ proc ->
 * SPS module; EMTC UE -> EMTC DL API; otherwise common DL scheduler API.
 */
15726 PUBLIC Void rgSCHCmnDlProcAddToRetx
15729 RgSchDlHqProcCb *hqP
15732 PUBLIC Void rgSCHCmnDlProcAddToRetx(cell, hqP)
15734 RgSchDlHqProcCb *hqP;
15737 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15738 TRC2(rgSCHCmnDlProcAddToRetx);
15740 if (hqP->hqE->msg4Proc == hqP) /* indicating msg4 transmission */
15742 cmLListAdd2Tail(&cellSch->dl.msg4RetxLst, \
15743 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15744 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15747 else if(hqP->hqE->ccchSduProc == hqP)
15749 /*If CCCH SDU being transmitted without cont res CE*/
15750 cmLListAdd2Tail(&cellSch->dl.ccchSduRetxLst,
15751 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15752 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15758 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
15760 /* Invoke SPS module for SPS HARQ proc re-transmission handling */
15761 rgSCHCmnSpsDlProcAddToRetx(cell, hqP);
15764 #endif /* LTEMAC_SPS */
15766 if((TRUE == cell->emtcEnable)
15767 && (TRUE == hqP->hqE->ue->isEmtcUe))
15769 cellSch->apisEmtcDl->rgSCHDlProcAddToRetx(cell, hqP);
15774 cellSch->apisDl->rgSCHDlProcAddToRetx(cell, hqP);
15782 * @brief This function performs RI validation and
15783 * updates it to the ueCb.
15787 * Function: rgSCHCmnDlSetUeRi
15788 * Purpose: This function performs RI validation and
15789 * updates it to the ueCb.
15791 * Invoked by: rgSCHCmnDlCqiInd
15793 * @param[in] RgSchCellCb *cell
15794 * @param[in] RgSchUeCb *ue
15796 * @param[in] Bool isPeriodic
/*
 * NOTE(review): braces and #ifdef (TENB_STATS presumably guards the
 * riCnt accounting) were dropped by the extraction. Visible behavior:
 * ignore RI while a Tx-mode transition is pending; clamp RI to the
 * cell's antenna ports and to the UE category's max Tx layers; sanitize
 * PMI on a 1->2 RI switch in TM4/2Tx; record perRiVal and manage
 * cqiCb->invalidateCqi (periodic RI resets it, a mismatching aperiodic
 * RI sets it); finally set/clear the forced-transmit-diversity flag
 * RG_SCH_CMN_TD_RI_1 based on RI and TM3.
 */
15801 PRIVATE Void rgSCHCmnDlSetUeRi
15809 PRIVATE Void rgSCHCmnDlSetUeRi(cell, ue, ri, isPer)
15816 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15817 RgSchCmnUeInfo *ueSchCmn = RG_SCH_CMN_GET_CMN_UE(ue);
15818 TRC2(rgSCHCmnDlSetUeRi);
15821 RgSchUePCqiCb *cqiCb = RG_SCH_GET_UE_CELL_CQI_CB(ue,cell);
15826 /* FIX for RRC Reconfiguration issue */
15827 /* ccpu00140894- During Tx Mode transition RI report will not entertained for
15828 * specific during which SCH expecting UE can complete TX mode transition*/
15829 if (ue->txModeTransCmplt == FALSE)
15834 /* Restrict the Number of TX layers to cell->numTxAntPorts.
15835 * Protection from invalid RI values. */
15836 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
15838 /* Special case of converting PMI to sane value when
15839 * there is a switch in RI from 1 to 2 and PMI reported
15840 * for RI=1 is invalid for RI=2 */
15841 if ((cell->numTxAntPorts == 2) && (ue->mimoInfo.txMode == RGR_UE_TM_4))
15843 if ((ri == 2) && ( ueDl->mimoInfo.ri == 1))
15845 ueDl->mimoInfo.pmi = (ueDl->mimoInfo.pmi < 2)? 1:2;
15849 /* Restrict the Number of TX layers according to the UE Category */
15850 ueDl->mimoInfo.ri = RGSCH_MIN(ri, rgUeCatTbl[ueSchCmn->ueCat].maxTxLyrs);
/* Stats accounting of the accepted RI (per-cell and aggregate counters). */
15852 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].riCnt[ueDl->mimoInfo.ri-1]++;
15853 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15857 ue->tenbStats->stats.nonPersistent.sch[0].riCnt[ueDl->mimoInfo.ri-1]++;
15858 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15864 /* If RI is from Periodic CQI report */
15865 cqiCb->perRiVal = ueDl->mimoInfo.ri;
15866 /* Reset at every Periodic RI Reception */
15867 cqiCb->invalidateCqi = FALSE;
15871 /* If RI is from Aperiodic CQI report */
15872 if (cqiCb->perRiVal != ueDl->mimoInfo.ri)
15874 /* if this aperRI is different from last reported
15875 * perRI then invalidate all CQI reports till next
15877 cqiCb->invalidateCqi = TRUE;
15881 cqiCb->invalidateCqi = FALSE;
15886 if (ueDl->mimoInfo.ri > 1)
15888 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
15890 else if (ue->mimoInfo.txMode == RGR_UE_TM_3) /* ri == 1 */
15892 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
15900 * @brief This function performs PMI validation and
15901 * updates it to the ueCb.
15905 * Function: rgSCHCmnDlSetUePmi
15906 * Purpose: This function performs PMI validation and
15907 * updates it to the ueCb.
15909 * Invoked by: rgSCHCmnDlCqiInd
15911 * @param[in] RgSchCellCb *cell
15912 * @param[in] RgSchUeCb *ue
15913 * @param[in] U8 pmi
/*
 * NOTE(review): returns and braces (and presumably RFAILED paths for the
 * invalid-PMI cases — callers check != ROK) were stripped by the
 * extraction. Visible behavior: reject PMI during Tx-mode transition;
 * for 2 Tx antennas with RI=2, PMI 2/3 are invalid and an accepted PMI
 * is stored offset by +1; for RI=1 (and for 4 Tx antennas) the PMI is
 * stored as reported; finally the no-PMI forced-TD flag is cleared.
 * Returns S16 (ROK/RFAILED — confirm against the unabridged file).
 */
15918 PRIVATE S16 rgSCHCmnDlSetUePmi
15925 PRIVATE S16 rgSCHCmnDlSetUePmi(cell, ue, pmi)
15931 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15932 TRC2(rgSCHCmnDlSetUePmi);
15934 if (ue->txModeTransCmplt == FALSE)
15939 if (cell->numTxAntPorts == 2)
15945 if (ueDl->mimoInfo.ri == 2)
15947 /*ccpu00118150 - MOD - changed pmi value validation from 0 to 2*/
15948 /* PMI 2 and 3 are invalid incase of 2 TxAnt and 2 Layered SM */
15949 if (pmi == 2 || pmi == 3)
15953 ueDl->mimoInfo.pmi = pmi+1;
15957 ueDl->mimoInfo.pmi = pmi;
15960 else if (cell->numTxAntPorts == 4)
15966 ueDl->mimoInfo.pmi = pmi;
15968 /* Reset the No PMI Flag in forceTD */
15969 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
15974 * @brief This function Updates the DL CQI on PUCCH for the UE.
15978 * Function: rgSCHCmnDlProcCqiMode10
15980 * This function updates the DL CQI on PUCCH for the UE.
15982 * Invoked by: rgSCHCmnDlCqiOnPucchInd
15984 * Processing Steps:
15986 * @param[in] RgSchCellCb *cell
15987 * @param[in] RgSchUeCb *ue
15988 * @param[in] TfuDlCqiRpt *dlCqiRpt
/*
 * NOTE(review): this block carries two signature variants (with/without
 * the isCqiAvail out-param) selected by the RGR_CQI_REPT conditional
 * whose #else/#endif lines were stripped. Mode 1-0 (wideband CQI, no
 * PMI): a CQI report in (0, RG_SCH_CMN_MAX_CQI) is copied to both
 * codeword slots and flags CQI availability; an RI report is validated
 * and forwarded to rgSCHCmnDlSetUeRi, else logged as an error.
 */
15993 #ifdef RGR_CQI_REPT
15995 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
15999 TfuDlCqiPucch *pucchCqi,
16003 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail)
16006 TfuDlCqiPucch *pucchCqi;
16011 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
16015 TfuDlCqiPucch *pucchCqi
16018 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi)
16021 TfuDlCqiPucch *pucchCqi;
16025 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16026 TRC2(rgSCHCmnDlProcCqiMode10);
16028 if (pucchCqi->u.mode10Info.type == TFU_RPT_CQI)
16030 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16031 /* Checking whether the decoded CQI is a value between 1 and 15*/
16032 if((pucchCqi->u.mode10Info.u.cqi) && (pucchCqi->u.mode10Info.u.cqi
16033 < RG_SCH_CMN_MAX_CQI))
16035 ueDl->cqiFlag = TRUE;
16036 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode10Info.u.cqi;
/* No PMI in mode 1-0: mirror CW0 CQI into CW1. */
16037 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16038 /* ccpu00117452 - MOD - Changed macro name from
16039 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16040 #ifdef RGR_CQI_REPT
16041 *isCqiAvail = TRUE;
16049 else if (pucchCqi->u.mode10Info.type == TFU_RPT_RI)
16051 if ( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode10Info.u.ri) )
16053 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode10Info.u.ri,
16058 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16059 pucchCqi->u.mode10Info.u.ri,ue->ueId);
16066 * @brief This function Updates the DL CQI on PUCCH for the UE.
16070 * Function: rgSCHCmnDlProcCqiMode11
16072 * This function updates the DL CQI on PUCCH for the UE.
16074 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16076 * Processing Steps:
16077 * Process CQI MODE 11
16078 * @param[in] RgSchCellCb *cell
16079 * @param[in] RgSchUeCb *ue
16080 * @param[in] TfuDlCqiRpt *dlCqiRpt
/*
 * NOTE(review): two signature variants under a stripped RGR_CQI_REPT
 * conditional. Mode 1-1 (wideband CQI + PMI): a valid CQI invalidates
 * PUSCH feedback, stores CW0 CQI, derives CW1 CQI from the wideband
 * differential when present (flagging 2nd-CW CQI availability), then
 * applies the reported PMI via rgSCHCmnDlSetUePmi. RI reports are
 * validated and forwarded to rgSCHCmnDlSetUeRi, else logged.
 */
16085 #ifdef RGR_CQI_REPT
16087 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16091 TfuDlCqiPucch *pucchCqi,
16093 Bool *is2ndCwCqiAvail
16096 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16099 TfuDlCqiPucch *pucchCqi;
16101 Bool *is2ndCwCqiAvail;
16105 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16109 TfuDlCqiPucch *pucchCqi
16112 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi)
16115 TfuDlCqiPucch *pucchCqi;
16119 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16120 TRC2(rgSCHCmnDlProcCqiMode11);
16122 if (pucchCqi->u.mode11Info.type == TFU_RPT_CQI)
16124 ue->mimoInfo.puschFdbkVld = FALSE;
16125 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16126 if((pucchCqi->u.mode11Info.u.cqi.cqi) &&
16127 (pucchCqi->u.mode11Info.u.cqi.cqi < RG_SCH_CMN_MAX_CQI))
16129 ueDl->cqiFlag = TRUE;
16130 /* ccpu00117452 - MOD - Changed macro name from
16131 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16132 #ifdef RGR_CQI_REPT
16133 *isCqiAvail = TRUE;
16135 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode11Info.u.cqi.cqi;
16136 if (pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.pres)
/* Derive CW1 CQI from CW0 + wideband differential offset. */
16138 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16139 ueDl->mimoInfo.cwInfo[1].cqi, \
16140 pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.val);
16141 #ifdef RGR_CQI_REPT
16142 /* ccpu00117259 - ADD - Considering second codeword CQI info
16143 incase of MIMO for CQI Reporting */
16144 *is2ndCwCqiAvail = TRUE;
16152 rgSCHCmnDlSetUePmi(cell, ue, \
16153 pucchCqi->u.mode11Info.u.cqi.pmi);
16155 else if (pucchCqi->u.mode11Info.type == TFU_RPT_RI)
16157 if( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode11Info.u.ri))
16159 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode11Info.u.ri,
16164 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16165 pucchCqi->u.mode11Info.u.ri,ue->ueId);
16172 * @brief This function Updates the DL CQI on PUCCH for the UE.
16176 * Function: rgSCHCmnDlProcCqiMode20
16178 * This function updates the DL CQI on PUCCH for the UE.
16180 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16182 * Processing Steps:
16183 * Process CQI MODE 20
16184 * @param[in] RgSchCellCb *cell
16185 * @param[in] RgSchUeCb *ue
16186 * @param[in] TfuDlCqiRpt *dlCqiRpt
/*
 * NOTE(review): two signature variants under a stripped RGR_CQI_REPT
 * conditional; the subband-CQI branch (isWideband == FALSE) was also
 * dropped by the extraction. Mode 2-0 (UE-selected subband, no PMI):
 * a valid wideband CQI is copied to both codeword slots and flags CQI
 * availability; RI reports are validated and forwarded, else logged.
 */
16191 #ifdef RGR_CQI_REPT
16193 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16197 TfuDlCqiPucch *pucchCqi,
16201 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail )
16204 TfuDlCqiPucch *pucchCqi;
16209 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16213 TfuDlCqiPucch *pucchCqi
16216 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi)
16219 TfuDlCqiPucch *pucchCqi;
16223 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16224 TRC2(rgSCHCmnDlProcCqiMode20);
16226 if (pucchCqi->u.mode20Info.type == TFU_RPT_CQI)
16228 if (pucchCqi->u.mode20Info.u.cqi.isWideband)
16230 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16231 if((pucchCqi->u.mode20Info.u.cqi.u.wideCqi) &&
16232 (pucchCqi->u.mode20Info.u.cqi.u.wideCqi < RG_SCH_CMN_MAX_CQI))
16234 ueDl->cqiFlag = TRUE;
16235 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode20Info.u.cqi.\
/* No PMI in mode 2-0: mirror CW0 CQI into CW1. */
16237 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16238 /* ccpu00117452 - MOD - Changed macro name from
16239 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16240 #ifdef RGR_CQI_REPT
16241 *isCqiAvail = TRUE;
16250 else if (pucchCqi->u.mode20Info.type == TFU_RPT_RI)
16252 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode20Info.u.ri))
16254 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode20Info.u.ri,
16259 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16260 pucchCqi->u.mode20Info.u.ri,ue->ueId);
16268 * @brief This function Updates the DL CQI on PUCCH for the UE.
16272 * Function: rgSCHCmnDlProcCqiMode21
16274 * This function updates the DL CQI on PUCCH for the UE.
16276 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16278 * Processing Steps:
16279 * Process CQI MODE 21
16280 * @param[in] RgSchCellCb *cell
16281 * @param[in] RgSchUeCb *ue
16282 * @param[in] TfuDlCqiRpt *dlCqiRpt
/*
 * NOTE(review): two signature variants under a stripped RGR_CQI_REPT
 * conditional; the subband branch (isWideband == FALSE) was dropped by
 * the extraction. Mode 2-1 (UE-selected subband with PMI): a valid
 * wideband CQI invalidates PUSCH feedback, stores CW0 CQI, derives CW1
 * CQI from the differential when present (flagging 2nd-CW CQI), flags
 * CQI availability, then applies the reported wideband PMI. RI reports
 * are validated and forwarded to rgSCHCmnDlSetUeRi, else logged.
 */
16287 #ifdef RGR_CQI_REPT
16289 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16293 TfuDlCqiPucch *pucchCqi,
16295 Bool *is2ndCwCqiAvail
16298 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16301 TfuDlCqiPucch *pucchCqi;
16302 TfuDlCqiRpt *dlCqiRpt;
16304 Bool *is2ndCwCqiAvail;
16308 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16312 TfuDlCqiPucch *pucchCqi
16315 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi)
16318 TfuDlCqiPucch *pucchCqi;
16322 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16323 TRC2(rgSCHCmnDlProcCqiMode21);
16325 if (pucchCqi->u.mode21Info.type == TFU_RPT_CQI)
16327 ue->mimoInfo.puschFdbkVld = FALSE;
16328 if (pucchCqi->u.mode21Info.u.cqi.isWideband)
16330 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16331 if((pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi) &&
16332 (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi < RG_SCH_CMN_MAX_CQI))
16334 ueDl->cqiFlag = TRUE;
16335 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode21Info.u.cqi.\
16337 if (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.pres)
/* Derive CW1 CQI from CW0 + differential offset. */
16339 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16340 ueDl->mimoInfo.cwInfo[1].cqi, \
16341 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.val);
16342 #ifdef RGR_CQI_REPT
16343 /* ccpu00117259 - ADD - Considering second codeword CQI info
16344 incase of MIMO for CQI Reporting */
16345 *is2ndCwCqiAvail = TRUE;
16348 /* ccpu00117452 - MOD - Changed macro name from
16349 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16350 #ifdef RGR_CQI_REPT
16351 *isCqiAvail = TRUE;
16358 rgSCHCmnDlSetUePmi(cell, ue, \
16359 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.pmi);
16362 else if (pucchCqi->u.mode21Info.type == TFU_RPT_RI)
16364 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode21Info.u.ri))
16366 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode21Info.u.ri,
16371 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16372 pucchCqi->u.mode21Info.u.ri,ue->ueId);
16380 * @brief This function Updates the DL CQI on PUCCH for the UE.
16384 * Function: rgSCHCmnDlCqiOnPucchInd
16386 * This function updates the DL CQI on PUCCH for the UE.
16388 * Invoked by: rgSCHCmnDlCqiInd
16390 * Processing Steps:
16391 * - Depending on the reporting mode of the PUCCH, the CQI/PMI/RI values
16392 * are updated and stored for each UE
16394 * @param[in] RgSchCellCb *cell
16395 * @param[in] RgSchUeCb *ue
16396 * @param[in] TfuDlCqiRpt *dlCqiRpt
/*
 * NOTE(review): two signature variants under a stripped RGR_CQI_REPT
 * conditional (with/without ueCqiRept/isCqiAvail/is2ndCwCqiAvail
 * out-params); break statements between cases were also dropped by the
 * extraction. Dispatcher: records the PUCCH reporting mode in the CQI
 * report (RGR_CQI_REPT builds), then fans out to the mode-specific
 * handlers 1-0/1-1/2-0/2-1, setting ueDl->cqiFlag after each; unknown
 * modes are logged and isCqiAvail is forced FALSE.
 */
16401 #ifdef RGR_CQI_REPT
16403 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16407 TfuDlCqiPucch *pucchCqi,
16408 RgrUeCqiRept *ueCqiRept,
16410 Bool *is2ndCwCqiAvail
16413 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16416 TfuDlCqiPucch *pucchCqi;
16417 RgrUeCqiRept *ueCqiRept;
16419 Bool *is2ndCwCqiAvail;
16423 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16427 TfuDlCqiPucch *pucchCqi
16430 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi)
16433 TfuDlCqiPucch *pucchCqi;
16437 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16438 TRC2(rgSCHCmnDlCqiOnPucchInd);
16440 /* ccpu00117452 - MOD - Changed
16441 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16442 #ifdef RGR_CQI_REPT
16443 /* Save CQI mode information in the report */
16444 ueCqiRept->cqiMode = pucchCqi->mode;
16447 switch(pucchCqi->mode)
16449 case TFU_PUCCH_CQI_MODE10:
16450 #ifdef RGR_CQI_REPT
16451 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail);
16453 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi);
16455 ueDl->cqiFlag = TRUE;
16457 case TFU_PUCCH_CQI_MODE11:
16458 #ifdef RGR_CQI_REPT
16459 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail,
16462 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi);
16464 ueDl->cqiFlag = TRUE;
16466 case TFU_PUCCH_CQI_MODE20:
16467 #ifdef RGR_CQI_REPT
16468 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail);
16470 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi);
16472 ueDl->cqiFlag = TRUE;
16474 case TFU_PUCCH_CQI_MODE21:
16475 #ifdef RGR_CQI_REPT
16476 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail,
16479 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi);
16481 ueDl->cqiFlag = TRUE;
16485 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unknown CQI Mode %d",
16486 pucchCqi->mode,ue->ueId);
16487 /* ccpu00117452 - MOD - Changed macro name from
16488 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16489 #ifdef RGR_CQI_REPT
16490 *isCqiAvail = FALSE;
16497 } /* rgSCHCmnDlCqiOnPucchInd */
16501 * @brief This function Updates the DL CQI on PUSCH for the UE.
16505 * Function: rgSCHCmnDlCqiOnPuschInd
16507 * This function updates the DL CQI on PUSCH for the UE.
16509 * Invoked by: rgSCHCmnDlCqiInd
16511 * Processing Steps:
16512 * - Depending on the reporting mode of the PUSCH, the CQI/PMI/RI values
16513 * are updated and stored for each UE
16515 * @param[in] RgSchCellCb *cell
16516 * @param[in] RgSchUeCb *ue
16517 * @param[in] TfuDlCqiRpt *dlCqiRpt
/*
 * NOTE(review): two signature variants under a stripped RGR_CQI_REPT
 * conditional; break statements, several local declarations (prevRiVal
 * etc.) and parts of case bodies were dropped by the extraction.
 * Aperiodic (PUSCH) CQI dispatcher: first validates and applies an RI
 * if present (saving the previous RI for TM4 so a later PMI failure can
 * roll it back), then switches on the PUSCH reporting mode
 * (2-0/3-0/1-2/2-2/3-1), range-checking each CQI index against
 * (0, RG_SCH_CMN_MAX_CQI) before storing it in cwInfo[0]/[1], marking
 * isCqiAvail / is2ndCwCqiAvail, and stashing PMI feedback in
 * ue->mimoInfo.puschPmiInfo for the modes that carry PMI.
 */
16522 #ifdef RGR_CQI_REPT
16524 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16528 TfuDlCqiPusch *puschCqi,
16529 RgrUeCqiRept *ueCqiRept,
16531 Bool *is2ndCwCqiAvail
16534 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16537 TfuDlCqiPusch *puschCqi;
16538 RgrUeCqiRept *ueCqiRept;
16540 Bool *is2ndCwCqiAvail;
16544 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16548 TfuDlCqiPusch *puschCqi
16551 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi)
16554 TfuDlCqiPusch *puschCqi;
16558 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16560 TRC2(rgSCHCmnDlCqiOnPuschInd);
16561 if (puschCqi->ri.pres == PRSNT_NODEF)
16563 if (RG_SCH_CMN_IS_RI_VALID(puschCqi->ri.val))
16565 /* Saving the previous ri value to revert back
16566 in case PMI update failed */
16567 if (RGR_UE_TM_4 == ue->mimoInfo.txMode ) /* Cheking for TM4. TM8 check later */
16569 prevRiVal = ueDl->mimoInfo.ri;
16571 rgSCHCmnDlSetUeRi(cell, ue, puschCqi->ri.val, FALSE);
16575 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16576 puschCqi->ri.val,ue->ueId);
16580 ue->mimoInfo.puschFdbkVld = FALSE;
16581 /* ccpu00117452 - MOD - Changed macro name from
16582 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16583 #ifdef RGR_CQI_REPT
16584 /* Save CQI mode information in the report */
16585 ueCqiRept->cqiMode = puschCqi->mode;
16586 /* ccpu00117259 - DEL - removed default setting of isCqiAvail to TRUE */
16589 switch(puschCqi->mode)
16591 case TFU_PUSCH_CQI_MODE_20:
16592 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16593 /* Checking whether the decoded CQI is a value between 1 and 15*/
16594 if((puschCqi->u.mode20Info.wideBandCqi) &&
16595 (puschCqi->u.mode20Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
16597 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode20Info.wideBandCqi;
/* No PMI in mode 2-0: mirror CW0 CQI into CW1. */
16598 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16599 /* ccpu00117452 - MOD - Changed macro name from
16600 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16601 #ifdef RGR_CQI_REPT
16602 *isCqiAvail = TRUE;
16610 case TFU_PUSCH_CQI_MODE_30:
16611 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16612 if((puschCqi->u.mode30Info.wideBandCqi) &&
16613 (puschCqi->u.mode30Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
16615 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode30Info.wideBandCqi;
16616 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16617 /* ccpu00117452 - MOD - Changed macro name from
16618 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16619 #ifdef RGR_CQI_REPT
16620 *isCqiAvail = TRUE;
/* NOTE(review): aperiodic-CQI counter, presumably debug-only — confirm. */
16624 extern U32 gACqiRcvdCount;
16635 case TFU_PUSCH_CQI_MODE_12:
16636 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16637 if((puschCqi->u.mode12Info.cqiIdx[0]) &&
16638 (puschCqi->u.mode12Info.cqiIdx[0] < RG_SCH_CMN_MAX_CQI))
16640 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode12Info.cqiIdx[0];
16641 /* ccpu00117452 - MOD - Changed macro name from
16642 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16643 #ifdef RGR_CQI_REPT
16644 *isCqiAvail = TRUE;
16651 if((puschCqi->u.mode12Info.cqiIdx[1]) &&
16652 (puschCqi->u.mode12Info.cqiIdx[1] < RG_SCH_CMN_MAX_CQI))
16654 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode12Info.cqiIdx[1];
16655 /* ccpu00117452 - MOD - Changed macro name from
16656 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16657 #ifdef RGR_CQI_REPT
16658 /* ccpu00117259 - ADD - Considering second codeword CQI info
16659 incase of MIMO for CQI Reporting */
16660 *is2ndCwCqiAvail = TRUE;
16667 ue->mimoInfo.puschFdbkVld = TRUE;
16668 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_12;
16669 ue->mimoInfo.puschPmiInfo.u.mode12Info = puschCqi->u.mode12Info;
16670 /* : resetting this is time based. Make use of CQI reporting
16671 * periodicity, DELTA's in determining the exact time at which this
16672 * need to be reset. */
16674 case TFU_PUSCH_CQI_MODE_22:
16675 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16676 if((puschCqi->u.mode22Info.wideBandCqi[0]) &&
16677 (puschCqi->u.mode22Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16679 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode22Info.wideBandCqi[0];
16680 /* ccpu00117452 - MOD - Changed macro name from
16681 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16682 #ifdef RGR_CQI_REPT
16683 *isCqiAvail = TRUE;
16690 if((puschCqi->u.mode22Info.wideBandCqi[1]) &&
16691 (puschCqi->u.mode22Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16693 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode22Info.wideBandCqi[1];
16694 /* ccpu00117452 - MOD - Changed macro name from
16695 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16696 #ifdef RGR_CQI_REPT
16697 /* ccpu00117259 - ADD - Considering second codeword CQI info
16698 incase of MIMO for CQI Reporting */
16699 *is2ndCwCqiAvail = TRUE;
16706 rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode22Info.wideBandPmi);
16707 ue->mimoInfo.puschFdbkVld = TRUE;
16708 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_22;
16709 ue->mimoInfo.puschPmiInfo.u.mode22Info = puschCqi->u.mode22Info;
16711 case TFU_PUSCH_CQI_MODE_31:
16712 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16713 if((puschCqi->u.mode31Info.wideBandCqi[0]) &&
16714 (puschCqi->u.mode31Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16716 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode31Info.wideBandCqi[0];
16717 /* ccpu00117452 - MOD - Changed macro name from
16718 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16719 #ifdef RGR_CQI_REPT
16720 *isCqiAvail = TRUE;
/* CW1 CQI only meaningful when RI indicates more than one layer. */
16723 if (ueDl->mimoInfo.ri > 1)
16725 if((puschCqi->u.mode31Info.wideBandCqi[1]) &&
16726 (puschCqi->u.mode31Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16728 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode31Info.wideBandCqi[1];
16729 /* ccpu00117452 - MOD - Changed macro name from
16730 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16731 #ifdef RGR_CQI_REPT
16732 /* ccpu00117259 - ADD - Considering second codeword CQI info
16733 incase of MIMO for CQI Reporting */
16734 *is2ndCwCqiAvail = TRUE;
/* PMI rejected: restore the RI saved before rgSCHCmnDlSetUeRi (TM4). */
16738 if (rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode31Info.pmi) != ROK)
16740 /* To avoid Rank and PMI inconsistency */
16741 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16742 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16744 ueDl->mimoInfo.ri = prevRiVal;
16747 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_31;
16748 ue->mimoInfo.puschPmiInfo.u.mode31Info = puschCqi->u.mode31Info;
16752 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Unknown CQI Mode %d CRNTI:%d",
16753 puschCqi->mode,ue->ueId);
16754 /* CQI decoding failed revert the RI to previous value */
16755 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16756 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16758 ueDl->mimoInfo.ri = prevRiVal;
16760 /* ccpu00117452 - MOD - Changed macro name from
16761 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16762 #ifdef RGR_CQI_REPT
16763 *isCqiAvail = FALSE;
16764 /* ccpu00117259 - ADD - Considering second codeword CQI info
16765 incase of MIMO for CQI Reporting */
16766 *is2ndCwCqiAvail = FALSE;
16773 } /* rgSCHCmnDlCqiOnPuschInd */
16777 * @brief This function Updates the DL CQI for the UE.
16781 * Function: rgSCHCmnDlCqiInd
16782 * Purpose: Updates the DL CQI for the UE
16786 * @param[in] RgSchCellCb *cell
16787 * @param[in] RgSchUeCb *ue
16788 * @param[in] TfuDlCqiRpt *dlCqi
16793 PUBLIC Void rgSCHCmnDlCqiInd
16799 CmLteTimingInfo timingInfo
16802 PUBLIC Void rgSCHCmnDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo)
16807 CmLteTimingInfo timingInfo;
16810 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
16811 /* ccpu00117452 - MOD - Changed macro name from
16812 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16813 #ifdef RGR_CQI_REPT
16814 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16815 RgrUeCqiRept ueCqiRept = {{0}};
16816 Bool isCqiAvail = FALSE;
16817 /* ccpu00117259 - ADD - Considering second codeword CQI info
16818 incase of MIMO for CQI Reporting */
16819 Bool is2ndCwCqiAvail = FALSE;
16822 TRC2(rgSCHCmnDlCqiInd);
/* Dispatch the CQI report to the PUCCH- or PUSCH-specific decoder.
 * When built with RGR_CQI_REPT, the decoders additionally fill
 * ueCqiRept and flag codeword-0/codeword-1 CQI availability so the
 * report can be forwarded to RRM further below. */
16824 #ifdef RGR_CQI_REPT
16827 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16831 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16836 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi);
16840 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi);
/* Optional CQI confidence-bitmask based smoothing: if the report is
 * not trusted (cqiConfBitMask clear), clamp/step the stored CW0/CW1
 * CQI relative to ue->prevCqi instead of taking the raw value. */
16844 #ifdef CQI_CONFBITMASK_DROP
16845 if(!ue->cqiConfBitMask)
16847 if (ueDl->mimoInfo.cwInfo[0].cqi >15)
16849 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16850 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
16852 else if ( ueDl->mimoInfo.cwInfo[0].cqi >= ue->prevCqi)
16854 ue->prevCqi = ueDl->mimoInfo.cwInfo[0].cqi;
/* Downward CQI move is rate-limited to at most 3 steps per report. */
16858 U8 dlCqiDeltaPrev = 0;
16859 dlCqiDeltaPrev = ue->prevCqi - ueDl->mimoInfo.cwInfo[0].cqi;
16860 if (dlCqiDeltaPrev > 3)
16861 dlCqiDeltaPrev = 3;
16862 if ((ue->prevCqi - dlCqiDeltaPrev) < 6)
16868 ue->prevCqi = ue->prevCqi - dlCqiDeltaPrev;
16870 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16871 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
16877 /* ccpu00117452 - MOD - Changed macro name from
16878 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16879 #ifdef RGR_CQI_REPT
16880 /* ccpu00117259 - ADD - Considering second codeword CQI info
16881 incase of MIMO for CQI Reporting - added is2ndCwCqiAvail\
16882 in 'if' condition*/
/* Forward the collated CQI(s) to the power-control / RRM CQI
 * reporting collation if PUSH-N CQI reporting is enabled for the UE. */
16883 if (RG_SCH_CQIR_IS_PUSHNCQI_ENBLE(ue) && (isCqiAvail || is2ndCwCqiAvail))
16885 ueCqiRept.cqi[0] = ueDl->mimoInfo.cwInfo[0].cqi;
16887 /* ccpu00117259 - ADD - Considering second codeword CQI info
16888 incase of MIMO for CQI Reporting - added is2ndCwCqiAvail
16889 in 'if' condition*/
16890 ueCqiRept.cqi[1] = 0;
16891 if(is2ndCwCqiAvail)
16893 ueCqiRept.cqi[1] = ueDl->mimoInfo.cwInfo[1].cqi;
16895 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, &ueCqiRept);
/* Re-derive the UE's DL allocation limits from the updated CQI
 * (LA-based variant and plain variant are build-dependent). */
16900 rgSCHCmnDlSetUeAllocLmtLa(cell, ue);
16901 rgSCHCheckAndSetTxScheme(cell, ue);
16904 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), ue->isEmtcUe);
16906 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), FALSE);
/* Notify interested modules: DLFS (frequency-selective scheduler),
 * DL SPS, and the specific (EMTC or legacy) DL scheduler. */
16910 if (cellSch->dl.isDlFreqSel)
16912 cellSch->apisDlfs->rgSCHDlfsDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo);
16915 /* Call SPS module to update CQI indication */
16916 rgSCHCmnSpsDlCqiIndHndlr(cell, ue, timingInfo);
16918 /* Call Specific scheduler to process on dlCqiInd */
16920 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
16922 cellSch->apisEmtcDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
16927 cellSch->apisDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
/* Statistics accumulation (PFS per-serving-cell averages and
 * TeNB per-UE / per-cell CW0/CW1 CQI sums, build-dependent). */
16930 #ifdef RG_PFS_STATS
16931 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].avgCqi +=
16932 ueDl->mimoInfo.cwInfo[0].cqi;
16933 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].totalCqiOcc++;
16937 ueDl->avgCqi += ueDl->mimoInfo.cwInfo[0].cqi;
16938 ueDl->numCqiOccns++;
16939 if (ueDl->mimoInfo.ri == 1)
16950 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16951 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16952 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw0Cqi ++;
16953 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw1Cqi ++;
16954 cell->tenbStats->sch.dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16955 cell->tenbStats->sch.dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16956 cell->tenbStats->sch.dlNumCw0Cqi ++;
16957 cell->tenbStats->sch.dlNumCw1Cqi ++;
16964 * @brief This function calculates the wideband CQI from SNR
16965 * reported for each RB.
16969 * Function: rgSCHCmnCalcWcqiFrmSnr
16970 * Purpose: Wideband CQI calculation from SNR
16972 * Invoked by: RG SCH
16974 * @param[in] RgSchCellCb *cell
16975 * @param[in] TfuSrsRpt *srsRpt,
16976 * @return Wideband CQI
16980 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr
16986 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr(cell,srsRpt)
/* wideCqi defaults to 1 so that an SNR below the first threshold
 * still yields a valid (minimum) CQI. */
16991 U8 wideCqi=1; /*Calculated value from SNR*/
16992 TRC2(rgSCHCmnCalcWcqiFrmSnr);
16993 /*Need to map a certain SNR with a WideCQI value.
16994 * The CQI calculation is still primitive. Further, need to
16995 * use a improvized method for calculating WideCQI from SNR*/
/* Only snr[0] is considered; it is quantized into 50-unit buckets.
 * Each branch assigns a progressively larger wideCqi step.
 * NOTE(review): only the first SRS SNR entry is used despite the
 * per-RB report — confirm this is intentional. */
16996 if (srsRpt->snr[0] <=50)
17000 else if (srsRpt->snr[0]>=51 && srsRpt->snr[0] <=100)
17004 else if (srsRpt->snr[0]>=101 && srsRpt->snr[0] <=150)
17008 else if (srsRpt->snr[0]>=151 && srsRpt->snr[0] <=200)
17012 else if (srsRpt->snr[0]>=201 && srsRpt->snr[0] <=250)
17021 }/*rgSCHCmnCalcWcqiFrmSnr*/
17025 * @brief This function Updates the SRS for the UE.
17029 * Function: rgSCHCmnSrsInd
17030 * Purpose: Updates the UL SRS for the UE
17034 * @param[in] RgSchCellCb *cell
17035 * @param[in] RgSchUeCb *ue
17036 * @param[in] TfuSrsRpt *srsRpt,
17041 PUBLIC Void rgSCHCmnSrsInd
17046 CmLteTimingInfo timingInfo
17049 PUBLIC Void rgSCHCmnSrsInd(cell, ue, srsRpt, timingInfo)
17053 CmLteTimingInfo timingInfo;
17056 U8 wideCqi; /*Calculated value from SNR*/
17057 U32 recReqTime; /*Received Time in TTI*/
17058 TRC2(rgSCHCmnSrsInd);
/* Absolute TTI of the report; the selected TX antenna alternates
 * every SRS period (0/1 toggle based on elapsed periods). */
17060 recReqTime = (timingInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) + timingInfo.slot;
17061 ue->srsCb.selectedAnt = (recReqTime/ue->srsCb.peri)%2;
/* Prefer the wideband CQI carried in the SRS report; otherwise
 * derive one from the reported SNR. */
17062 if(srsRpt->wideCqiPres)
17064 wideCqi = srsRpt->wideCqi;
17068 wideCqi = rgSCHCmnCalcWcqiFrmSnr(cell, srsRpt);
17070 rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi);
17072 }/*rgSCHCmnSrsInd*/
17077 * @brief This function is a handler for TA report for an UE.
17081 * Function: rgSCHCmnDlTARpt
17082 * Purpose: Determine based on UE_IDLE_TIME threshold,
17083 * whether UE needs to be Linked to the scheduler's TA list OR
17084 * if it needs a PDCCH Order.
17089 * @param[in] RgSchCellCb *cell
17090 * @param[in] RgSchUeCb *ue
17095 PUBLIC Void rgSCHCmnDlTARpt
17101 PUBLIC Void rgSCHCmnDlTARpt(cell, ue)
17106 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17107 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
17108 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
17109 CmLListCp poInactvLst;
17111 TRC2(rgSCHCmnDlTARpt);
17113 /* RACHO: If UE idle time is more than threshold, then
17114 * set its poInactv pdcch order inactivity */
17115 /* Fix : syed Ignore if TaTmr is not configured */
17116 if ((ue->dl.taCb.cfgTaTmr) && (rgSCHCmnUeIdleExdThrsld(cell, ue) == ROK))
/* UE has been idle too long: mark it PDCCH-order inactive in both
 * directions and tell the DL/UL schedulers, once per transition. */
17118 U32 prevDlMsk = ue->dl.dlInactvMask;
17119 U32 prevUlMsk = ue->ul.ulInactvMask;
17120 ue->dl.dlInactvMask |= RG_PDCCHODR_INACTIVE;
17121 ue->ul.ulInactvMask |= RG_PDCCHODR_INACTIVE;
17122 /* Indicate Specific scheduler for this UEs inactivity */
17123 cmLListInit(&poInactvLst);
17124 cmLListAdd2Tail(&poInactvLst, &ueDl->rachInfo.inActUeLnk);
17125 ueDl->rachInfo.inActUeLnk.node = (PTR)ue;
17126 /* Send inactivate ind only if not already sent */
17127 if (prevDlMsk == 0)
17129 cellSch->apisDl->rgSCHDlInactvtUes(cell, &poInactvLst);
17131 if (prevUlMsk == 0)
17133 cellSch->apisUl->rgSCHUlInactvtUes(cell, &poInactvLst);
/* Otherwise queue the UE on the cell's TA list for a TA command;
 * a NULL dlTaLnk.node guards against double-insertion (would
 * otherwise corrupt the linked list — see ccpu00124009). */
17138 /* Fix: ccpu00124009 Fix for loop in the linked list "cellDl->taLst" */
17139 if (!ue->dlTaLnk.node)
17142 if(cell->emtcEnable)
17146 rgSCHEmtcAddToTaLst(cellDl,ue);
17153 cmLListAdd2Tail(&cellDl->taLst, &ue->dlTaLnk);
17154 ue->dlTaLnk.node = (PTR)ue;
17159 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
17160 "<TA>TA duplicate entry attempt failed: UEID:%u",
17169 * @brief Indication of UL CQI.
17173 * Function : rgSCHCmnFindUlCqiUlTxAnt
17175 * - Finds the Best Tx Antenna amongst the CQIs received
17176 * from Two Tx Antennas.
17178 * @param[in] RgSchCellCb *cell
17179 * @param[in] RgSchUeCb *ue
17180 * @param[in] U8 wideCqi
17184 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt
17191 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi)
/* NOTE(review): in the visible code only validTxAnt is set to 1;
 * any per-antenna CQI comparison implied by the header comment is
 * not visible here — confirm against the full source. */
17197 ue->validTxAnt = 1;
17199 } /* rgSCHCmnFindUlCqiUlTxAnt */
17203 * @brief Indication of UL CQI.
17207 * Function : rgSCHCmnUlCqiInd
17209 * - Updates uplink CQI information for the UE. Computes and
17210 * stores the lowest CQI of CQIs reported in all subbands.
17212 * @param[in] RgSchCellCb *cell
17213 * @param[in] RgSchUeCb *ue
17214 * @param[in] TfuUlCqiRpt *ulCqiInfo
17218 PUBLIC Void rgSCHCmnUlCqiInd
17222 TfuUlCqiRpt *ulCqiInfo
17225 PUBLIC Void rgSCHCmnUlCqiInd(cell, ue, ulCqiInfo)
17228 TfuUlCqiRpt *ulCqiInfo;
17231 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17232 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17237 #if (defined(SCH_STATS) || defined(TENB_STATS))
17238 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
17241 TRC2(rgSCHCmnUlCqiInd);
17242 /* consider inputs from SRS handlers about SRS occassions
17243 * in determining the UL TX Antenna selection */
/* Latch the reported wideband CQI as the current/valid UL CQI. */
17244 ueUl->crntUlCqi[0] = ulCqiInfo->wideCqi;
17246 ueUl->validUlCqi = ueUl->crntUlCqi[0];
17247 ue->validTxAnt = 0;
/* UL link adaptation: translate CQI to iTbs and blend it into the
 * running cqiBasediTbs estimate (20/80 exponential smoothing). */
17249 iTbsNew = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][ueUl->validUlCqi];
17250 previTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
/* A jump of more than 5 iTbs steps is treated as an outlier: skip
 * it, but remember we skipped so the next report resets the LA. */
17252 if (RG_ITBS_DIFF(iTbsNew, previTbs) > 5)
17254 /* Ignore this iTBS report and mark that last iTBS report was */
17255 /* ignored so that subsequently we reset the LA algorithm */
17256 ueUl->ulLaCb.lastiTbsIgnored = TRUE;
17260 if (ueUl->ulLaCb.lastiTbsIgnored != TRUE)
17262 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17263 (80 * ueUl->ulLaCb.cqiBasediTbs))/100;
17267 /* Reset the LA as iTbs in use caught up with the value */
17268 /* reported by UE. */
17269 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17270 (80 * previTbs * 100))/100;
17271 ueUl->ulLaCb.deltaiTbs = 0;
17272 ueUl->ulLaCb.lastiTbsIgnored = FALSE;
/* Propagate the new CQI to power control, UL SPS (if enabled) and
 * the specific (EMTC or legacy) UL scheduler. */
17277 rgSCHPwrUlCqiInd(cell, ue);
17279 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17281 rgSCHCmnSpsUlCqiInd(cell, ue);
17284 /* Applicable to only some schedulers */
17286 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
17288 cellSch->apisEmtcUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
17293 cellSch->apisUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
/* Statistics accumulation (build-dependent). */
17297 ueUl->numCqiOccns++;
17298 ueUl->avgCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17303 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17304 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulNumCqi ++;
17305 cell->tenbStats->sch.ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17306 cell->tenbStats->sch.ulNumCqi ++;
17311 } /* rgSCHCmnUlCqiInd */
17314 * @brief Returns HARQ proc for which data expected now.
17318 * Function: rgSCHCmnUlHqProcForUe
17319 * Purpose: This function returns the harq process for
17320 * which data is expected in the current subframe.
17321 * It does not validate that the HARQ process
17322 * has an allocation.
17326 * @param[in] RgSchCellCb *cell
17327 * @param[in] CmLteTimingInfo frm
17328 * @param[in] RgSchUeCb *ue
17329 * @param[out] RgSchUlHqProcCb **procRef
17333 PUBLIC Void rgSCHCmnUlHqProcForUe
17336 CmLteTimingInfo frm,
17338 RgSchUlHqProcCb **procRef
17341 PUBLIC Void rgSCHCmnUlHqProcForUe(cell, frm, ue, procRef)
17343 CmLteTimingInfo frm;
17345 RgSchUlHqProcCb **procRef;
/* Two build-dependent lookup paths: by derived HARQ process index,
 * or directly by timing (the alternate path below). */
17349 U8 procId = rgSCHCmnGetUlHqProcIdx(&frm, cell);
17351 TRC2(rgSCHCmnUlHqProcForUe);
17353 *procRef = rgSCHUhmGetUlHqProc(cell, ue, procId);
17355 *procRef = rgSCHUhmGetUlProcByTime(cell, ue, frm);
17362 * @brief Update harq process for allocation.
17366 * Function : rgSCHCmnUpdUlHqProc
17368 * This function is invoked when harq process
17369 * control block is now in a new memory location
17370 * thus requiring a pointer/reference update.
17372 * @param[in] RgSchCellCb *cell
17373 * @param[in] RgSchUlHqProcCb *curProc
17374 * @param[in] RgSchUlHqProcCb *oldProc
17380 PUBLIC S16 rgSCHCmnUpdUlHqProc
17383 RgSchUlHqProcCb *curProc,
17384 RgSchUlHqProcCb *oldProc
17387 PUBLIC S16 rgSCHCmnUpdUlHqProc(cell, curProc, oldProc)
17389 RgSchUlHqProcCb *curProc;
17390 RgSchUlHqProcCb *oldProc;
17393 TRC2(rgSCHCmnUpdUlHqProc);
/* Debug-only sanity check: the moved process must still reference
 * an allocation before we fix up its back-pointer. */
17397 #if (ERRCLASS & ERRCLS_DEBUG)
17398 if (curProc->alloc == NULLP)
/* Re-point the allocation's HARQ-process reference at the
 * relocated control block. */
17403 curProc->alloc->hqProc = curProc;
17405 } /* rgSCHCmnUpdUlHqProc */
17408 /*MS_WORKAROUND for CR FIXME */
17410 * @brief Hsndles BSR timer expiry
17414 * Function : rgSCHCmnBsrTmrExpry
17416 * This function is invoked when periodic BSR timer expires for a UE.
17418 * @param[in] RgSchUeCb *ue
17424 PUBLIC S16 rgSCHCmnBsrTmrExpry
17429 PUBLIC S16 rgSCHCmnBsrTmrExpry(ueCb)
17433 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ueCb->cell);
17435 TRC2(rgSCHCmnBsrTmrExpry)
/* Treat the expiry like a Scheduling Request so the UE gets a
 * grant to report its (periodic) BSR. */
17437 ueCb->isSrGrant = TRUE;
17440 emtcStatsUlBsrTmrTxp++;
/* Hand off to the EMTC or legacy UL scheduler's SR handler. */
17444 if(ueCb->cell->emtcEnable)
17448 cellSch->apisEmtcUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17455 cellSch->apisUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17462 * @brief Short BSR update.
17466 * Function : rgSCHCmnUpdBsrShort
17468 * This functions does requisite updates to handle short BSR reporting.
17470 * @param[in] RgSchCellCb *cell
17471 * @param[in] RgSchUeCb *ue
17472 * @param[in] RgSchLcgCb *ulLcg
17473 * @param[in] U8 bsr
17474 * @param[out] RgSchErrInfo *err
17480 PUBLIC S16 rgSCHCmnUpdBsrShort
17489 PUBLIC S16 rgSCHCmnUpdBsrShort(cell, ue, ulLcg, bsr, err)
17499 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17501 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17502 RgSchCmnLcg *cmnLcg = NULLP;
17507 TRC2(rgSCHCmnUpdBsrShort);
17509 if (!RGSCH_LCG_ISCFGD(ulLcg))
17511 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
/* A short BSR reports exactly one LCG: zero out the buffer status
 * of every other LCG (and this one too, on a zero BSR with no busy
 * HARQ procs), maintaining per-QCI active-UE counts as LCs go idle. */
17514 for (lcgCnt=0; lcgCnt<4; lcgCnt++)
17517 /* Set BS of all other LCGs to Zero.
17518 If Zero BSR is reported in Short BSR include this LCG too */
17519 if ((lcgCnt != ulLcg->lcgId) ||
17520 (!bsr && !ueUl->hqEnt.numBusyHqProcs))
17522 /* If old BO is zero do nothing */
17523 if(((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs != 0)
17525 for(idx = 0; idx < ue->ul.lcgArr[lcgCnt].numLch; idx++)
17527 if((ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount) &&
17528 (ue->ulActiveLCs & (1 <<
17529 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1))))
17532 ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount--;
17533 ue->ulActiveLCs &= ~(1 <<
17534 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1));
17540 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgCnt]))
17542 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs = 0;
17543 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->reportedBs = 0;
/* Reported LCG transitioning 0 -> non-zero: mark its LCs active
 * and bump the per-QCI active-UE counters. */
17548 if(ulLcg->lcgId && bsr && (((RgSchCmnLcg *)(ulLcg->sch))->bs == 0))
17550 for(idx = 0; idx < ulLcg->numLch; idx++)
17553 if (!(ue->ulActiveLCs & (1 << (ulLcg->lcArray[idx]->qciCb->qci -1))))
17555 ulLcg->lcArray[idx]->qciCb->ulUeCount++;
17556 ue->ulActiveLCs |= (1 << (ulLcg->lcArray[idx]->qciCb->qci -1));
17561 /* Resetting the nonGbrLcgBs info here */
17562 ue->ul.nonGbrLcgBs = 0;
17563 ue->ul.nonLcg0Bs = 0;
/* Decode the BSR index via the (extended or normal) size table,
 * then cap the usable buffer status: GBR bearers by effGbr+effDeltaMbr,
 * LCG0 uncapped, non-GBR LCGs by the UE's effective AMBR. */
17565 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17567 if (TRUE == ue->ul.useExtBSRSizes)
17569 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17573 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
17575 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17577 /* TBD check for effGbr != 0 */
17578 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17580 else if (0 == ulLcg->lcgId)
17582 /* This is added for handling LCG0 */
17583 cmnLcg->bs = cmnLcg->reportedBs;
17587 /* Update non GBR LCG's BS*/
17588 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17589 cmnLcg->bs = ue->ul.nonGbrLcgBs;
17591 ue->ul.totalBsr = cmnLcg->bs;
/* A zero BSR makes the pending periodic-BSR timer redundant. */
17594 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (bsr == 0))
17596 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17600 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17602 rgSCHCmnSpsBsrRpt(cell, ue, ulLcg);
17605 rgSCHCmnUpdUlCompEffBsr(ue);
/* Notify the specific UL scheduler; with UL CA, fan the update out
 * to each (active) secondary cell as well. */
17608 if(cell->emtcEnable)
17612 cellSch->apisEmtcUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
17619 cellSch->apisUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
17623 if (ue->ul.isUlCaEnabled && ue->numSCells)
17625 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17627 #ifndef PAL_ENABLE_UL_CA
17628 if((ue->cellInfo[sCellIdx] != NULLP) &&
17629 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17631 if(ue->cellInfo[sCellIdx] != NULLP)
17634 cellSch->apisUl->rgSCHUpdBsrShort(ue->cellInfo[sCellIdx]->cell,
17645 * @brief Truncated BSR update.
17649 * Function : rgSCHCmnUpdBsrTrunc
17651 * This functions does required updates to handle truncated BSR report.
17654 * @param[in] RgSchCellCb *cell
17655 * @param[in] RgSchUeCb *ue
17656 * @param[in] RgSchLcgCb *ulLcg
17657 * @param[in] U8 bsr
17658 * @param[out] RgSchErrInfo *err
17664 PUBLIC S16 rgSCHCmnUpdBsrTrunc
17673 PUBLIC S16 rgSCHCmnUpdBsrTrunc(cell, ue, ulLcg, bsr, err)
17681 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17682 RgSchCmnLcg *cmnLcg = NULLP;
17688 TRC2(rgSCHCmnUpdBsrTrunc);
17690 if (!RGSCH_LCG_ISCFGD(ulLcg))
17692 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
17695 /* set all higher prio lcgs bs to 0 and update this lcgs bs and
17696 total bsr= sumofall lcgs bs */
/* Truncated BSR semantics: all LCGs of higher priority (lower id)
 * than the reported one had nothing to send — zero their BS and
 * deactivate their LCs in the per-QCI active-UE bookkeeping. */
17699 for (cnt = ulLcg->lcgId-1; cnt >= 0; cnt--)
17702 /* If Existing BO is zero the don't do anything */
17703 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs != 0)
17705 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17708 if((ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount) &&
17709 (ue->ulActiveLCs & (1 <<
17710 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17712 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount--;
17713 ue->ulActiveLCs &= ~(1 <<
17714 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
17719 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs = 0;
17720 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->reportedBs = 0;
/* Reported and lower-priority LCGs may still have data: (re)activate
 * LCs whose group currently shows zero BS. */
17725 for (cnt = ulLcg->lcgId; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17727 if (ulLcg->lcgId == 0)
17731 /* If Existing BO is zero the don't do anything */
17732 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs == 0)
17734 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17737 if (!(ue->ulActiveLCs & (1 <<
17738 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17740 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount++;
17741 ue->ulActiveLCs |= (1 <<
17742 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
/* Decode the BSR index and cap the usable BS exactly as in the
 * short-BSR path (GBR cap / LCG0 uncapped / AMBR cap). */
17748 ue->ul.nonGbrLcgBs = 0;
17749 ue->ul.nonLcg0Bs = 0;
17750 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17751 if (TRUE == ue->ul.useExtBSRSizes)
17753 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17757 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
17759 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17761 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17763 else if(ulLcg->lcgId == 0)
17765 /* This is for handeling LCG0 */
17766 cmnLcg->bs = cmnLcg->reportedBs;
17770 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
17771 cmnLcg->bs = ue->ul.nonGbrLcgBs;
/* Total BSR = this LCG's capped BS plus the (possibly stale) BS of
 * lower-priority LCGs. */
17773 ue->ul.totalBsr = cmnLcg->bs;
17775 for (cnt = ulLcg->lcgId+1; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17777 /* TODO: The bs for the other LCGs may be stale because some or all of
17778 * the part of bs may have been already scheduled/data received. Please
17779 * consider this when truncated BSR is tested/implemented */
17780 ue->ul.totalBsr += ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs;
17783 rgSCHCmnUpdUlCompEffBsr(ue);
/* Notify the specific UL scheduler; with UL CA, fan out to active
 * secondary cells. */
17786 if(cell->emtcEnable)
17790 cellSch->apisEmtcUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
17797 cellSch->apisUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
17801 if (ue->ul.isUlCaEnabled && ue->numSCells)
17803 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17805 #ifndef PAL_ENABLE_UL_CA
17806 if((ue->cellInfo[sCellIdx] != NULLP) &&
17807 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17809 if(ue->cellInfo[sCellIdx] != NULLP)
17812 cellSch->apisUl->rgSCHUpdBsrTrunc(ue->cellInfo[sCellIdx]->cell, ue, ulLcg, bsr);
17822 * @brief Long BSR update.
17826 * Function : rgSCHCmnUpdBsrLong
17828 * - Update BSRs for all configured LCGs.
17829 * - Update priority of LCGs if needed.
17830 * - Update UE's position within/across uplink scheduling queues.
17833 * @param[in] RgSchCellCb *cell
17834 * @param[in] RgSchUeCb *ue
17835 * @param[in] U8 bsArr[]
17836 * @param[out] RgSchErrInfo *err
17842 PUBLIC S16 rgSCHCmnUpdBsrLong
17850 PUBLIC S16 rgSCHCmnUpdBsrLong(cell, ue, bsArr, err)
17857 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17858 U32 tmpBsArr[4] = {0, 0, 0, 0};
17866 TRC2(rgSCHCmnUpdBsrLong);
/* For LCGs 1..3 whose stored BS was zero, (re)activate their LCs in
 * the per-QCI active-UE bookkeeping before applying the new report. */
17869 for(idx1 = 1; idx1 < RGSCH_MAX_LCG_PER_UE; idx1++)
17871 /* If Old BO is non zero then do nothing */
17872 if ((((RgSchCmnLcg *)(ue->ul.lcgArr[idx1].sch))->bs == 0)
17875 for(idx2 = 0; idx2 < ue->ul.lcgArr[idx1].numLch; idx2++)
17878 if (!(ue->ulActiveLCs & (1 <<
17879 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1))))
17881 ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->ulUeCount++;
17882 ue->ulActiveLCs |= (1 <<
17883 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1));
17889 ue->ul.nonGbrLcgBs = 0;
17890 ue->ul.nonLcg0Bs = 0;
/* LCG0 is taken uncapped from the (extended or normal) BSR table. */
17892 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[0]))
17894 if (TRUE == ue->ul.useExtBSRSizes)
17896 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnExtBsrTbl[bsArr[0]];
17897 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnExtBsrTbl[bsArr[0]];
17898 tmpBsArr[0] = rgSchCmnExtBsrTbl[bsArr[0]];
17902 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnBsrTbl[bsArr[0]];
17903 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnBsrTbl[bsArr[0]];
17904 tmpBsArr[0] = rgSchCmnBsrTbl[bsArr[0]];
/* LCGs 1..3: GBR groups are capped at effGbr+effDeltaMbr; non-GBR
 * groups are individually AMBR-capped and their sum is AMBR-capped
 * again into ue->ul.nonGbrLcgBs. */
17907 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
17909 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
17911 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
17913 if (TRUE == ue->ul.useExtBSRSizes)
17915 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsArr[lcgId]];
17919 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsArr[lcgId]];
17921 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17923 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17924 tmpBsArr[lcgId] = cmnLcg->bs;
17928 nonGbrBs += cmnLcg->reportedBs;
17929 tmpBsArr[lcgId] = cmnLcg->reportedBs;
17930 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17934 ue->ul.nonGbrLcgBs = RGSCH_MIN(nonGbrBs,ue->ul.effAmbr);
17936 ue->ul.totalBsr = tmpBsArr[0] + tmpBsArr[1] + tmpBsArr[2] + tmpBsArr[3];
/* An all-zero report makes the pending periodic-BSR timer redundant. */
17938 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (ue->ul.totalBsr == 0))
17940 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17945 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE) /* SPS_FIX */
17947 if(ue->ul.totalBsr - tmpBsArr[1] == 0)
17948 {/* Updaing the BSR to SPS only if LCG1 BS is present in sps active state */
17949 rgSCHCmnSpsBsrRpt(cell, ue, &ue->ul.lcgArr[1]);
17953 rgSCHCmnUpdUlCompEffBsr(ue);
/* Notify the specific UL scheduler; with UL CA, fan out to active
 * secondary cells. */
17956 if(cell->emtcEnable)
17960 cellSch->apisEmtcUl->rgSCHUpdBsrLong(cell, ue, bsArr);
17967 cellSch->apisUl->rgSCHUpdBsrLong(cell, ue, bsArr);
17971 if (ue->ul.isUlCaEnabled && ue->numSCells)
17973 for(U8 idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
17975 #ifndef PAL_ENABLE_UL_CA
17976 if((ue->cellInfo[idx] != NULLP) &&
17977 (ue->cellInfo[idx]->sCellState == RG_SCH_SCELL_ACTIVE))
17979 if(ue->cellInfo[idx] != NULLP)
17982 cellSch->apisUl->rgSCHUpdBsrLong(ue->cellInfo[idx]->cell, ue, bsArr);
17992 * @brief PHR update.
17996 * Function : rgSCHCmnUpdExtPhr
17998 * Updates extended power headroom information for an UE.
18000 * @param[in] RgSchCellCb *cell
18001 * @param[in] RgSchUeCb *ue
18002 * @param[in] U8 phr
18003 * @param[out] RgSchErrInfo *err
18009 PUBLIC S16 rgSCHCmnUpdExtPhr
18013 RgInfExtPhrCEInfo *extPhr,
18017 PUBLIC S16 rgSCHCmnUpdExtPhr(cell, ue, extPhr, err)
18020 RgInfExtPhrCEInfo *extPhr;
18024 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18025 RgSchCmnAllocRecord *allRcd;
18026 CmLList *node = ueUl->ulAllocLst.last;
18029 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18031 TRC2(rgSCHCmnUpdExtPhr);
/* Walk the UE's UL allocation records (newest first) to find the
 * allocation whose timing matches the MAC CE report time; that
 * record gives power control the grant context for this PHR. */
18037 allRcd = (RgSchCmnAllocRecord *)node->node;
18039 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18041 rgSCHPwrUpdExtPhr(cell, ue, extPhr, allRcd);
/* If UL SPS is active, let the SPS module react to the new PHR. */
18046 if(ulSpsUe->isUlSpsActv)
18048 rgSCHCmnSpsPhrInd(cell,ue);
18053 } /* rgSCHCmnUpdExtPhr */
18059 * @brief PHR update.
18063 * Function : rgSCHCmnUpdPhr
18065 * Updates power headroom information for an UE.
18067 * @param[in] RgSchCellCb *cell
18068 * @param[in] RgSchUeCb *ue
18069 * @param[in] U8 phr
18070 * @param[out] RgSchErrInfo *err
18076 PUBLIC S16 rgSCHCmnUpdPhr
18084 PUBLIC S16 rgSCHCmnUpdPhr(cell, ue, phr, err)
18091 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18092 RgSchCmnAllocRecord *allRcd;
18093 CmLList *node = ueUl->ulAllocLst.last;
18096 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18098 TRC2(rgSCHCmnUpdPhr);
/* Same pattern as rgSCHCmnUpdExtPhr: locate the allocation record
 * matching the MAC CE report time and feed it to power control
 * (using the configured max power reference). */
18104 allRcd = (RgSchCmnAllocRecord *)node->node;
18106 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18108 rgSCHPwrUpdPhr(cell, ue, phr, allRcd, RG_SCH_CMN_PWR_USE_CFG_MAX_PWR);
18113 if(ulSpsUe->isUlSpsActv)
18115 rgSCHCmnSpsPhrInd(cell,ue);
18120 } /* rgSCHCmnUpdPhr */
18123 * @brief UL grant for contention resolution.
18127 * Function : rgSCHCmnContResUlGrant
18129 * Add UE to another queue specifically for CRNTI based contention
18133 * @param[in] RgSchUeCb *ue
18134 * @param[out] RgSchErrInfo *err
18140 PUBLIC S16 rgSCHCmnContResUlGrant
18147 PUBLIC S16 rgSCHCmnContResUlGrant(cell, ue, err)
18153 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18154 TRC2(rgSCHCmnContResUlGrant);
/* Delegate queuing of the contention-resolution grant to the
 * EMTC or legacy UL scheduler. */
18157 if(cell->emtcEnable)
18161 cellSch->apisEmtcUl->rgSCHContResUlGrant(cell, ue);
18168 cellSch->apisUl->rgSCHContResUlGrant(cell, ue);
18174 * @brief SR reception handling.
18178 * Function : rgSCHCmnSrRcvd
18180 * - Update UE's position within/across uplink scheduling queues
18181 * - Update priority of LCGs if needed.
18183 * @param[in] RgSchCellCb *cell
18184 * @param[in] RgSchUeCb *ue
18185 * @param[in] CmLteTimingInfo frm
18186 * @param[out] RgSchErrInfo *err
18192 PUBLIC S16 rgSCHCmnSrRcvd
18196 CmLteTimingInfo frm,
18200 PUBLIC S16 rgSCHCmnSrRcvd(cell, ue, frm, err)
18203 CmLteTimingInfo frm;
18207 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18208 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18209 CmLList *node = ueUl->ulAllocLst.last;
18211 TRC2(rgSCHCmnSrRcvd);
18214 emtcStatsUlTomSrInd++;
18217 RGSCH_INCR_SUB_FRAME(frm, 1); /* 1 TTI after the time SR was sent */
/* If an allocation already exists at SR time + 1 TTI, the SR is
 * redundant (visible check below compares alloc time). */
18220 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)node->node;
18221 if (RGSCH_TIMEINFO_SAME(frm, allRcd->allocTime))
18227 //TODO_SID Need to check when it is getting triggered
18228 ue->isSrGrant = TRUE;
/* Forward the SR to the EMTC or legacy UL scheduler. */
18230 if(cell->emtcEnable)
18234 cellSch->apisEmtcUl->rgSCHSrRcvd(cell, ue);
18241 cellSch->apisUl->rgSCHSrRcvd(cell, ue);
18247 * @brief Returns first uplink allocation to send reception
18252 * Function: rgSCHCmnFirstRcptnReq(cell)
18253 * Purpose: This function returns the first uplink allocation
18254 * (or NULLP if there is none) in the subframe
18255 * in which is expected to prepare and send reception
18260 * @param[in] RgSchCellCb *cell
18261 * @return RgSchUlAlloc*
18264 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq
18269 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq(cell)
18273 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18275 RgSchUlAlloc* alloc = NULLP;
18277 TRC2(rgSCHCmnFirstRcptnReq);
/* rcpReqIdx indexes the UL subframe whose reception requests are
 * due; RGSCH_INVALID_INFO means none pending. Allocations without
 * a HARQ process are skipped. */
18279 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18281 RgSchUlSf* sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18282 alloc = rgSCHUtlUlAllocFirst(sf);
18284 if (alloc && alloc->hqProc == NULLP)
18286 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18294 * @brief Returns first uplink allocation to send reception
18299 * Function: rgSCHCmnNextRcptnReq(cell)
18300 * Purpose: This function returns the next uplink allocation
18301 * (or NULLP if there is none) in the subframe
18302 * in which is expected to prepare and send reception
18307 * @param[in] RgSchCellCb *cell
18308 * @return RgSchUlAlloc*
18311 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq
18314 RgSchUlAlloc *alloc
18317 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq(cell, alloc)
18319 RgSchUlAlloc *alloc;
18322 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18324 //RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18326 TRC2(rgSCHCmnNextRcptnReq);
/* Iterator companion to rgSCHCmnFirstRcptnReq: advance within the
 * same reception-request subframe, skipping allocations without a
 * HARQ process. */
18328 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18330 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18332 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18333 if (alloc && alloc->hqProc == NULLP)
18335 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18346 * @brief Collates DRX enabled UE's scheduled in this SF
18350 * Function: rgSCHCmnDrxStrtInActvTmrInUl(cell)
18351 * Purpose: This function collates the link
18352 * of UE's scheduled in this SF who
18353 * have drx enabled. It then calls
18354 * DRX specific function to start/restart
18355 * inactivity timer in Ul
18359 * @param[in] RgSchCellCb *cell
18363 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl
18368 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl(cell)
18372 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18373 RgSchUlSf *sf = &(cellUl->ulSfArr[cellUl->schdIdx]);
18374 RgSchUlAlloc *alloc = rgSCHUtlUlAllocFirst(sf);
18379 TRC2(rgSCHCmnDrxStrtInActvTmrInUl);
18381 cmLListInit(&ulUeLst);
/* Collect DRX-enabled UEs with a fresh (non-retx, non-SR-grant,
 * non-SPS-occasion) allocation in this scheduled subframe; only
 * these should (re)start the UL DRX inactivity timer. */
18389 if (!(alloc->grnt.isRtx) && ueCb->isDrxEnabled && !(ueCb->isSrGrant)
18391 /* ccpu00139513- DRX inactivity timer should not be started for
18392 * UL SPS occasions */
18393 && (alloc->hqProc->isSpsOccnHqP == FALSE)
18397 cmLListAdd2Tail(&ulUeLst,&(ueCb->ulDrxInactvTmrLnk));
18398 ueCb->ulDrxInactvTmrLnk.node = (PTR)ueCb;
18402 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18405 (Void)rgSCHDrxStrtInActvTmr(cell,&ulUeLst,RG_SCH_DRX_UL);
18412 * @brief Returns first uplink allocation to send HARQ feedback
18417 * Function: rgSCHCmnFirstHqFdbkAlloc
18418 * Purpose: This function returns the first uplink allocation
18419 * (or NULLP if there is none) in the subframe
18420 * for which it is expected to prepare and send HARQ
18425 * @param[in] RgSchCellCb *cell
18426 * @param[in] U8 idx
18427 * @return RgSchUlAlloc*
18430 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc
18436 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc(cell, idx)
18441 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18443 RgSchUlAlloc *alloc = NULLP;
18445 TRC2(rgSCHCmnFirstHqFdbkAlloc);
/* hqFdbkIdx[idx] selects the UL subframe owing HARQ feedback;
 * skip allocations with no HARQ process (note the while loop here
 * versus the single if in the reception-request variants). */
18447 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18449 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18450 alloc = rgSCHUtlUlAllocFirst(sf);
18452 while (alloc && (alloc->hqProc == NULLP))
18454 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18462 * @brief Returns next allocation to send HARQ feedback for.
18466 * Function: rgSCHCmnNextHqFdbkAlloc(cell)
18467 * Purpose: This function returns the next uplink allocation
18468 * (or NULLP if there is none) in the subframe
18469 * for which HARQ feedback needs to be sent.
18473 * @param[in] RgSchCellCb *cell
18474 * @return RgSchUlAlloc*
18477 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc
18480 RgSchUlAlloc *alloc,
18484 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc(cell, alloc, idx)
18486 RgSchUlAlloc *alloc;
18490 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18491 TRC2(rgSCHCmnNextHqFdbkAlloc);
/* Iterator companion to rgSCHCmnFirstHqFdbkAlloc: advance within
 * the same HARQ-feedback subframe, skipping HARQ-less allocations. */
18493 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18495 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18497 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18498 while (alloc && (alloc->hqProc == NULLP))
18500 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18510 /***********************************************************
18512 * Func : rgSCHCmnUlGetITbsFrmIMcs
18514 * Desc : Returns the Itbs that is mapped to an Imcs
18515 * for the case of uplink.
18523 **********************************************************/
18525 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs
18530 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs(iMcs)
18534 TRC2(rgSCHCmnUlGetITbsFrmIMcs);
/* Direct table lookup (36.213 Table 8.6.1-1 mapping). No bounds
 * check on iMcs here — callers must pass a valid index. */
18536 RETVALUE(rgUlIMcsTbl[iMcs].iTbs);
18539 /***********************************************************
18541 * Func : rgSCHCmnUlGetIMcsFrmITbs
18543 * Desc : Returns the Imcs that is mapped to an Itbs
18544 * for the case of uplink.
18548 * Notes: For iTbs 19, iMcs is dependant on modulation order.
18549 * Refer to 36.213, Table 8.6.1-1 and 36.306 Table 4.1-2
18550 * for UE capability information
18554 **********************************************************/
/*
 * rgSCHCmnUlGetIMcsFrmITbs: maps an uplink iTbs (plus UE category) to
 * an iMcs. Per 36.213 Table 8.6.1-1, iTbs 19 maps to different iMcs
 * values depending on whether the UE supports 64QAM (CAT-5) or not.
 * NOTE(review): line-sampled extract — the branch bodies and return
 * statements between the visible conditions are not shown here.
 */
18556 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs
18559 CmLteUeCategory ueCtg
18562 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg)
18564 CmLteUeCategory ueCtg;
18568 TRC2(rgSCHCmnUlGetIMcsFrmITbs);
18574 /*a higher layer can force a 64QAM UE to transmit at 16QAM.
18575 * We currently do not support this. Once the support for such
18576 * is added, ueCtg should be replaced by current transmit
18577 * modulation configuration.Refer to 36.213 -8.6.1
18579 else if ( iTbs < 19 )
/* iTbs == 19 with a non-64QAM-capable UE is a special mapping case */
18583 else if ((iTbs == 19) && (ueCtg != CM_LTE_UE_CAT_5))
18593 /* This is a Temp fix, done for TENBPLUS-3898, ULSCH SDU corruption
18594 was seen when IMCS exceeds 20 on T2k TDD*/
18604 /***********************************************************
18606 * Func : rgSCHCmnUlMinTbBitsForITbs
18608 * Desc : Returns the minimum number of bits that can
18609 * be given as grant for a specific CQI.
18617 **********************************************************/
/*
 * rgSCHCmnUlMinTbBitsForITbs: returns the smallest grantable transport
 * block size (in bits) for the given iTbs — the TBS table entry for a
 * single subband (rgTbSzTbl row 0, column cellUl->sbSize-1).
 * iTbs is bounds-checked against the table before the lookup.
 */
18619 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs
18621 RgSchCmnUlCell *cellUl,
18625 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs)
18626 RgSchCmnUlCell *cellUl;
18630 TRC2(rgSCHCmnUlMinTbBitsForITbs);
18632 RGSCH_ARRAY_BOUND_CHECK(0, rgTbSzTbl[0], iTbs);
18634 RETVALUE(rgTbSzTbl[0][iTbs][cellUl->sbSize-1]);
18637 /***********************************************************
18639 * Func : rgSCHCmnUlSbAlloc
18641 * Desc : Given a required 'number of subbands' and a hole,
18642 * returns a suitable alloc such that the subband
18643 * allocation size is valid
18647 * Notes: Does not assume either passed numSb or hole size
18648 * to be valid for allocation, and hence arrives at
18649 * an acceptable value.
18652 **********************************************************/
/*
 * rgSCHCmnUlSbAlloc: given a requested subband count (numSb) and a hole
 * in the UL subframe's allocation map, produces an allocation whose size
 * is a valid PUSCH allocation length (a multiple of 2, 3 and 5 — the
 * rgSchCmnMult235Tbl 'match'/'prvMatch' entries). Neither the passed
 * numSb nor the hole size is assumed valid; both are rounded here.
 * NOTE(review): line-sampled extract — else-branches/braces between the
 * visible statements are not shown.
 */
18654 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc
18661 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc(sf, numSb, hole)
18667 U8 holeSz; /* valid hole size */
18668 RgSchUlAlloc *alloc;
18669 TRC2(rgSCHCmnUlSbAlloc);
/* Case 1: the hole size itself is already a valid 2^a*3^b*5^c length */
18671 if ((holeSz = rgSchCmnMult235Tbl[hole->num].prvMatch) == hole->num)
18673 numSb = rgSchCmnMult235Tbl[numSb].match;
/* request >= hole: consume the complete hole */
18674 if (numSb >= holeSz)
18676 alloc = rgSCHUtlUlAllocGetCompHole(sf, hole);
18680 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
/* Case 2: hole size is not a valid length; round the request instead */
18685 if (numSb < holeSz)
18687 numSb = rgSchCmnMult235Tbl[numSb].match;
18691 numSb = rgSchCmnMult235Tbl[numSb].prvMatch;
18694 if ( numSb >= holeSz )
18698 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
18704 * @brief To fill the RgSchCmnUeUlAlloc structure of UeCb.
18708 * Function: rgSCHCmnUlUeFillAllocInfo
18709 * Purpose: Specific scheduler to call this API to fill the alloc
18712 * Invoked by: Scheduler
18714 * @param[in] RgSchCellCb *cell
18715 * @param[out] RgSchUeCb *ue
/*
 * rgSCHCmnUlUeFillAllocInfo: called by the specific scheduler after an
 * UL grant is made for 'ue'. Completes the UE's RgSchCmnUeUlAlloc:
 * fills TPC and n-DMRS into the allocation, links the HARQ process,
 * fills the PDCCH with the allocation, records the allocation history
 * (rgSCHCmnUlRecordUeAlloc), and — for new transmissions only — updates
 * the UE's outstanding-allocation/BSR bookkeeping.
 */
18719 PUBLIC Void rgSCHCmnUlUeFillAllocInfo
18725 PUBLIC Void rgSCHCmnUlUeFillAllocInfo(cell, ue)
18730 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18731 RgSchCmnUeUlAlloc *ulAllocInfo;
18732 RgSchCmnUlUe *ueUl;
18734 TRC2(rgSCHCmnUlUeFillAllocInfo);
18736 ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18737 ulAllocInfo = &ueUl->alloc;
18739 /* Fill alloc structure */
18740 rgSCHCmnUlAllocFillTpc(cell, ue, ulAllocInfo->alloc);
18741 rgSCHCmnUlAllocFillNdmrs(cellUl, ulAllocInfo->alloc);
18742 rgSCHCmnUlAllocLnkHqProc(ue, ulAllocInfo->alloc, ulAllocInfo->alloc->hqProc,
18743 ulAllocInfo->alloc->hqProc->isRetx);
18745 rgSCHCmnUlFillPdcchWithAlloc(ulAllocInfo->alloc->pdcch,
18746 ulAllocInfo->alloc, ue);
18747 /* Recording information about this allocation */
18748 rgSCHCmnUlRecordUeAlloc(cell, ue);
18750 /* Update the UE's outstanding allocation */
/* only first transmissions count against the UE's reported BSR */
18751 if (!ulAllocInfo->alloc->hqProc->isRetx)
18753 rgSCHCmnUlUpdOutStndAlloc(cell, ue, ulAllocInfo->allocdBytes);
18760 * @brief Update the UEs outstanding alloc based on the BSR report's timing.
18765 * Function: rgSCHCmnUpdUlCompEffBsr
18766 * Purpose: Clear off all the allocations from outstanding allocation that
18767 * are later than or equal to BSR timing information (stored in UEs datIndTime).
18769 * Invoked by: Scheduler
18771 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnUpdUlCompEffBsr: recomputes the UE's effective BSR after a BSR
 * report. Sums the recorded allocations (ulAllocLst) that are at/after
 * the BSR's timing reference, deducts that outstanding amount first from
 * LCG0's buffer status and then from the non-LCG0 (GBR + non-GBR) buffer
 * status, and finally stores ue->ul.effBsr = nonLcg0Bsr + LCG0 bs.
 * NOTE(review): line-sampled extract — loop headers, braces and the
 * declarations of 'nonLcg0Bsr'/'lcgId' are not visible here.
 */
18775 PRIVATE Void rgSCHCmnUpdUlCompEffBsr
18780 PRIVATE Void rgSCHCmnUpdUlCompEffBsr(ue)
18784 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,ue->cell);
18785 CmLList *node = ueUl->ulAllocLst.last;
18786 RgSchCmnAllocRecord *allRcd;
18787 U32 outStndAlloc=0;
18788 U32 nonLcg0OutStndAllocBs=0;
18791 RgSchCmnLcg *cmnLcg = NULLP;
18792 TRC2(rgSCHCmnUpdUlCompEffBsr);
/* locate the allocation record matching the BSR report time */
18796 allRcd = (RgSchCmnAllocRecord *)node->node;
18797 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
/* accumulate all outstanding allocation bytes from that point on */
18806 allRcd = (RgSchCmnAllocRecord *)node->node;
18808 outStndAlloc += allRcd->alloc;
18811 cmnLcg = (RgSchCmnLcg *)(ue->ul.lcgArr[0].sch);
18812 /* Update UEs LCG0's bs according to the total outstanding BSR allocation.*/
18813 if (cmnLcg->bs > outStndAlloc)
18815 cmnLcg->bs -= outStndAlloc;
18816 ue->ul.minReqBytes = cmnLcg->bs;
/* LCG0 fully consumed: remainder is charged against the other LCGs */
18821 nonLcg0OutStndAllocBs = outStndAlloc - cmnLcg->bs;
18825 for(lcgId = 1;lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
18827 if(RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
18829 cmnLcg = ((RgSchCmnLcg *) (ue->ul.lcgArr[lcgId].sch));
18830 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
18832 nonLcg0Bsr += cmnLcg->bs;
18836 nonLcg0Bsr += ue->ul.nonGbrLcgBs;
18837 if (nonLcg0OutStndAllocBs > nonLcg0Bsr)
18843 nonLcg0Bsr -= nonLcg0OutStndAllocBs;
18845 ue->ul.nonLcg0Bs = nonLcg0Bsr;
18846 /* Cap effBsr with nonLcg0Bsr and append lcg0 bs.
18847 * nonLcg0Bsr limit applies only to lcg1,2,3 */
18848 /* better be handled in individual scheduler */
18849 ue->ul.effBsr = nonLcg0Bsr +\
18850 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
18855 * @brief Records information about the current allocation.
18859 * Function: rgSCHCmnUlRecordUeAlloc
18860 * Purpose: Records information about the current allocation.
18861 * This includes the allocated bytes, as well
18862 * as some power information.
18864 * Invoked by: Scheduler
18866 * @param[in] RgSchCellCb *cell
18867 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnUlRecordUeAlloc: records the current UL allocation in the UE's
 * bounded allocation history (ulAllocLst). Recycles the oldest record
 * (list head), stamps it with the earliest time the UE could report a
 * BSR for this grant (crntTime + TFU_ULCNTRL_DLDELTA +
 * RGSCH_PDCCH_PUSCH_DELTA, EMTC-specific increment when applicable),
 * re-queues it at the tail, and stores allocated bytes, RB count, the
 * UL CQI derived from maxUlCqi, and TPC. Also feeds the RB count to the
 * power module and the byte count into cell measurements.
 */
18871 PUBLIC Void rgSCHCmnUlRecordUeAlloc
18877 PUBLIC Void rgSCHCmnUlRecordUeAlloc(cell, ue)
18883 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18885 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18886 CmLListCp *lst = &ueUl->ulAllocLst;
18887 CmLList *node = ueUl->ulAllocLst.first;
18888 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18889 RgSchCmnUeUlAlloc *ulAllocInfo = &ueUl->alloc;
18890 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
18891 TRC2(rgSCHCmnUlRecordUeAlloc);
/* recycle the oldest record: detach, restamp, append at the tail */
18893 cmLListDelFrm(lst, &allRcd->lnk);
18895 /* To the crntTime, add the MIN time at which UE will
18896 * actually send the BSR i.e DELTA+4 */
18897 allRcd->allocTime = cell->crntTime;
18898 /*ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
18900 if(ue->isEmtcUe == TRUE)
18902 RGSCH_INCR_SUB_FRAME_EMTC(allRcd->allocTime,
18903 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18908 RGSCH_INCR_SUB_FRAME(allRcd->allocTime,
18909 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18912 allRcd->allocTime = cellUl->schdTime;
18914 cmLListAdd2Tail(lst, &allRcd->lnk);
18916 /* Filling in the parameters to be recorded */
18917 allRcd->alloc = ulAllocInfo->allocdBytes;
18918 //allRcd->numRb = ulAllocInfo->alloc->grnt.numRb;
/* 5GTF grant: RB count is derived from VRB groups, not numRb */
18919 allRcd->numRb = (ulAllocInfo->alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
18920 /*Recording the UL CQI derived from the maxUlCqi */
18921 allRcd->cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
18922 allRcd->tpc = ulAllocInfo->alloc->grnt.tpc;
18924 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18926 cell->measurements.ulBytesCnt += ulAllocInfo->allocdBytes;
18931 /** PHR handling for MSG3
18932 * @brief Records allocation information of msg3 in the the UE.
18936 * Function: rgSCHCmnUlRecMsg3Alloc
18937 * Purpose: Records information about msg3 allocation.
18938 * This includes the allocated bytes, as well
18939 * as some power information.
18941 * Invoked by: Scheduler
18943 * @param[in] RgSchCellCb *cell
18944 * @param[in] RgSchUeCb *ue
18945 * @param[in] RgSchRaCb *raCb
/*
 * rgSCHCmnUlRecMsg3Alloc: records the msg3 (RACH) allocation in the UE's
 * allocation history. Recycles the oldest record, stamps it with the
 * msg3 allocation time from the RA control block, and stores the msg3
 * grant's data size, RB count, CCCH CQI and TPC. RB count is also fed
 * to the power module for PHR handling.
 */
18949 PUBLIC Void rgSCHCmnUlRecMsg3Alloc
18956 PUBLIC Void rgSCHCmnUlRecMsg3Alloc(cell, ue, raCb)
18962 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18963 CmLListCp *lst = &ueUl->ulAllocLst;
18964 CmLList *node = ueUl->ulAllocLst.first;
18965 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18967 /* Stack Crash problem for TRACE5 changes */
18968 TRC2(rgSCHCmnUlRecMsg3Alloc);
/* move the recycled record to the tail with the msg3 timestamp */
18970 cmLListDelFrm(lst, node);
18971 allRcd->allocTime = raCb->msg3AllocTime;
18972 cmLListAdd2Tail(lst, node);
18974 /* Filling in the parameters to be recorded */
18975 allRcd->alloc = raCb->msg3Grnt.datSz;
18976 allRcd->numRb = raCb->msg3Grnt.numRb;
18977 allRcd->cqi = raCb->ccchCqi;
18978 allRcd->tpc = raCb->msg3Grnt.tpc;
18980 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18985 * @brief Keeps track of the most recent RG_SCH_CMN_MAX_ALLOC_TRACK
18986 * allocations to track. Adds this allocation to the ueUl's ulAllocLst.
18991 * Function: rgSCHCmnUlUpdOutStndAlloc
18992 * Purpose: Recent Allocation shall be at First Pos'n.
18993 * Remove the last node, update the fields
18994 * with the new allocation and add at front.
18996 * Invoked by: Scheduler
18998 * @param[in] RgSchCellCb *cell
18999 * @param[in] RgSchUeCb *ue
19000 * @param[in] U32 alloc
/*
 * rgSCHCmnUlUpdOutStndAlloc: deducts a fresh UL grant of 'alloc' bytes
 * from the UE's reported buffer status — first from LCG0's bs, then the
 * spill-over from nonLcg0Bs — and recomputes ue->ul.effBsr. When effBsr
 * drops to zero the periodic BSR timer is restarted (unless this was an
 * SR grant) and the UE's minimum-requirement cap is reset.
 * NOTE(review): line-sampled extract — braces/else keywords between the
 * visible statements are not shown.
 */
19004 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc
19011 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc(cell, ue, alloc)
19017 U32 nonLcg0Alloc=0;
19018 TRC2(rgSCHCmnUlUpdOutStndAlloc);
19020 /* Update UEs LCG0's bs according to the total outstanding BSR allocation.*/
19021 if (((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs > alloc)
19023 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs -= alloc;
/* LCG0 exhausted: charge the remainder against the non-LCG0 budget */
19027 nonLcg0Alloc = alloc - ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
19028 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = 0;
19031 if (nonLcg0Alloc >= ue->ul.nonLcg0Bs)
19033 ue->ul.nonLcg0Bs = 0;
19037 ue->ul.nonLcg0Bs -= nonLcg0Alloc;
19039 /* Cap effBsr with effAmbr and append lcg0 bs.
19040 * effAmbr limit applies only to lcg1,2,3 non GBR LCG's*/
19041 /* better be handled in individual scheduler */
19042 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
19043 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
/* buffer fully served: rearm the periodic BSR timer (not for SR grants) */
19045 if (ue->ul.effBsr == 0)
19047 if (ue->bsrTmr.tmrEvnt != TMR_NONE)
19049 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
19052 if (FALSE == ue->isSrGrant)
19054 if (ue->ul.bsrTmrCfg.isPrdBsrTmrPres)
19057 rgSCHTmrStartTmr(cell, ue, RG_SCH_TMR_BSR,
19058 ue->ul.bsrTmrCfg.prdBsrTmr);
19064 /* Resetting UEs lower Cap */
19065 ue->ul.minReqBytes = 0;
19072 * @brief Returns the "Itbs" for a given UE.
19076 * Function: rgSCHCmnUlGetITbs
19077 * Purpose: This function returns the "Itbs" for a given UE.
19079 * Invoked by: Scheduler
19081 * @param[in] RgSchUeCb *ue
/*
 * rgSCHCmnUlGetITbs: returns the uplink iTbs for a UE. The working CQI
 * is the UE's valid/current UL CQI, capped at maxUlCqi for UEs that are
 * not 64QAM-capable (non CAT-5). One path derives iTbs from the
 * link-adaptation state (cqiBasediTbs + deltaiTbs, scaled by 100) and
 * clamps it to maxiTbs and the cell's maxUlItbs; the fallback path maps
 * CQI directly through rgSchCmnUlCqiToTbsTbl (index 0/1 = normal/ext CP).
 * NOTE(review): line-sampled extract — the preprocessor branches that
 * select between these paths are not visible here.
 */
19085 PUBLIC U8 rgSCHCmnUlGetITbs
19092 PUBLIC U8 rgSCHCmnUlGetITbs(cell, ue, isEcp)
19098 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
19099 /* CQI will be capped to maxUlCqi for 16qam UEs */
19100 CmLteUeCategory ueCtgy = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
19104 U8 maxiTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ueUl->maxUlCqi];
19107 TRC2(rgSCHCmnUlGetITbs);
19109 /* #ifdef RG_SCH_CMN_EXT_CP_SUP For ECP pick index 1 */
19111 if ( (ueCtgy != CM_LTE_UE_CAT_5) &&
19112 (ueUl->validUlCqi > ueUl->maxUlCqi)
19115 cqi = ueUl->maxUlCqi;
19119 cqi = ueUl->validUlCqi;
/* link-adaptation path: LA state is maintained in hundredths of an iTbs */
19123 iTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
19125 RG_SCH_CHK_ITBS_RANGE(iTbs, maxiTbs);
19127 iTbs = RGSCH_MIN(iTbs, ue->cell->thresholds.maxUlItbs);
19130 /* This is a Temp fix, done for TENBPLUS-3898, ULSCH SDU corruption
19131 was seen when IMCS exceeds 20 on T2k TDD */
/* fallback path: cap the measured CQI and map it straight to iTbs */
19140 if ( (ueCtgy != CM_LTE_UE_CAT_5) && (ueUl->crntUlCqi[0] > ueUl->maxUlCqi ))
19142 cqi = ueUl->maxUlCqi;
19146 cqi = ueUl->crntUlCqi[0];
19149 RETVALUE(rgSchCmnUlCqiToTbsTbl[(U8)isEcp][cqi]);
19153 * @brief This function adds the UE to DLRbAllocInfo TX lst.
19157 * Function: rgSCHCmnDlRbInfoAddUeTx
19158 * Purpose: This function adds the UE to DLRbAllocInfo TX lst.
19160 * Invoked by: Common Scheduler
19162 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19163 * @param[in] RgSchUeCb *ue
19164 * @param[in] RgSchDlHqProcCb *hqP
/*
 * rgSCHCmnDlRbInfoAddUeTx: appends a HARQ process to the DL RB-alloc
 * info's new-transmission list (dedAlloc.txHqPLst), but only if it is
 * not already queued (reqLnk.node == NULLP). For frequency-selective
 * cells the DLFS module owns list placement; otherwise the process is
 * simply appended at the tail.
 */
19169 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx
19172 RgSchCmnDlRbAllocInfo *allocInfo,
19174 RgSchDlHqProcCb *hqP
19177 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx(cell, allocInfo, ue, hqP)
19179 RgSchCmnDlRbAllocInfo *allocInfo;
19181 RgSchDlHqProcCb *hqP;
19184 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
19186 TRC2(rgSCHCmnDlRbInfoAddUeTx);
/* reqLnk.node == NULLP means this HARQ proc is not yet in any request list */
19188 if (hqP->reqLnk.node == NULLP)
19190 if (cellSch->dl.isDlFreqSel)
19192 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19193 &allocInfo->dedAlloc.txHqPLst, hqP);
19198 cmLListAdd2Tail(&allocInfo->dedAlloc.txHqPLst, &hqP->reqLnk);
19200 hqP->reqLnk.node = (PTR)hqP;
19207 * @brief This function adds the UE to DLRbAllocInfo RETX lst.
19211 * Function: rgSCHCmnDlRbInfoAddUeRetx
19212 * Purpose: This function adds the UE to DLRbAllocInfo RETX lst.
19214 * Invoked by: Common Scheduler
19216 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19217 * @param[in] RgSchUeCb *ue
19218 * @param[in] RgSchDlHqProcCb *hqP
/*
 * rgSCHCmnDlRbInfoAddUeRetx: appends a HARQ process to the DL RB-alloc
 * info's retransmission list (dedAlloc.retxHqPLst). Unlike the TX
 * variant, no duplicate check is made here (see the in-line comment).
 * DLFS cells delegate list placement to the DLFS module.
 */
19223 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx
19226 RgSchCmnDlRbAllocInfo *allocInfo,
19228 RgSchDlHqProcCb *hqP
19231 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx(cell, allocInfo, ue, hqP)
19233 RgSchCmnDlRbAllocInfo *allocInfo;
19235 RgSchDlHqProcCb *hqP;
19238 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19240 TRC2(rgSCHCmnDlRbInfoAddUeRetx);
19242 if (cellSch->dl.isDlFreqSel)
19244 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19245 &allocInfo->dedAlloc.retxHqPLst, hqP);
19249 /* checking UE's presence in this lst is unnecessary */
19250 cmLListAdd2Tail(&allocInfo->dedAlloc.retxHqPLst, &hqP->reqLnk);
19251 hqP->reqLnk.node = (PTR)hqP;
19257 * @brief This function adds the UE to DLRbAllocInfo TX-RETX lst.
19261 * Function: rgSCHCmnDlRbInfoAddUeRetxTx
19262 * Purpose: This adds the UE to DLRbAllocInfo TX-RETX lst.
19264 * Invoked by: Common Scheduler
19266 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19267 * @param[in] RgSchUeCb *ue
19268 * @param[in] RgSchDlHqProcCb *hqP
/*
 * rgSCHCmnDlRbInfoAddUeRetxTx: appends a HARQ process to the DL RB-alloc
 * info's combined TX-RETX list (dedAlloc.txRetxHqPLst); DLFS cells
 * delegate list placement to the DLFS module.
 * NOTE(review): the visible K&R parameter list omits 'cell' while the
 * body references it — lines are missing from this extract, so confirm
 * the full parameter list against the original file.
 */
19273 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx
19276 RgSchCmnDlRbAllocInfo *allocInfo,
19278 RgSchDlHqProcCb *hqP
19281 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx(allocInfo, ue, hqP)
19283 RgSchCmnDlRbAllocInfo *allocInfo;
19285 RgSchDlHqProcCb *hqP;
19288 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19290 TRC2(rgSCHCmnDlRbInfoAddUeRetxTx);
19292 if (cellSch->dl.isDlFreqSel)
19294 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19295 &allocInfo->dedAlloc.txRetxHqPLst, hqP);
19299 cmLListAdd2Tail(&allocInfo->dedAlloc.txRetxHqPLst, &hqP->reqLnk);
19300 hqP->reqLnk.node = (PTR)hqP;
19306 * @brief This function adds the UE to DLRbAllocInfo NonSchdRetxLst.
19310 * Function: rgSCHCmnDlAdd2NonSchdRetxLst
19311 * Purpose: During RB estimation for RETX, if allocation fails
19312 * then appending it to NonSchdRetxLst, the further
19313 * action is taken as part of Finalization in
19314 * respective schedulers.
19316 * Invoked by: Common Scheduler
19318 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19319 * @param[in] RgSchUeCb *ue
19320 * @param[in] RgSchDlHqProcCb *hqP
/*
 * rgSCHCmnDlAdd2NonSchdRetxLst: when RB estimation for a retransmission
 * fails, queues the HARQ process on dedAlloc.nonSchdRetxHqPLst via its
 * schdLstLnk; the specific scheduler handles these during finalization.
 * SPS DL HARQ processes are filtered out first (SPS retx handling is
 * separate).
 */
19325 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst
19327 RgSchCmnDlRbAllocInfo *allocInfo,
19329 RgSchDlHqProcCb *hqP
19332 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst(allocInfo, ue, hqP)
19333 RgSchCmnDlRbAllocInfo *allocInfo;
19335 RgSchDlHqProcCb *hqP;
19338 CmLList *schdLnkNode;
19340 TRC2(rgSCHCmnDlAdd2NonSchdRetxLst);
/* SPS HARQ procs are excluded from the common non-scheduled retx list */
19343 if ( (hqP->sch != (RgSchCmnDlHqProc *)NULLP) &&
19344 (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP)))
19350 schdLnkNode = &hqP->schdLstLnk;
19351 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
19352 cmLListAdd2Tail(&allocInfo->dedAlloc.nonSchdRetxHqPLst, schdLnkNode);
19360 * @brief This function adds the UE to DLRbAllocInfo NonSchdTxRetxLst.
19364 * Function: rgSCHCmnDlAdd2NonSchdTxRetxLst
19365 * Purpose: During RB estimation for TXRETX, if allocation fails
19366 * then appending it to NonSchdTxRetxLst, the further
19367 * action is taken as part of Finalization in
19368 * respective schedulers.
19370 * Invoked by: Common Scheduler
19372 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19373 * @param[in] RgSchUeCb *ue
19374 * @param[in] RgSchDlHqProcCb *hqP
19380 * @brief This function handles the initialisation of DL HARQ/ACK feedback
19381 * timing information for each DL subframe.
19385 * Function: rgSCHCmnDlANFdbkInit
19386 * Purpose: Each DL subframe stores the sfn and subframe
19387 * information of UL subframe in which it expects
19388 * HARQ ACK/NACK feedback for this subframe.It
19389 * generates the information based on Downlink
19390 * Association Set Index table.
19392 * Invoked by: Scheduler
19394 * @param[in] RgSchCellCb* cell
/*
 * rgSCHCmnDlANFdbkInit (TDD): for every DL subframe of the first radio
 * frame, derives from the DL Association Set Index table (36.213
 * Table 10.1-1) the UL subframe (sfn offset + subframe number + index m)
 * in which HARQ ACK/NACK for that DL subframe is expected, and stores it
 * in cell->subFrms[dlIdx]->dlFdbkInfo. Subsequent radio frames copy the
 * first frame's entries.
 * NOTE(review): line-sampled extract — several loop/brace lines are not
 * visible here.
 */
19399 PRIVATE S16 rgSCHCmnDlANFdbkInit
19404 PRIVATE S16 rgSCHCmnDlANFdbkInit(cell)
19409 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19410 U8 maxDlSubfrms = cell->numDlSubfrms;
19417 RgSchTddSubfrmInfo ulSubfrmInfo;
19420 TRC2(rgSCHCmnDlANFdbkInit);
19422 ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19423 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19425 /* Generate HARQ ACK/NACK feedback information for each DL sf in a radio frame
19426 * Calculate this information based on DL Association set Index table */
19427 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
/* advance sfNum to the next UL subframe of the UL/DL configuration */
19429 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19430 RG_SCH_TDD_UL_SUBFRAME)
19432 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
/* one entry per 'k' value in the association set for this UL subframe */
19436 for(idx=0; idx < rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19437 numFdbkSubfrms; idx++)
19439 calcSfNum = sfNum - rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19443 calcSfnOffset = RGSCH_CEIL(-calcSfNum, RGSCH_NUM_SUB_FRAMES);
19450 calcSfNum = ((RGSCH_NUM_SUB_FRAMES * calcSfnOffset) + calcSfNum)\
19451 % RGSCH_NUM_SUB_FRAMES;
/* map the absolute subframe number to the DL-subframe array index,
 * accounting for the special-subframe layout of the configuration */
19453 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19457 else if((ulSubfrmInfo.switchPoints == 2) && (calcSfNum <= \
19458 RG_SCH_CMN_SPL_SUBFRM_6))
19460 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19464 dlIdx = calcSfNum - maxUlSubfrms;
19467 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = sfNum;
19468 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = calcSfnOffset;
19469 cell->subFrms[dlIdx]->dlFdbkInfo.m = idx;
19471 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19474 /* DL subframes in the subsequent radio frames are initialized
19475 * with the previous radio frames */
19476 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;\
19479 sfNum = dlIdx - rgSchTddNumDlSubfrmTbl[ulDlCfgIdx]\
19480 [RGSCH_NUM_SUB_FRAMES-1];
19481 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = \
19482 cell->subFrms[sfNum]->dlFdbkInfo.subframe;
19483 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = \
19484 cell->subFrms[sfNum]->dlFdbkInfo.sfnOffset;
19485 cell->subFrms[dlIdx]->dlFdbkInfo.m = cell->subFrms[sfNum]->dlFdbkInfo.m;
19491 * @brief This function handles the initialization of uplink association
19492 * set information for each DL subframe.
19497 * Function: rgSCHCmnDlKdashUlAscInit
19498 * Purpose: Each DL sf stores the sfn and sf information of UL sf
19499 * in which it expects HQ ACK/NACK trans. It generates the information
19500 * based on k` in UL association set index table.
19502 * Invoked by: Scheduler
19504 * @param[in] RgSchCellCb* cell
/*
 * rgSCHCmnDlKdashUlAscInit (TDD): using k' from the UL Association Set
 * Index table, computes for each DL subframe the UL subframe (sfn offset
 * + subframe number) where HARQ ACK/NACK for UL transmissions will be
 * sent, storing it in cell->subFrms[dlIdx]->ulAscInfo. DL indices never
 * touched by the loop (tracked in the dlPres bitmask) are marked
 * RGSCH_INVALID_INFO; later radio frames copy the first frame's entries.
 * NOTE(review): line-sampled extract — several loop/brace lines are not
 * visible here.
 */
19509 PRIVATE S16 rgSCHCmnDlKdashUlAscInit
19514 PRIVATE S16 rgSCHCmnDlKdashUlAscInit(cell)
19519 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19520 U8 maxDlSubfrms = cell->numDlSubfrms;
19526 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19527 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19528 [RGSCH_NUM_SUB_FRAMES-1];
19531 TRC2(rgSCHCmnDlKdashUlAscInit);
19533 /* Generate ACK/NACK offset information for each DL subframe in a radio frame
19534 * Calculate this information based on K` in UL Association Set table */
19535 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19537 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19538 RG_SCH_TDD_UL_SUBFRAME)
19540 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
/* table is indexed by ulDlCfgIdx-1: config 0 has no k' entries here */
19544 calcSfNum = (sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum] + \
19545 RGSCH_NUM_SUB_FRAMES) % RGSCH_NUM_SUB_FRAMES;
19546 calcSfnOffset = sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum];
19547 if(calcSfnOffset < 0)
19549 calcSfnOffset = RGSCH_CEIL(-calcSfnOffset, RGSCH_NUM_SUB_FRAMES);
/* map the subframe number to a DL array index (special-subframe aware) */
19556 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19560 else if((ulSubfrmInfo.switchPoints == 2) &&
19561 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19563 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19567 dlIdx = calcSfNum - maxUlSubfrms;
19570 cell->subFrms[dlIdx]->ulAscInfo.subframe = sfNum;
19571 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset = calcSfnOffset;
19573 /* set dlIdx for which ulAscInfo is updated */
19574 dlPres = dlPres | (1 << dlIdx);
19575 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19578 /* Set Invalid information for which ulAscInfo is not present */
19580 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19583 /* If dlPres is 0, ulAscInfo is not present in that DL index */
19584 if(! ((dlPres >> sfCount)&0x01))
19586 cell->subFrms[sfCount]->ulAscInfo.sfnOffset =
19587 RGSCH_INVALID_INFO;
19588 cell->subFrms[sfCount]->ulAscInfo.subframe =
19589 RGSCH_INVALID_INFO;
19593 /* DL subframes in the subsequent radio frames are initialized
19594 * with the previous radio frames */
19595 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;
19599 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19600 cell->subFrms[dlIdx]->ulAscInfo.subframe =
19601 cell->subFrms[sfNum]->ulAscInfo.subframe;
19602 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset =
19603 cell->subFrms[sfNum]->ulAscInfo.sfnOffset;
19610 * @brief This function initialises the 'Np' value for 'p'
19614 * Function: rgSCHCmnDlNpValInit
19615 * Purpose: To initialise the 'Np' value for each 'p'. It is used
19616 * to find the mapping between nCCE and 'p' and used in
19617 * HARQ ACK/NACK reception.
19619 * Invoked by: Scheduler
19621 * @param[in] RgSchCellCb* cell
/*
 * rgSCHCmnDlNpValInit (TDD): precomputes the 'Np' values used to map
 * nCCE to 'p' for HARQ ACK/NACK reception. Np(0) is fixed at 0; for
 * p >= 1, Np = floor(dlTotalBw * (p * numSubcarriers - 4) / 36)
 * as computed below.
 */
19626 PRIVATE S16 rgSCHCmnDlNpValInit
19631 PRIVATE S16 rgSCHCmnDlNpValInit(cell)
19637 TRC2(rgSCHCmnDlNpValInit);
19639 /* Always Np is 0 for p=0 */
19640 cell->rgSchTddNpValTbl[0] = 0;
19642 for(idx=1; idx < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; idx++)
19644 np = cell->bwCfg.dlTotalBw * (idx * RG_SCH_CMN_NUM_SUBCAR - 4);
19645 cell->rgSchTddNpValTbl[idx] = (U8) (np/36);
19652 * @brief This function handles the creation of RACH preamble
19653 * list to queue the preambles and process at the scheduled
19658 * Function: rgSCHCmnDlCreateRachPrmLst
19659 * Purpose: To create RACH preamble list based on RA window size.
19660 * It is used to queue the preambles and process it at the
19663 * Invoked by: Scheduler
19665 * @param[in] RgSchCellCb* cell
/*
 * rgSCHCmnDlCreateRachPrmLst: allocates the cell's RACH-preamble queue —
 * an array of CmLListCp sized raArrSz (from the RA response window) *
 * RGSCH_MAX_RA_RNTI_PER_SUBFRM * RGSCH_NUM_SUB_FRAMES — and records the
 * chosen sizes in cell->raInfo. Preambles are queued here and processed
 * at the scheduled time.
 * NOTE(review): the failure-handling branch after the allocation call is
 * not visible in this line-sampled extract.
 */
19670 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst
19675 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst(cell)
19683 TRC2(rgSCHCmnDlCreateRachPrmLst);
19685 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19687 lstSize = raArrSz * RGSCH_MAX_RA_RNTI_PER_SUBFRM * RGSCH_NUM_SUB_FRAMES;
19689 cell->raInfo.maxRaSize = raArrSz;
19690 ret = rgSCHUtlAllocSBuf(cell->instIdx,
19691 (Data **)(&cell->raInfo.raReqLst), (Size)(lstSize * sizeof(CmLListCp)));
19697 cell->raInfo.lstSize = lstSize;
19704 * @brief This function handles the initialization of RACH Response
19705 * information at each DL subframe.
19709 * Function: rgSCHCmnDlRachInfoInit
19710 * Purpose: Each DL subframe stores the sfn and subframe information of
19711 * possible RACH response allowed for UL subframes. It generates
19712 * the information based on PRACH configuration.
19714 * Invoked by: Scheduler
19716 * @param[in] RgSchCellCb* cell
/*
 * rgSCHCmnDlRachInfoInit (TDD): for every UL (and special) subframe of
 * the configuration, computes the RA-response window — from
 * sfNum + RG_SCH_CMN_RARSP_WAIT_PRD + numRaSubFrms for raWinSize
 * subframes — snaps its edges to DL subframes via the high/low DL
 * subframe index tables, records every candidate RACH-response
 * transmission opportunity into the temporary rachRspLst[][] array, and
 * marks in each window-end entry the delete info (which queued preambles
 * expire there). The result is copied into the cell via
 * rgSCHCmnDlCpyRachInfo.
 * NOTE(review): line-sampled extract — several assignments and braces
 * (e.g. the targets of lines 19771/19820/19832) are not visible here.
 */
19721 PRIVATE S16 rgSCHCmnDlRachInfoInit
19726 PRIVATE S16 rgSCHCmnDlRachInfoInit(cell)
19731 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19734 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19735 [RGSCH_NUM_SUB_FRAMES-1];
19737 RgSchTddRachRspLst rachRspLst[3][RGSCH_NUM_SUB_FRAMES];
19745 RgSchTddRachDelInfo *delInfo;
19749 TRC2(rgSCHCmnDlRachInfoInit);
19751 cmMemset((U8 *)rachRspLst, 0, sizeof(rachRspLst));
19753 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19755 /* Include Special subframes */
19756 maxUlSubfrms = maxUlSubfrms + \
19757 rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx].switchPoints;
19758 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
/* advance sfNum past pure-DL subframes (UL and special remain) */
19760 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] ==
19761 RG_SCH_TDD_DL_SUBFRAME)
19763 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
/* RA response window in absolute subframe units */
19767 startWin = (sfNum + RG_SCH_CMN_RARSP_WAIT_PRD + \
19768 ((RgSchCmnCell *)cell->sc.sch)->dl.numRaSubFrms);
19769 endWin = (startWin + cell->rachCfg.raWinSize - 1);
19771 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][startWin%RGSCH_NUM_SUB_FRAMES];
19772 /* Find the next DL subframe starting from Subframe 0 */
19773 if((startSubfrmIdx % RGSCH_NUM_SUB_FRAMES) == 0)
19775 startWin = RGSCH_CEIL(startWin, RGSCH_NUM_SUB_FRAMES);
19776 startWin = startWin * RGSCH_NUM_SUB_FRAMES;
19780 rgSchTddLowDlSubfrmIdxTbl[ulDlCfgIdx][endWin%RGSCH_NUM_SUB_FRAMES];
19781 endWin = (endWin/RGSCH_NUM_SUB_FRAMES) * RGSCH_NUM_SUB_FRAMES \
19783 if(startWin > endWin)
19787 /* Find all the possible RACH Response transmission
19788 * time within the RA window size */
19789 startSubfrmIdx = startWin%RGSCH_NUM_SUB_FRAMES;
19790 for(sfnIdx = startWin/RGSCH_NUM_SUB_FRAMES;
19791 sfnIdx <= endWin/RGSCH_NUM_SUB_FRAMES; sfnIdx++)
19793 if(sfnIdx == endWin/RGSCH_NUM_SUB_FRAMES)
19795 endSubfrmIdx = endWin%RGSCH_NUM_SUB_FRAMES;
19799 endSubfrmIdx = RGSCH_NUM_SUB_FRAMES-1;
19802 /* Find all the possible RACH Response transmission
19803 * time within radio frame */
19804 for(subfrmIdx = startSubfrmIdx;
19805 subfrmIdx <= endSubfrmIdx; subfrmIdx++)
19807 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][subfrmIdx] ==
19808 RG_SCH_TDD_UL_SUBFRAME)
19812 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
19813 /* Find the next DL subframe starting from Subframe 0 */
19814 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
19818 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx], subfrmIdx);
19820 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
19821 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset = sfnIdx;
19822 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].subframe[numSubfrms]
19824 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms++;
19826 startSubfrmIdx = RG_SCH_CMN_SUBFRM_0;
19828 /* Update the subframes to be deleted at this subframe */
19829 /* Get the subframe after the end of RA window size */
19832 sfnOffset = endWin/RGSCH_NUM_SUB_FRAMES;
19835 sfnOffset += raArrSz;
19837 sfnIdx = (endWin/RGSCH_NUM_SUB_FRAMES) % raArrSz;
19839 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx],endSubfrmIdx-1);
19840 if((endSubfrmIdx == RGSCH_NUM_SUB_FRAMES) ||
19841 (rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx] ==
19842 RGSCH_NUM_SUB_FRAMES))
19845 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][RG_SCH_CMN_SUBFRM_0];
19849 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx];
/* record which queued RA requests expire once this window closes */
19852 delInfo = &rachRspLst[sfnIdx][subfrmIdx].delInfo;
19853 delInfo->sfnOffset = sfnOffset;
19854 delInfo->subframe[delInfo->numSubfrms] = sfNum;
19855 delInfo->numSubfrms++;
19857 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19860 ret = rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz);
19870 * @brief This function handles the initialization of PHICH information
19871 * for each DL subframe based on PHICH table.
19875 * Function: rgSCHCmnDlPhichOffsetInit
19876 * Purpose: Each DL subf stores the sfn and subf information of UL subframe
19877 * for which it transmits PHICH in this subframe. It generates the information
19878 * based on PHICH table.
19880 * Invoked by: Scheduler
19882 * @param[in] RgSchCellCb* cell
/*
 * rgSCHCmnDlPhichOffsetInit (TDD): using K from the PHICH table
 * (rgSchTddKPhichTbl), computes for each DL subframe which UL subframe's
 * PHICH it carries (sfn offset + subframe number) and stores it in
 * cell->subFrms[dlIdx]->phichOffInfo. DL indices not covered (tracked in
 * the dlPres bitmask) get RGSCH_INVALID_INFO / numSubfrms = 0; later
 * radio frames copy the first frame's entries.
 * NOTE(review): line-sampled extract — several loop/brace lines are not
 * visible here.
 */
19887 PRIVATE S16 rgSCHCmnDlPhichOffsetInit
19892 PRIVATE S16 rgSCHCmnDlPhichOffsetInit(cell)
19897 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19898 U8 maxDlSubfrms = cell->numDlSubfrms;
19905 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19906 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19907 [RGSCH_NUM_SUB_FRAMES-1];
19909 TRC2(rgSCHCmnDlPhichOffsetInit);
19911 /* Generate PHICH offset information for each DL subframe in a radio frame
19912 * Calculate this information based on K in PHICH table */
19913 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19915 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19916 RG_SCH_TDD_UL_SUBFRAME)
19918 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
/* PHICH for UL subframe sfNum is sent K subframes later */
19922 calcSfNum = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) % \
19923 RGSCH_NUM_SUB_FRAMES;
19924 calcSfnOffset = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) / \
19925 RGSCH_NUM_SUB_FRAMES;
/* map the subframe number to a DL array index (special-subframe aware) */
19927 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19931 else if((ulSubfrmInfo.switchPoints == 2) &&
19932 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19934 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19938 dlIdx = calcSfNum - maxUlSubfrms;
19941 cell->subFrms[dlIdx]->phichOffInfo.subframe = sfNum;
19942 cell->subFrms[dlIdx]->phichOffInfo.numSubfrms = 1;
19944 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset = calcSfnOffset;
19946 /* set dlIdx for which phich offset is updated */
19947 dlPres = dlPres | (1 << dlIdx);
19948 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19951 /* Set Invalid information for which phich offset is not present */
19953 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19956 /* If dlPres is 0, phich offset is not present in that DL index */
19957 if(! ((dlPres >> sfCount)&0x01))
19959 cell->subFrms[sfCount]->phichOffInfo.sfnOffset =
19960 RGSCH_INVALID_INFO;
19961 cell->subFrms[sfCount]->phichOffInfo.subframe =
19962 RGSCH_INVALID_INFO;
19963 cell->subFrms[sfCount]->phichOffInfo.numSubfrms = 0;
19967 /* DL subframes in the subsequent radio frames are
19968 * initialized with the previous radio frames */
19969 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms;
19970 dlIdx < maxDlSubfrms; dlIdx++)
19973 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19975 cell->subFrms[dlIdx]->phichOffInfo.subframe =
19976 cell->subFrms[sfNum]->phichOffInfo.subframe;
19978 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset =
19979 cell->subFrms[sfNum]->phichOffInfo.sfnOffset;
19986 * @brief Updation of Sch vars per TTI.
19990 * Function: rgSCHCmnUpdVars
19991 * Purpose: Updation of Sch vars per TTI.
19993 * @param[in] RgSchCellCb *cell
/* Per-TTI update of TDD scheduler timing indices for the cell:
 * computes the UL subframe index to schedule (DCI-0 -> PUSCH via the
 * k-table), the PHICH HARQ feedback indices, Msg3 scheduling index and
 * the SPS UL reservation index, then refreshes RACH parameters.
 * NOTE(review): sampled listing — some lines (braces/RETVOID) are not
 * visible here; comments describe only the visible statements. */
19998 PUBLIC Void rgSCHCmnUpdVars
20003 PUBLIC Void rgSCHCmnUpdVars(cell)
20007 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
20008 CmLteTimingInfo timeInfo;
20011 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
20014 TRC2(rgSCHCmnUpdVars);
20016 /* ccpu00132654-ADD- Initializing all the indices in every subframe*/
20017 rgSCHCmnInitVars(cell);
/* Index of the DL subframe whose PDCCH (DCI-0) controls UL scheduling,
 * i.e. current time advanced by the UL-control DL delta. */
20019 idx = (cell->crntTime.slot + TFU_ULCNTRL_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20020 /* Calculate the UL scheduling subframe idx based on the
20022 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][idx] != 0)
20024 /* PUSCH transmission is based on offset from DL
20025 * PDCCH scheduling */
20026 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20027 ulSubframe = rgSchTddPuschTxKTbl[ulDlCfgIdx][timeInfo.subframe];
20028 /* Add the DCI-0 to PUSCH time to get the time of UL subframe */
20029 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, ulSubframe);
/* Flat TTI counter (sfn*10 + subframe) of the scheduled UL subframe. */
20031 cellUl->schdTti = timeInfo.sfn * 10 + timeInfo.subframe;
20033 /* Fetch the corresponding UL subframe Idx in UL sf array */
20034 cellUl->schdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20035 /* Fetch the corresponding UL Harq Proc ID */
20036 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
20037 cellUl->schdTime = timeInfo;
20039 Mval = rgSchTddPhichMValTbl[ulDlCfgIdx][idx];
20042 /* Fetch the tx time for DL HIDCI-0 */
20043 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20044 /* Fetch the corresponding n-k tx time of PUSCH */
20045 cellUl->hqFdbkIdx[0] = rgSCHCmnGetPhichUlSfIdx(&timeInfo, cell);
20046 /* Retx will happen according to the Pusch k table */
20047 cellUl->reTxIdx[0] = cellUl->schdIdx;
/* UL/DL configuration 0 needs a special retx index derivation. */
20049 if(ulDlCfgIdx == 0)
20051 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[0] */
20052 cellUl->reTxIdx[0] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20053 cellUl->hqFdbkIdx[0]);
20056 /* At Idx 1 store the UL SF adjacent(left) to the UL SF
20058 cellUl->hqFdbkIdx[1] = (cellUl->hqFdbkIdx[0]-1 +
20059 cellUl->numUlSubfrms) % cellUl->numUlSubfrms;
20060 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[1] */
20061 cellUl->reTxIdx[1] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20062 cellUl->hqFdbkIdx[1]);
/* Reception-request index: only valid if the delta-advanced subframe
 * is an UL subframe in the UL/DL configuration table. */
20067 idx = (cell->crntTime.slot + TFU_RECPREQ_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20068 if (rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] == RG_SCH_TDD_UL_SUBFRAME)
20070 RGSCHCMNADDTOCRNTTIME(cell->crntTime, timeInfo, TFU_RECPREQ_DLDELTA)
20071 cellUl->rcpReqIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20073 idx = (cell->crntTime.slot+RG_SCH_CMN_DL_DELTA) % RGSCH_NUM_SUB_FRAMES;
20075 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
20076 special subframe */
20077 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] != RG_SCH_TDD_UL_SUBFRAME)
20079 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
/* Msg3 is offset from the RAR subframe via the Msg3 subframe table. */
20080 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20081 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, msg3Subfrm);
20082 cellUl->msg3SchdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20083 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
20086 if(!rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][idx])
/* No SPS UL reservation opportunity at this subframe. */
20088 cellUl->spsUlRsrvIdx = RGSCH_INVALID_INFO;
20092 /* introduce some reuse with above code? */
20094 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
20095 //offst = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20096 offst = rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][timeInfo.subframe];
20097 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, offst);
20098 cellUl->spsUlRsrvIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20099 /* The harq proc continues to be accessed and used the same delta before
20100 * actual data occurance, and hence use the same idx */
20101 cellUl->spsUlRsrvHqProcIdx = cellUl->schdHqProcIdx;
20105 /* RACHO: update cmn sched specific RACH variables,
20106 * mainly the prachMskIndx */
20107 rgSCHCmnUpdRachParam(cell);
20113 * @brief To get 'p' value from nCCE.
20117 * Function: rgSCHCmnGetPValFrmCCE
20118 * Purpose: Gets 'p' value for HARQ ACK/NACK reception from CCE.
20120 * @param[in] RgSchCellCb *cell
20121 * @param[in] U8 cce
/* Maps an nCCE value to the TDD 'p' value used for HARQ ACK/NACK
 * reception: scans the per-cell p-value threshold table for the first
 * entry exceeding the given cce.
 * NOTE(review): the return statements are outside this sampled view. */
20126 PUBLIC U8 rgSCHCmnGetPValFrmCCE
20132 PUBLIC U8 rgSCHCmnGetPValFrmCCE(cell, cce)
20138 TRC2(rgSCHCmnGetPValFrmCCE);
20140 for(i=1; i < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; i++)
20142 if(cce < cell->rgSchTddNpValTbl[i])
20151 /***********************************************************
20153 * Func : rgSCHCmnUlAdapRetx
20155 * Desc : Adaptive retransmission for an allocation.
20163 **********************************************************/
/* Prepares an UL allocation for adaptive retransmission: notifies the
 * UL HARQ module and selects the current iMcs — from the RV-index
 * mapping table when rvIdx is non-zero, otherwise the original iMcs. */
20165 PRIVATE Void rgSCHCmnUlAdapRetx
20167 RgSchUlAlloc *alloc,
20168 RgSchUlHqProcCb *proc
20171 PRIVATE Void rgSCHCmnUlAdapRetx(alloc, proc)
20172 RgSchUlAlloc *alloc;
20173 RgSchUlHqProcCb *proc;
20176 TRC2(rgSCHCmnUlAdapRetx);
/* Record the retransmission against the HARQ process. */
20178 rgSCHUhmRetx(proc, alloc);
20180 if (proc->rvIdx != 0)
/* Non-zero redundancy version: signal RV via the reserved MCS values. */
20182 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[proc->rvIdx];
20187 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
20193 * @brief Scheduler invocation per TTI.
20197 * Function: rgSCHCmnHdlUlInactUes
20200 * Invoked by: Common Scheduler
20202 * @param[in] RgSchCellCb *cell
/* Collects UEs that became UL-inactive (measurement gap / ACK-NACK
 * repetition) and hands the list to the specific UL scheduler so it can
 * remove them from scheduling consideration. Invoked per TTI. */
20206 PRIVATE Void rgSCHCmnHdlUlInactUes
20211 PRIVATE Void rgSCHCmnHdlUlInactUes(cell)
20215 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20216 CmLListCp ulInactvLst;
20217 TRC2(rgSCHCmnHdlUlInactUes);
20218 /* Get a List of Inactv UEs for UL*/
20219 cmLListInit(&ulInactvLst);
20221 /* Trigger Spfc Schedulers with Inactive UEs */
20222 rgSCHMeasGapANRepGetUlInactvUe (cell, &ulInactvLst);
20223 /* take care of this in UL retransmission */
20224 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInactvLst);
20230 * @brief Scheduler invocation per TTI.
20234 * Function: rgSCHCmnHdlDlInactUes
20237 * Invoked by: Common Scheduler
20239 * @param[in] RgSchCellCb *cell
/* DL counterpart of rgSCHCmnHdlUlInactUes: gathers DL-inactive UEs and
 * notifies the specific DL scheduler via its API table. */
20243 PRIVATE Void rgSCHCmnHdlDlInactUes
20248 PRIVATE Void rgSCHCmnHdlDlInactUes(cell)
20252 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20253 CmLListCp dlInactvLst;
20254 TRC2(rgSCHCmnHdlDlInactUes);
20255 /* Get a List of Inactv UEs for DL */
20256 cmLListInit(&dlInactvLst);
20258 /* Trigger Spfc Schedulers with Inactive UEs */
20259 rgSCHMeasGapANRepGetDlInactvUe (cell, &dlInactvLst);
20261 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInactvLst);
20265 /* RACHO: Rach handover functions start here */
20266 /***********************************************************
20268 * Func : rgSCHCmnUeIdleExdThrsld
20270 * Desc : RETURN ROK if UE has been idle more
20279 **********************************************************/
/* Returns ROK when the UE's UL idle time (subframes since its last UL
 * transmission) exceeds the per-UE idle threshold.
 * NOTE(review): return statements are outside this sampled view. */
20281 PRIVATE S16 rgSCHCmnUeIdleExdThrsld
20287 PRIVATE S16 rgSCHCmnUeIdleExdThrsld(cell, ue)
20292 /* Time difference in subframes */
20293 U32 sfDiff = RGSCH_CALC_SF_DIFF(cell->crntTime, ue->ul.ulTransTime);
20295 TRC2(rgSCHCmnUeIdleExdThrsld);
20297 if (sfDiff > (U32)RG_SCH_CMN_UE_IDLE_THRSLD(ue))
20309 * @brief Scheduler processing for Ded Preambles on cell configuration.
20313 * Function : rgSCHCmnCfgRachDedPrm
20315 * This function does requisite initialisation
20316 * for RACH Ded Preambles.
20319 * @param[in] RgSchCellCb *cell
/* One-time scheduler initialisation for dedicated RACH preambles:
 * copies the preamble set from cell config, initialises the HO and
 * PDCCH-order lists and the rapId->UE map, and advances "applFrm" to
 * the first PRACH opportunity at least "gap" subframes in the future.
 * No-op when no dedicated preamble set is configured. */
20323 PRIVATE Void rgSCHCmnCfgRachDedPrm
20328 PRIVATE Void rgSCHCmnCfgRachDedPrm(cell)
20332 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20333 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20336 TRC2(rgSCHCmnCfgRachDedPrm);
20338 if (cell->macPreambleSet.pres == NOTPRSNT)
20342 cellSch->rachCfg.numDedPrm = cell->macPreambleSet.size;
20343 cellSch->rachCfg.dedPrmStart = cell->macPreambleSet.start;
20344 /* Initialize handover List */
20345 cmLListInit(&cellSch->rachCfg.hoUeLst);
20346 /* Initialize pdcch Order List */
20347 cmLListInit(&cellSch->rachCfg.pdcchOdrLst);
20349 /* Intialize the rapId to UE mapping structure */
20350 for (cnt = 0; cnt<cellSch->rachCfg.numDedPrm; cnt++)
20352 cellSch->rachCfg.rapIdMap[cnt].rapId = cellSch->rachCfg.dedPrmStart + \
20354 cmLListInit(&cellSch->rachCfg.rapIdMap[cnt].assgndUes);
20356 /* Perform Prach Mask Idx, remDedPrm, applFrm initializations */
20357 /* Set remDedPrm as numDedPrm */
20358 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20359 /* Initialize applFrm */
20360 cellSch->rachCfg.prachMskIndx = 0;
/* Align applFrm.sfn to the parity (even/odd/any) of the configured
 * RA occasion. */
20361 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_EVEN)
20363 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + \
20364 (cell->crntTime.sfn % 2)) % RGSCH_MAX_SFN;
20367 else if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ODD)
20369 if((cell->crntTime.sfn%2) == 0)
20371 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + 1)\
20378 cellSch->rachCfg.applFrm.sfn = cell->crntTime.sfn;
20380 /* Initialize cellSch->rachCfg.applFrm as >= crntTime.
20381 * This is because of RGSCH_CALC_SF_DIFF logic */
20382 if (cellSch->rachCfg.applFrm.sfn == cell->crntTime.sfn)
/* Find the first configured PRACH subframe later than "now" within
 * this frame; if none, move to the next eligible frame. */
20384 while (cellSch->rachCfg.prachMskIndx < cell->rachCfg.raOccasion.size)
20386 if (cell->crntTime.slot <\
20387 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx])
20391 cellSch->rachCfg.prachMskIndx++;
20393 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size)
20395 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20397 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) %\
20402 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) %\
20405 cellSch->rachCfg.prachMskIndx = 0;
20407 cellSch->rachCfg.applFrm.slot = \
20408 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20412 cellSch->rachCfg.applFrm.slot = \
20413 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20416 /* Note first param to this macro should always be the latest in time */
20417 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
/* Keep stepping to the next PRACH opportunity until it is at least
 * "gap" subframes ahead of current time. */
20418 while (sfDiff <= gap)
20420 rgSCHCmnUpdNxtPrchMskIdx(cell);
20421 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
20428 * @brief Updates the PRACH MASK INDEX.
20432 * Function: rgSCHCmnUpdNxtPrchMskIdx
20433 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20434 * CFG is always >= "n"+"DELTA", where "n" is the crntTime
20435 * of the cell. If not, applFrm is updated to the next avl
20436 * PRACH oppurtunity as per the PRACH Cfg Index configuration.
20439 * Invoked by: Common Scheduler
20441 * @param[in] RgSchCellCb *cell
/* Advances applFrm/prachMskIndx to the next configured PRACH
 * opportunity: wraps to the next eligible SFN (+1 for ANY, +2 for
 * EVEN/ODD parity) when the last subframe of the current frame's RA
 * occasions has been consumed, otherwise steps to the next subframe. */
20445 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx
20450 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx(cell)
20454 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20455 TRC2(rgSCHCmnUpdNxtPrchMskIdx);
20457 /* Determine the next prach mask Index */
20458 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size - 1)
20460 /* PRACH within applFrm.sfn are done, go to next AVL sfn */
20461 cellSch->rachCfg.prachMskIndx = 0;
20462 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20464 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) % \
20467 else/* RGR_SFN_EVEN or RGR_SFN_ODD */
/* Preserve frame parity by skipping one SFN. */
20469 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) % \
20472 cellSch->rachCfg.applFrm.slot = cell->rachCfg.raOccasion.\
20475 else /* applFrm.sfn is still valid */
20477 cellSch->rachCfg.prachMskIndx += 1;
/* Guard against indexing past the RGR subframe-number array. */
20478 if ( cellSch->rachCfg.prachMskIndx < RGR_MAX_SUBFRAME_NUM )
20480 cellSch->rachCfg.applFrm.slot = \
20481 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20488 * @brief Updates the Ded preamble RACH parameters
20493 * Function: rgSCHCmnUpdRachParam
20494 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20495 * CFG is always >= "n"+"6"+"DELTA", where "n" is the crntTime
20496 * of the cell. If not, applFrm is updated to the next avl
20497 * PRACH oppurtunity as per the PRACH Cfg Index configuration,
20498 * accordingly the "remDedPrm" is reset to "numDedPrm" and
20499 * "prachMskIdx" field is updated as per "applFrm".
20502 * Invoked by: Common Scheduler
20504 * @param[in] RgSchCellCb *cell
/* Per-TTI maintenance of dedicated-preamble RACH state: when the stored
 * PRACH opportunity (applFrm) is no longer far enough in the future
 * (<= gap subframes away), moves to the next opportunity and restores
 * the full pool of dedicated preambles. No-op when dedicated preambles
 * are not configured. */
20508 PRIVATE Void rgSCHCmnUpdRachParam
20513 PRIVATE Void rgSCHCmnUpdRachParam(cell)
20518 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20519 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20521 TRC2(rgSCHCmnUpdRachParam);
20523 if (cell->macPreambleSet.pres == NOTPRSNT)
20527 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, \
20531 /* applFrm is still a valid next Prach Oppurtunity */
20534 rgSCHCmnUpdNxtPrchMskIdx(cell);
20535 /* Reset remDedPrm as numDedPrm */
20536 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20542 * @brief Dedicated Preamble allocation function.
20546 * Function: rgSCHCmnAllocPOParam
20547 * Purpose: Allocate pdcch, rapId and PrachMskIdx.
20548 * Set mapping of UE with the allocated rapId.
20550 * Invoked by: Common Scheduler
20552 * @param[in] RgSchCellCb *cell
20553 * @param[in] RgSchDlSf *dlSf
20554 * @param[in] RgSchUeCb *ue
20555 * @param[out] RgSchPdcch **pdcch
20556 * @param[out] U8 *rapId
20557 * @param[out] U8 *prachMskIdx
/* Allocates the resources needed to issue a PDCCH order for a UE:
 * a format-1A PDCCH, and — when dedicated preambles are configured —
 * a rapId drawn from the remaining dedicated pool plus the PRACH mask
 * index of the stored opportunity. The UE is then linked into the
 * rapId->UE map for later Msg1 correlation.
 * NOTE(review): failure-return paths are outside this sampled view. */
20561 PRIVATE S16 rgSCHCmnAllocPOParam
20566 RgSchPdcch **pdcch,
20571 PRIVATE S16 rgSCHCmnAllocPOParam(cell, dlSf, ue, pdcch, rapId, prachMskIdx)
20575 RgSchPdcch **pdcch;
20581 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20582 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20584 TRC2(rgSCHCmnAllocPOParam);
20586 if (cell->macPreambleSet.pres == PRSNT_NODEF)
/* No dedicated preambles left for this opportunity. */
20588 if (cellSch->rachCfg.remDedPrm == 0)
20592 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20593 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
20597 /* The stored prachMskIdx is the index of PRACH Oppurtunities in
20598 * raOccasions.subframes[].
20599 * Converting the same to the actual PRACHMskIdx to be transmitted. */
20600 *prachMskIdx = cellSch->rachCfg.prachMskIndx + 1;
20601 /* Distribution starts from dedPrmStart till dedPrmStart + numDedPrm */
20602 *rapId = cellSch->rachCfg.dedPrmStart +
20603 cellSch->rachCfg.numDedPrm - cellSch->rachCfg.remDedPrm;
20604 cellSch->rachCfg.remDedPrm--;
20605 /* Map UE with the allocated RapId */
20606 ueDl->rachInfo.asgnOppr = cellSch->rachCfg.applFrm;
20607 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, cellSch->rachCfg.rapIdMap, (*rapId - cellSch->rachCfg.dedPrmStart));
20608 cmLListAdd2Tail(&cellSch->rachCfg.rapIdMap[*rapId - cellSch->rachCfg.dedPrmStart].assgndUes,
20609 &ueDl->rachInfo.rapIdLnk);
20610 ueDl->rachInfo.rapIdLnk.node = (PTR)ue;
20611 ueDl->rachInfo.poRapId = *rapId;
20613 else /* if dedicated preambles not configured */
20615 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20616 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
20628 * @brief Dowlink Scheduling Handler.
20632 * Function: rgSCHCmnGenPdcchOrder
20633 * Purpose: For each UE in PO Q, grab a PDCCH,
20634 * get an available ded RapId and fill PDCCH
20635 * with PO information.
20637 * Invoked by: Common Scheduler
20639 * @param[in] RgSchCellCb *cell
20640 * @param[in] RgSchDlSf *dlSf
/* Walks the PDCCH-order queue each DL subframe: for every eligible UE
 * (not in a measurement gap, not DRX/meas-gap inactive) allocates PDCCH
 * plus rapId/prachMskIdx, fills the PDCCH-order DCI, dequeues the UE
 * and resets its power state. Stops when allocation fails, leaving the
 * remaining UEs for a later opportunity. */
20644 PRIVATE Void rgSCHCmnGenPdcchOrder
20650 PRIVATE Void rgSCHCmnGenPdcchOrder(cell, dlSf)
20655 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20656 CmLList *node = cellSch->rachCfg.pdcchOdrLst.first;
20660 RgSchPdcch *pdcch = NULLP;
20662 TRC2(rgSCHCmnGenPdcchOrder);
20666 ue = (RgSchUeCb *)node->node;
20668 /* Skip sending for this subframe is Measuring or inActive in UL due
20669 * to MeasGap or inactie due to DRX
20671 if ((ue->measGapCb.isMeasuring == TRUE) ||
20672 (ue->ul.ulInactvMask & RG_MEASGAP_INACTIVE) ||
20673 (ue->isDrxEnabled &&
20674 ue->dl.dlInactvMask & RG_DRX_INACTIVE)
20679 if (rgSCHCmnAllocPOParam(cell, dlSf, ue, &pdcch, &rapId,\
20680 &prachMskIdx) != ROK)
20682 /* No More rapIds left for the valid next avl Oppurtunity.
20683 * Unsatisfied UEs here would be given a chance, when the
20684 * prach Mask Index changes as per rachUpd every TTI */
20686 /* PDDCH can also be ordered with rapId=0, prachMskIdx=0
20687 * so that UE triggers a RACH procedure with non-dedicated preamble.
20688 * But the implementation here does not do this. Instead, the "break"
20689 * here implies, that PDCCH Odr always given with valid rapId!=0,
20690 * prachMskIdx!=0 if dedicated preambles are configured.
20691 * If not configured, then trigger a PO with rapId=0,prchMskIdx=0*/
20694 /* Fill pdcch with pdcch odr information */
20695 rgSCHCmnFillPdcchOdr2Sf(cell, ue, pdcch, rapId, prachMskIdx);
20696 /* Remove this UE from the PDCCH ORDER QUEUE */
20697 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20698 /* Reset UE's power state */
20699 rgSCHPwrUeReset(cell, ue);
20706 * @brief This function add UE to PdcchOdr Q if not already present.
20710 * Function: rgSCHCmnDlAdd2PdcchOdrQ
20713 * Invoked by: CMN Scheduler
20715 * @param[in] RgSchCellCb* cell
20716 * @param[in] RgSchUeCb* ue
/* Appends the UE to the cell's PDCCH-order queue, guarded so a UE
 * already on the queue (non-NULL poLnk.node) is not added twice. */
20721 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ
20727 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ(cell, ue)
20732 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20733 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20735 TRC2(rgSCHCmnDlAdd2PdcchOdrQ);
20737 if (ueDl->rachInfo.poLnk.node == NULLP)
20739 cmLListAdd2Tail(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20740 ueDl->rachInfo.poLnk.node = (PTR)ue;
20747 * @brief This function rmvs UE to PdcchOdr Q if not already present.
20751 * Function: rgSCHCmnDlRmvFrmPdcchOdrQ
20754 * Invoked by: CMN Scheduler
20756 * @param[in] RgSchCellCb* cell
20757 * @param[in] RgSchUeCb* ue
/* Removes the UE from the cell's PDCCH-order queue and clears its link
 * node so rgSCHCmnDlAdd2PdcchOdrQ may re-add it later.
 * NOTE(review): no "already removed" guard is visible here — callers
 * appear responsible for ensuring the UE is queued. */
20762 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ
20768 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue)
20773 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20774 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20776 TRC2(rgSCHCmnDlRmvFrmPdcchOdrQ);
20778 cmLListDelFrm(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20779 ueDl->rachInfo.poLnk.node = NULLP;
20784 * @brief Fill pdcch with PDCCH order information.
20788 * Function: rgSCHCmnFillPdcchOdr2Sf
20789 * Purpose: Fill PDCCH with PDCCH order information,
20791 * Invoked by: Common Scheduler
20793 * @param[in] RgSchUeCb *ue
20794 * @param[in] RgSchPdcch *pdcch
20795 * @param[in] U8 rapId
20796 * @param[in] U8 prachMskIdx
/* Fills an already-allocated PDCCH with PDCCH-order (DCI format 1A)
 * contents for the UE: C-RNTI, preamble index and PRACH mask index.
 * When aperiodic CQI is configured, also arms an immediate A-CQI
 * request on the serving cell (CR ccpu00144525). */
20800 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf
20809 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf(ue, pdcch, rapId, prachMskIdx)
20817 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
20819 TRC2(rgSCHCmnFillPdcchOdr2Sf);
20821 pdcch->rnti = ue->ueId;
20822 pdcch->dci.dciFormat = TFU_DCI_FORMAT_1A;
20823 pdcch->dci.u.format1aInfo.isPdcchOrder = TRUE;
20824 pdcch->dci.u.format1aInfo.t.pdcchOrder.preambleIdx = rapId;
20825 pdcch->dci.u.format1aInfo.t.pdcchOrder.prachMaskIdx = prachMskIdx;
20827 /* Request for APer CQI immediately after PDCCH Order */
20828 /* CR ccpu00144525 */
20830 if(ue->dl.ueDlCqiCfg.aprdCqiCfg.pres)
20832 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
/* Reset the aperiodic CQI trigger weight so the request fires now. */
20833 acqiCb->aCqiTrigWt = 0;
20842 * @brief UE deletion for scheduler.
20846 * Function : rgSCHCmnDelRachInfo
20848 * This functions deletes all scheduler information
20849 * pertaining to an UE.
20851 * @param[in] RgSchCellCb *cell
20852 * @param[in] RgSchUeCb *ue
/* UE-deletion cleanup for common-scheduler RACH state: unlinks the UE
 * from the PDCCH-order queue, the handover UE list and the rapId->UE
 * map, clearing each link node so no dangling references remain. */
20856 PRIVATE Void rgSCHCmnDelRachInfo
20862 PRIVATE Void rgSCHCmnDelRachInfo(cell, ue)
20867 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20868 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20871 TRC2(rgSCHCmnDelRachInfo);
20873 if (ueDl->rachInfo.poLnk.node)
20875 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20877 if (ueDl->rachInfo.hoLnk.node)
20879 cmLListDelFrm(&cellSch->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
20880 ueDl->rachInfo.hoLnk.node = NULLP;
20882 if (ueDl->rachInfo.rapIdLnk.node)
/* Recover the map slot from the rapId assigned at PDCCH-order time. */
20884 rapIdIdx = ueDl->rachInfo.poRapId - cellSch->rachCfg.dedPrmStart;
20885 cmLListDelFrm(&cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes,
20886 &ueDl->rachInfo.rapIdLnk);
20887 ueDl->rachInfo.rapIdLnk.node = NULLP;
20893 * @brief This function retrieves the ue which has sent this raReq
20894 * and it allocates grant for UEs undergoing (for which RAR
20895 * is being generated) HandOver/PdcchOrder.
20900 * Function: rgSCHCmnHdlHoPo
20901 * Purpose: This function retrieves the ue which has sent this raReq
20902 * and it allocates grant for UEs undergoing (for which RAR
20903 * is being generated) HandOver/PdcchOrder.
20905 * Invoked by: Common Scheduler
20907 * @param[in] RgSchCellCb *cell
20908 * @param[out] CmLListCp *raRspLst
20909 * @param[in] RgSchRaReqInfo *raReq
/* RAR-generation hook for a RA request from a UE doing handover or a
 * PDCCH-ordered RACH: informs DRX of the dedicated RA (when DRX is
 * enabled) and then allocates the UL grant for the RAR response. */
20914 PRIVATE Void rgSCHCmnHdlHoPo
20917 CmLListCp *raRspLst,
20918 RgSchRaReqInfo *raReq
20921 PRIVATE Void rgSCHCmnHdlHoPo(cell, raRspLst, raReq)
20923 CmLListCp *raRspLst;
20924 RgSchRaReqInfo *raReq;
20927 RgSchUeCb *ue = raReq->ue;
20928 TRC2(rgSCHCmnHdlHoPo);
20930 if ( ue->isDrxEnabled )
20932 rgSCHDrxDedRa(cell,ue);
20934 rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq);
20939 * @brief This function retrieves the UE which has sent this raReq
20940 * for handover case.
20945 * Function: rgSCHCmnGetHoUe
20946 * Purpose: This function retrieves the UE which has sent this raReq
20947 * for handover case.
20949 * Invoked by: Common Scheduler
20951 * @param[in] RgSchCellCb *cell
20952 * @param[in] RgSchRaReqInfo *raReq
20953 * @return RgSchUeCb*
/* Looks up the handover UE that owns the given dedicated rapId by
 * scanning the cell's handover UE list.
 * NOTE(review): the return statements (matched UE / NULLP) and the
 * loop advance are outside this sampled view. */
20957 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe
20963 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe(cell, rapId)
20968 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20972 RgSchCmnDlUe *ueDl;
20973 TRC2(rgSCHCmnGetHoUe);
20975 ueLst = &cellSch->rachCfg.hoUeLst;
20976 node = ueLst->first;
20979 ue = (RgSchUeCb *)node->node;
20981 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20982 if (ueDl->rachInfo.hoRapId == rapId)
/* Removes from the handover UE list the UE (if any) holding the given
 * dedicated preamble id, clearing its hoLnk node.
 * Fix: the K&R-style definition line was missing the "Void" return
 * type, conflicting with the ANSI prototype above it (implicit-int
 * definition vs. Void declaration — invalid since C99). */
20991 PRIVATE Void rgSCHCmnDelDedPreamble
20997 PRIVATE Void rgSCHCmnDelDedPreamble(cell, preambleId)
21002 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
21006 RgSchCmnDlUe *ueDl;
21007 TRC2(rgSCHCmnDelDedPreamble);
21009 ueLst = &cellSch->rachCfg.hoUeLst;
21010 node = ueLst->first;
21013 ue = (RgSchUeCb *)node->node;
21015 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
/* Match on the UE's stored handover rapId and unlink it. */
21016 if (ueDl->rachInfo.hoRapId == preambleId)
21018 cmLListDelFrm(ueLst, &ueDl->rachInfo.hoLnk);
21019 ueDl->rachInfo.hoLnk.node = (PTR)NULLP;
21025 * @brief This function retrieves the UE which has sent this raReq
21026 * for PDCCh Order case.
21031 * Function: rgSCHCmnGetPoUe
21032 * Purpose: This function retrieves the UE which has sent this raReq
21033 * for PDCCH Order case.
21035 * Invoked by: Common Scheduler
21037 * @param[in] RgSchCellCb *cell
21038 * @param[in] RgSchRaReqInfo *raReq
21039 * @return RgSchUeCb*
/* Looks up the PDCCH-ordered UE for a received rapId: walks that
 * rapId's assigned-UE list, unconditionally unlinking each entry
 * (stale associations are purged as a side effect), and matches the
 * UE whose stored PRACH opportunity equals the RA timing.
 * NOTE(review): return statements are outside this sampled view. */
21043 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe
21047 CmLteTimingInfo timingInfo
21050 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe(cell, rapId, timingInfo)
21053 CmLteTimingInfo timingInfo;
21056 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
21060 RgSchCmnDlUe *ueDl;
21062 TRC2(rgSCHCmnGetPoUe);
21064 rapIdIdx = rapId -cellSch->rachCfg.dedPrmStart;
21065 ueLst = &cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes;
21066 node = ueLst->first;
21069 ue = (RgSchUeCb *)node->node;
21071 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
21072 /* Remove UEs irrespective.
21073 * Old UE associations are removed.*/
21074 cmLListDelFrm(ueLst, &ueDl->rachInfo.rapIdLnk);
21075 ueDl->rachInfo.rapIdLnk.node = (PTR)NULLP;
21076 if (RGSCH_TIMEINFO_SAME(ueDl->rachInfo.asgnOppr, timingInfo))
21087 * @brief This function returns the valid UL cqi for a given UE.
21091 * Function: rgSCHCmnUlGetCqi
21092 * Purpose: This function returns the "valid UL cqi" for a given UE
21093 * based on UE category
21095 * Invoked by: Scheduler
21097 * @param[in] RgSchUeCb *ue
21098 * @param[in] U8 ueCtgy
/* Returns the UL CQI to use for a UE, capped by the UE-category limit:
 * starts from maxUlCqi and only accepts validUlCqi / crntUlCqi[0] when
 * the UE is CAT-5 or the reported value does not exceed the cap.
 * NOTE(review): the branch structure between the two ifs (and the
 * RETVALUE) is not fully visible in this sampled listing. */
21102 PUBLIC U8 rgSCHCmnUlGetCqi
21106 CmLteUeCategory ueCtgy
21109 PUBLIC U8 rgSCHCmnUlGetCqi(cell, ue, ueCtgy)
21112 CmLteUeCategory ueCtgy;
21115 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21118 TRC2(rgSCHCmnUlGetCqi);
21120 cqi = ueUl->maxUlCqi;
21122 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21123 (ueUl->validUlCqi > ueUl->maxUlCqi)))
21125 cqi = ueUl->validUlCqi;
21128 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21129 (ueUl->crntUlCqi[0] > ueUl->maxUlCqi )))
21131 cqi = ueUl->crntUlCqi[0];
21135 }/* End of rgSCHCmnUlGetCqi */
21137 /***********************************************************
21139 * Func : rgSCHCmnUlRbAllocForPoHoUe
21141 * Desc : Do uplink RB allocation for a HO/PO UE.
21145 * Notes: Note that as of now, for retx, maxRb
21146 * is not considered. Alternatives, such
21147 * as dropping retx if it crosses maxRb
21148 * could be considered.
21152 **********************************************************/
/* UL RB allocation for a HO/PDCCH-order UE's Msg3-equivalent grant:
 * derives iTbs/iMcs from the UE's UL CQI (clamped to the Msg3 MCS
 * cap), sizes the allocation in subbands from the requested bytes and
 * spectral efficiency, reserves subbands in the UL subframe, and fills
 * the grant plus new-transmission HARQ state.
 * Returns ROK/RFAILED (failure paths partly outside this view).
 * Note (from header): maxRb is not enforced for retx. */
21154 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe
21162 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, maxRb)
21169 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21170 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21171 U8 sbSize = cellUl->sbSize;
21172 U32 maxBits = ue->ul.maxBytesPerUePerTti*8;
21174 RgSchUlAlloc *alloc;
21184 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->msg3SchdHqProcIdx];
21185 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
21187 TRC2(rgSCHCmnUlRbAllocForPoHoUe);
/* No free hole in the subframe means nothing can be allocated. */
21188 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
21192 /*MS_WORKAROUND for HO ccpu00121116*/
21193 cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
21194 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend], cqi);
21195 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21196 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
/* Step the CQI down until the MCS fits the Msg3 cap. */
21197 while(iMcs > RG_SCH_CMN_MAX_MSG3_IMCS)
21200 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21201 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg);
21203 /* Filling the modorder in the grant structure*/
21204 RG_SCH_UL_MCS_TO_MODODR(iMcs,modOdr);
/* Spectral-efficiency table differs for normal vs extended UL CP. */
21205 if (!cell->isCpUlExtend)
21207 eff = rgSchCmnNorUlEff[0][iTbs];
21211 eff = rgSchCmnExtUlEff[0][iTbs];
21214 bits = ueUl->alloc.reqBytes * 8;
21216 #if (ERRCLASS & ERRCLS_DEBUG)
/* Request smaller than the minimum TB for this iTbs: allocate the
 * minimum number of subbands instead. */
21223 if (bits < rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs))
21226 nPrb = numSb * sbSize;
21230 if (bits > maxBits)
/* Clamp to the per-UE per-TTI byte budget. */
21233 nPrb = bits * 1024 / eff / RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
21238 numSb = nPrb / sbSize;
21242 /*ccpu00128775:MOD-Change to get upper threshold nPrb*/
21243 nPrb = RGSCH_CEIL((RGSCH_CEIL(bits * 1024, eff)),
21244 RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl));
21249 numSb = RGSCH_DIV_ROUND(nPrb, sbSize);
21254 alloc = rgSCHCmnUlSbAlloc(sf, (U8)RGSCH_MIN(numSb, cellUl->maxSbPerUe),\
21256 if (alloc == NULLP)
21258 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
21259 "rgSCHCmnUlRbAllocForPoHoUe(): Could not get UlAlloc");
21262 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
21264 /* Filling the modorder in the grant structure start*/
21265 alloc->grnt.modOdr = (TfuModScheme) modOdr;
21266 alloc->grnt.iMcs = iMcs;
21267 alloc->grnt.iMcsCrnt = iMcsCrnt;
21268 alloc->grnt.hop = 0;
21269 /* Fix for ccpu00123915*/
21270 alloc->forMsg3 = TRUE;
21271 alloc->hqProc = proc;
21272 alloc->hqProc->ulSfIdx = cellUl->msg3SchdIdx;
21274 alloc->rnti = ue->ueId;
21275 /* updating initNumRbs in case of HO */
21277 ue->initNumRbs = alloc->grnt.numRb;
21279 ueUl->alloc.alloc = alloc;
21280 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
21281 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
/* Transport block size in bytes from the 1-layer TB size table. */
21282 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
21283 /* MS_WORKAROUND for HO ccpu00121124*/
21284 /*[Adi temp change] Need to fil modOdr */
21285 RG_SCH_UL_MCS_TO_MODODR(alloc->grnt.iMcsCrnt,alloc->grnt.modOdr);
21286 rgSCHUhmNewTx(proc, ueUl->hqEnt.maxHqRetx, alloc);
21287 /* No grant attr recorded now */
21292 * @brief This function allocates grant for UEs undergoing (for which RAR
21293 * is being generated) HandOver/PdcchOrder.
21298 * Function: rgSCHCmnAllocPoHoGrnt
21299 * Purpose: This function allocates grant for UEs undergoing (for which RAR
21300 * is being generated) HandOver/PdcchOrder.
21302 * Invoked by: Common Scheduler
21304 * @param[in] RgSchCellCb *cell
21305 * @param[out] CmLListCp *raRspLst,
21306 * @param[in] RgSchUeCb *ue
21307 * @param[in] RgSchRaReqInfo *raReq
/* Builds the RAR grant for a HO/PDCCH-order UE: clears stale UL
 * allocations, performs RB allocation in the Msg3 subframe, copies the
 * resulting grant fields (hop, RBs, TPC, MCS, TA, size) into the UE's
 * rarGrnt, optionally sets the aperiodic CQI request bit(s), and links
 * the UE onto the RAR response list. */
21312 PRIVATE Void rgSCHCmnAllocPoHoGrnt
21315 CmLListCp *raRspLst,
21317 RgSchRaReqInfo *raReq
21320 PRIVATE Void rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq)
21322 CmLListCp *raRspLst;
21324 RgSchRaReqInfo *raReq;
21327 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21328 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21330 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
21332 TRC2(rgSCHCmnAllocPoHoGrnt);
21334 /* Clearing previous allocs if any*/
21335 rgSCHCmnUlUeDelAllocs(cell, ue);
21336 /* Fix : syed allocs are limited */
21337 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
21341 ueUl->alloc.reqBytes = RG_SCH_MIN_GRNT_HOPO;
21342 if (rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, RGSCH_MAX_UL_RB) != ROK)
21347 /* Fill grant information */
21348 grnt = &ueUl->alloc.alloc->grnt;
21353 RLOG_ARG1(L_ERROR,DBG_INSTID,cell->instIdx, "Failed to get"
21354 "the grant for HO/PDCCH Order. CRNTI:%d",ue->ueId);
21357 ue->ul.rarGrnt.rapId = raReq->raReq.rapId;
21358 ue->ul.rarGrnt.hop = grnt->hop;
21359 ue->ul.rarGrnt.rbStart = grnt->rbStart;
21360 ue->ul.rarGrnt.numRb = grnt->numRb;
21361 ue->ul.rarGrnt.tpc = grnt->tpc;
21362 ue->ul.rarGrnt.iMcsCrnt = grnt->iMcsCrnt;
21363 ue->ul.rarGrnt.ta.pres = TRUE;
21364 ue->ul.rarGrnt.ta.val = raReq->raReq.ta;
21365 ue->ul.rarGrnt.datSz = grnt->datSz;
/* A-CQI request only when the per-SF A-CQI budget allows and a CQI
 * request is pending for this UE. */
21366 if((sf->numACqiCount < RG_SCH_MAX_ACQI_PER_ULSF) && (RG_SCH_APCQI_NO != ue->dl.reqForCqi))
21370 /* Send two bits cqireq field if more than one cells are configured else one*/
21371 for (idx = 1;idx < CM_LTE_MAX_CELLS;idx++)
21373 if (ue->cellInfo[idx] != NULLP)
21375 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
/* Loop ran to completion => single-cell UE. */
21379 if (idx == CM_LTE_MAX_CELLS)
21382 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
21384 ue->dl.reqForCqi = RG_SCH_APCQI_NO;
21385 sf->numACqiCount++;
21389 ue->ul.rarGrnt.cqiReqBit = 0;
21391 /* Attach Ho/Po allocation to RAR Rsp cont free Lst */
21392 cmLListAdd2Tail(raRspLst, &ue->ul.rarGrnt.raRspLnk);
21393 ue->ul.rarGrnt.raRspLnk.node = (PTR)ue;
21399 * @brief This is a utility function to set the fields in
21400 * an UL harq proc which is identified for non-adaptive retx
21404 * Function: rgSCHCmnUlNonadapRetx
21405 * Purpose: Sets the fields in UL Harq proc for non-adaptive retx
21407 * @param[in] RgSchCmnUlCell *cellUl
21408 * @param[out] RgSchUlAlloc *alloc
21409 * @param[in] U8 idx
/* Marks an UL allocation for non-adaptive retransmission: records the
 * retx with the HARQ module, points the HARQ process at the retx
 * subframe index, derives iMcsCrnt from the RV index (or keeps the
 * original iMcs for RV0), and clears the PDCCH — non-adaptive retx
 * needs no new DCI. */
21414 PRIVATE Void rgSCHCmnUlNonadapRetx
21416 RgSchCmnUlCell *cellUl,
21417 RgSchUlAlloc *alloc,
21421 PRIVATE Void rgSCHCmnUlNonadapRetx(cellUl, alloc, idx)
21422 RgSchCmnUlCell *cellUl;
21423 RgSchUlAlloc *alloc;
21427 TRC2(rgSCHCmnUlNonadapRetx);
21428 rgSCHUhmRetx(alloc->hqProc, alloc);
21430 /* Update alloc to retx */
21431 alloc->hqProc->isRetx = TRUE;
21432 alloc->hqProc->ulSfIdx = cellUl->reTxIdx[idx];
21434 if (alloc->hqProc->rvIdx != 0)
21436 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[alloc->hqProc->rvIdx];
21440 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
21442 alloc->grnt.isRtx = TRUE;
/* No PDCCH accompanies a non-adaptive retransmission. */
21443 alloc->pdcch = NULLP;
21448 * @brief Check if 2 allocs overlap
21452 * Function : rgSCHCmnUlAllocsOvrLap
21454 * - Return TRUE if alloc1 and alloc2 overlap.
21456 * @param[in] RgSchUlAlloc *alloc1
21457 * @param[in] RgSchUlAlloc *alloc2
/* TRUE when the subband ranges [sbStart, sbStart+numSb-1] of the two
 * UL allocations intersect: either range starting inside the other
 * counts as overlap. */
21461 PRIVATE Bool rgSCHCmnUlAllocsOvrLap
21463 RgSchUlAlloc *alloc1,
21464 RgSchUlAlloc *alloc2
21467 PRIVATE Bool rgSCHCmnUlAllocsOvrLap(alloc1, alloc2)
21468 RgSchUlAlloc *alloc1;
21469 RgSchUlAlloc *alloc2;
21473 TRC2(rgSCHCmnUlAllocsOvrLap);
21475 if (((alloc1->sbStart >= alloc2->sbStart) &&
21476 (alloc1->sbStart <= alloc2->sbStart + alloc2->numSb-1)) ||
21477 ((alloc2->sbStart >= alloc1->sbStart) &&
21478 (alloc2->sbStart <= alloc1->sbStart + alloc1->numSb-1)))
21486 * @brief Copy allocation Info from src to dst.
21490 * Function : rgSCHCmnUlCpyAllocInfo
21492 * - Copy allocation Info from src to dst.
21494 * @param[in] RgSchUlAlloc *srcAlloc
21495 * @param[in] RgSchUlAlloc *dstAlloc
/* Copies an UL allocation's content (grant, HARQ process link, UE,
 * RNTI, Msg3 flag, raCb, PDCCH) from srcAlloc to dstAlloc and repairs
 * every back-reference — HARQ proc, UE's cached alloc, and the UL SPS
 * current-alloc pointer — so they point at dstAlloc. */
21499 PRIVATE Void rgSCHCmnUlCpyAllocInfo
21502 RgSchUlAlloc *srcAlloc,
21503 RgSchUlAlloc *dstAlloc
21506 PRIVATE Void rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc)
21508 RgSchUlAlloc *srcAlloc;
21509 RgSchUlAlloc *dstAlloc;
21512 RgSchCmnUlUe *ueUl;
21513 TRC2(rgSCHCmnUlCpyAllocInfo);
21515 dstAlloc->grnt = srcAlloc->grnt;
21516 dstAlloc->hqProc = srcAlloc->hqProc;
21517 /* Fix : syed During UE context release, hqProc->alloc
21518 * was pointing to srcAlloc instead of dstAlloc and
21519 * freeing from incorrect sf->allocDb was
21520 * corrupting the list. */
21521 /* In case of SPS Occasion Allocation is done in advance and
21522 at a later time Hq Proc is linked. Hence HqProc
21523 pointer in alloc shall be NULL */
21525 if (dstAlloc->hqProc)
21528 dstAlloc->hqProc->alloc = dstAlloc;
21530 dstAlloc->ue = srcAlloc->ue;
21531 dstAlloc->rnti = srcAlloc->rnti;
21532 dstAlloc->forMsg3 = srcAlloc->forMsg3;
21533 dstAlloc->raCb = srcAlloc->raCb;
21534 dstAlloc->pdcch = srcAlloc->pdcch;
21535 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
/* Re-point the UE's cached allocation at the destination. */
21538 ueUl = RG_SCH_CMN_GET_UL_UE(dstAlloc->ue,cell);
21539 ueUl->alloc.alloc = dstAlloc;
21541 if (dstAlloc->ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
21543 if((dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc != NULLP)
21544 && (dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc == srcAlloc))
21546 dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc = dstAlloc;
21557 * @brief Update TX and RETX subframe's allocation
21562 * Function : rgSCHCmnUlInsAllocFrmNewSf2OldSf
21564 * - Release all preassigned allocations of newSf and merge
21566 * - If alloc of newSf collide with one or more allocs of oldSf
21567 * - mark all such allocs of oldSf for Adaptive Retx.
21568 * - Swap the alloc and hole DB references of oldSf and newSf.
21570 * @param[in] RgSchCellCb *cell
21571 * @param[in] RgSchUlSf *newSf
21572 * @param[in] RgSchUlSf *oldSf
21573 * @param[in] RgSchUlAlloc *srcAlloc
21577 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf
21582 RgSchUlAlloc *srcAlloc
21585 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, srcAlloc)
21589 RgSchUlAlloc *srcAlloc;
21592 RgSchUlAlloc *alloc, *dstAlloc, *nxtAlloc;
21594 /* MS_WORKAROUND ccpu00120827 */
21595 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
21597 TRC2(rgSCHCmnUlInsAllocFrmNewSf2OldSf);
/* Pass 1: walk oldSf's alloc list; any alloc that overlaps srcAlloc's
 * subband span is marked for adaptive retx and released from oldSf. */
21599 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21603 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
21604 /* If there is an overlap between alloc and srcAlloc
21605 * then alloc is marked for Adaptive retx and it is released
21607 if (rgSCHCmnUlAllocsOvrLap(alloc, srcAlloc) == TRUE)
21609 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21610 rgSCHUtlUlAllocRls(oldSf, alloc);
21612 /* No further allocs spanning the srcAlloc subbands */
/* Allocs are ordered by subband; once past srcAlloc's last subband
 * no later alloc can overlap — stop early. */
21613 if (srcAlloc->sbStart + srcAlloc->numSb - 1 <= alloc->sbStart)
21617 } while ((alloc = nxtAlloc) != NULLP);
21620 /* After freeing all the colliding allocs, request for an allocation
21621 * specifying the start and numSb with in txSf. This function should
21622 * always return positively with a nonNULL dstAlloc */
21623 /* MS_WORKAROUND ccpu00120827 */
21624 remAllocs = schCmnCell->ul.maxAllocPerUlSf - *oldSf->allocCountRef;
21627 /* Fix : If oldSf already has max Allocs then release the
21628 * old RETX alloc to make space for new alloc of newSf.
21629 * newSf allocs(i.e new Msg3s) are given higher priority
21630 * over retx allocs. */
21631 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21635 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
/* Only evict allocs that are NOT already-merged new-TX allocs. */
21636 if (!alloc->mrgdNewTxAlloc)
21638 /* If alloc is for RETX */
21639 /* TODO: Incase of this ad also in case of choosing
21640 * and alloc for ADAP RETX, we need to send ACK for
21641 * the corresponding alloc in PHICH */
21642 #ifndef EMTC_ENABLE
21643 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc);
21645 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc,FALSE);
21649 }while((alloc = nxtAlloc) != NULLP);
/* Pass 2: carve out the exact subband span of srcAlloc inside oldSf. */
21652 dstAlloc = rgSCHUtlUlGetSpfcAlloc(oldSf, srcAlloc->sbStart, srcAlloc->numSb);
21654 /* This should never happen */
21655 if (dstAlloc == NULLP)
21657 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"CRNTI:%d "
21658 "rgSCHUtlUlGetSpfcAlloc failed in rgSCHCmnUlInsAllocFrmNewSf2OldSf",
21663 /* Copy the srcAlloc's state information in to dstAlloc */
21664 rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc);
21665 /* Set new Tx merged Alloc Flag to TRUE, indicating that this
21666 * alloc shall not be processed for non-adaptive retransmission */
21667 dstAlloc->mrgdNewTxAlloc = TRUE;
21672 * @brief Merge all allocations of newSf to oldSf.
21676 * Function : rgSCHCmnUlMergeSfAllocs
21678 * - Merge all allocations of newSf to oldSf.
21679 * - If newSf's alloc collides with oldSf's alloc
21680 * then oldSf's alloc is marked for adaptive Retx
21681 * and is released from oldSf to create space for
21684 * @param[in] RgSchCellCb *cell
21685 * @param[in] RgSchUlSf *oldSf
21686 * @param[in] RgSchUlSf *newSf
21690 PRIVATE Void rgSCHCmnUlMergeSfAllocs
21697 PRIVATE Void rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf)
21703 RgSchUlAlloc *alloc, *nxtAlloc;
21704 TRC2(rgSCHCmnUlMergeSfAllocs);
21707 /* Merge each alloc of newSf in to oldSf
21708 * and release it from newSf */
/* Iterate newSf's alloc list; nxtAlloc is cached before the current
 * alloc is released, since release unlinks it from the list. */
21709 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21713 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21714 rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, alloc);
21715 rgSCHUtlUlAllocRls(newSf, alloc);
21716 } while((alloc = nxtAlloc) != NULLP);
21722 * @brief Swap Hole/Alloc DB context of newSf and oldSf.
21726 * Function : rgSCHCmnUlSwapSfAllocs
21728 * - Swap Hole/Alloc DB context of newSf and oldSf.
21730 * @param[in] RgSchCellCb *cell
21731 * @param[in] RgSchUlSf *oldSf
21732 * @param[in] RgSchUlSf *newSf
21736 PRIVATE Void rgSCHCmnUlSwapSfAllocs
21743 PRIVATE Void rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf)
/* Classic three-step pointer swap of allocDb/holeDb/availSubbands. */
21749 RgSchUlAllocDb *tempAllocDb = newSf->allocDb;
21750 RgSchUlHoleDb *tempHoleDb = newSf->holeDb;
21751 U8 tempAvailSbs = newSf->availSubbands;
21753 TRC2(rgSCHCmnUlSwapSfAllocs);
21756 newSf->allocDb = oldSf->allocDb;
21757 newSf->holeDb = oldSf->holeDb;
21758 newSf->availSubbands = oldSf->availSubbands;
21760 oldSf->allocDb = tempAllocDb;
21761 oldSf->holeDb = tempHoleDb;
21762 oldSf->availSubbands = tempAvailSbs;
21764 /* Fix ccpu00120610*/
/* allocCountRef caches &allocDb->count, so it must be re-derived after
 * the DB pointers have been exchanged. */
21765 newSf->allocCountRef = &newSf->allocDb->count;
21766 oldSf->allocCountRef = &oldSf->allocDb->count;
21771 * @brief Perform non-adaptive RETX for non-colliding allocs.
21775 * Function : rgSCHCmnUlPrcNonAdptRetx
21777 * - Perform non-adaptive RETX for non-colliding allocs.
21779 * @param[in] RgSchCellCb *cell
21780 * @param[in] RgSchUlSf *newSf
21781 * @param[in] U8 idx
21785 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx
21792 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx)
21798 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21799 RgSchUlAlloc *alloc, *nxtAlloc;
21800 TRC2(rgSCHCmnUlPrcNonAdptRetx);
21802 /* perform non-adaptive retx allocation(adjustment) */
21803 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21807 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21808 /* A merged new TX alloc, reset the state and skip */
/* mrgdNewTxAlloc was set during the subframe merge; clear it here so
 * the flag does not leak into the next scheduling round. */
21809 if (alloc->mrgdNewTxAlloc)
21811 alloc->mrgdNewTxAlloc = FALSE;
21816 rgSCHCmnUlNonadapRetx(cellUl, alloc, idx);
21818 } while((alloc = nxtAlloc) != NULLP);
21824 * @brief Update TX and RETX subframe's allocation
21829 * Function : rgSCHCmnUlPrfmSfMerge
21831 * - Release all preassigned allocations of newSf and merge
21833 * - If alloc of newSf collide with one or more allocs of oldSf
21834 * - mark all such allocs of oldSf for Adaptive Retx.
21835 * - Swap the alloc and hole DB references of oldSf and newSf.
21836 * - The allocs which did not collide with pre-assigned msg3
21837 * allocs are marked for non-adaptive RETX.
21839 * @param[in] RgSchCellCb *cell
21840 * @param[in] RgSchUlSf *oldSf
21841 * @param[in] RgSchUlSf *newSf
21842 * @param[in] U8 idx
21846 PRIVATE Void rgSCHCmnUlPrfmSfMerge
21854 PRIVATE Void rgSCHCmnUlPrfmSfMerge(cell, oldSf, newSf, idx)
21861 TRC2(rgSCHCmnUlPrfmSfMerge);
/* Orchestrator: merge, then swap, then process the survivors.
 * Order matters — the swap makes newSf hold the merged context that
 * rgSCHCmnUlPrcNonAdptRetx then walks. */
21862 /* Preassigned resources for msg3 in newSf.
21863 * Hence do adaptive retx for all NACKED TXs */
21864 rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf);
21865 /* swap alloc and hole DBs of oldSf and newSf. */
21866 rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf);
21867 /* Here newSf has the resultant merged allocs context */
21868 /* Perform non-adaptive RETX for non-colliding allocs */
21869 rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx);
21875 * @brief Update TX and RETX subframe's allocation
21880 * Function : rgSCHCmnUlRmvCmpltdAllocs
21882 * - Free all Transmission which are ACKED
21883 * OR for which MAX retransmission have
21887 * @param[in] RgSchCellCb *cell,
21888 * @param[in] RgSchUlSf *sf
21892 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs
21898 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs(cell, sf)
21903 RgSchUlAlloc *alloc, *nxtAlloc;
21904 TRC2(rgSCHCmnUlRmvCmpltdAllocs);
21906 if ((alloc = rgSCHUtlUlAllocFirst(sf)) == NULLP)
21912 nxtAlloc = rgSCHUtlUlAllocNxt(sf, alloc);
/* NOTE(review): debug printf left in-line; presumably compiled only
 * under a debug flag not visible in this extract — confirm. */
21914 printf("rgSCHCmnUlRmvCmpltdAllocs:time(%d %d) alloc->hqProc->remTx %d hqProcId(%d) \n",cell->crntTime.sfn,cell->crntTime.slot,alloc->hqProc->remTx, alloc->grnt.hqProcId);
/* NOTE(review): rcvdCrcInd is forced TRUE here, making the first half
 * of the next condition always hold — looks intentional in the full
 * file (unconditional completion) but verify against the original. */
21916 alloc->hqProc->rcvdCrcInd = TRUE;
21917 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
21920 /* SR_RACH_STATS : MSG 3 MAX RETX FAIL*/
21921 if ((alloc->forMsg3 == TRUE) && (alloc->hqProc->remTx == 0))
21923 rgNumMsg3FailMaxRetx++;
21925 cell->tenbStats->sch.msg3Fail++;
21929 #ifdef MAC_SCH_STATS
21930 if(alloc->ue != NULLP)
21932 /* access from ulHarqProc*/
21933 RgSchUeCb *ueCb = alloc->ue;
21934 RgSchCmnUe *cmnUe = (RgSchCmnUe*)ueCb->sch;
21935 RgSchCmnUlUe *ulUe = &(cmnUe->ul);
21936 U8 cqi = ulUe->crntUlCqi[0];
21937 U16 numUlRetx = ueCb->ul.hqEnt.maxHqRetx - alloc->hqProc->remTx;
21939 hqRetxStats.ulCqiStat[(cqi - 1)].mcs = alloc->grnt.iMcs;
/* Per-CQI HARQ retransmission histogram buckets (1..4 retx). */
21944 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1++;
21947 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2++;
21950 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3++;
21953 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4++;
21956 hqRetxStats.ulCqiStat[(cqi - 1)].totalTx = \
21957 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1 + \
21958 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2 * 2) + \
21959 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3 * 3) + \
21960 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4 * 4);
21963 #endif /*MAC_SCH_STATS*/
21964 rgSCHCmnUlFreeAllocation(cell, sf, alloc);
21966 /*ccpu00106104 MOD added check for AckNackRep */
21967 /*added check for acknack so that adaptive retx considers ue
21968 inactivity due to ack nack repetition*/
21969 else if((alloc->ue != NULLP) && (TRUE != alloc->forMsg3))
21971 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21972 rgSCHUtlUlAllocRls(sf, alloc);
21974 } while ((alloc = nxtAlloc) != NULLP);
21980 * @brief Update an uplink subframe.
21984 * Function : rgSCHCmnRlsUlSf
21986 * For each allocation
21987 * - if no more tx needed
21988 * - Release allocation
21990 * - Perform retransmission
21992 * @param[in] RgSchUlSf *sf
21993 * @param[in] U8 idx
21997 PUBLIC Void rgSCHCmnRlsUlSf
22003 PUBLIC Void rgSCHCmnRlsUlSf(cell, idx)
22008 TRC2(rgSCHCmnRlsUlSf);
22010 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
/* Only act when a valid HARQ-feedback subframe index exists for idx. */
22012 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
22014 RgSchUlSf *oldSf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
22016 /* Initialize the reTxLst of UL HqProcs for RETX subframe */
/* Empty alloc list: nothing to release (early-out branch in full file). */
22017 if (rgSCHUtlUlAllocFirst(oldSf) == NULLP)
22021 /* Release all completed TX allocs from sf */
22022 rgSCHCmnUlRmvCmpltdAllocs(cell, oldSf);
22024 oldSf->numACqiCount = 0;
22030 * @brief Handle uplink allocation for retransmission.
22034 * Function : rgSCHCmnUlUpdAllocRetx
22036 * - Perform adaptive retransmission
22038 * @param[in] RgSchUlSf *sf
22039 * @param[in] RgSchUlAlloc *alloc
22043 PRIVATE Void rgSCHCmnUlUpdAllocRetx
22046 RgSchUlAlloc *alloc
22049 PRIVATE Void rgSCHCmnUlUpdAllocRetx(cell, alloc)
22051 RgSchUlAlloc *alloc;
22054 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
22056 TRC2(rgSCHCmnUlUpdAllocRetx);
/* Snapshot everything the adaptive-retx scheduler will need into the
 * HARQ process's reTxAlloc, since the alloc itself is about to be
 * released from its subframe. */
22058 alloc->hqProc->reTxAlloc.rnti = alloc->rnti;
22059 alloc->hqProc->reTxAlloc.numSb = alloc->numSb;
22060 alloc->hqProc->reTxAlloc.iMcs = alloc->grnt.iMcs;
22062 alloc->hqProc->reTxAlloc.dciFrmt = alloc->grnt.dciFrmt;
22063 alloc->hqProc->reTxAlloc.numLyr = alloc->grnt.numLyr;
22064 alloc->hqProc->reTxAlloc.vrbgStart = alloc->grnt.vrbgStart;
22065 alloc->hqProc->reTxAlloc.numVrbg = alloc->grnt.numVrbg;
22066 alloc->hqProc->reTxAlloc.modOdr = alloc->grnt.modOdr;
22068 //iTbs = rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs);
22069 //iTbs = alloc->grnt.iMcs;
22070 //RGSCH_ARRAY_BOUND_CHECK( 0, rgTbSzTbl[0], iTbs);
22071 alloc->hqProc->reTxAlloc.tbSz = alloc->grnt.datSz;
22072 //rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1]/8;
22073 alloc->hqProc->reTxAlloc.ue = alloc->ue;
22074 alloc->hqProc->reTxAlloc.forMsg3 = alloc->forMsg3;
22075 alloc->hqProc->reTxAlloc.raCb = alloc->raCb;
22077 /* Set as retransmission is pending */
22078 alloc->hqProc->isRetx = TRUE;
22079 alloc->hqProc->alloc = NULLP;
22080 alloc->hqProc->ulSfIdx = RGSCH_INVALID_INFO;
22082 printf("Adding Harq Proc Id in the retx list hqProcId %d \n",alloc->grnt.hqProcId);
/* Queue the HARQ process on the cell-wide adaptive-retx list. */
22084 cmLListAdd2Tail(&cmnUlCell->reTxLst, &alloc->hqProc->reTxLnk);
22085 alloc->hqProc->reTxLnk.node = (PTR)alloc->hqProc;
22090 * @brief Attempts allocation for msg3s for which ADAP retransmissions
22095 * Function : rgSCHCmnUlAdapRetxAlloc
22097 * Attempts allocation for msg3s for which ADAP retransmissions
22100 * @param[in] RgSchCellCb *cell
22101 * @param[in] RgSchUlSf *sf
22102 * @param[in] RgSchUlHqProcCb *proc;
22103 * @param[in] RgSchUlHole *hole;
22107 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc
22111 RgSchUlHqProcCb *proc,
22115 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole)
22118 RgSchUlHqProcCb *proc;
22122 U8 numSb = proc->reTxAlloc.numSb;
22123 U8 iMcs = proc->reTxAlloc.iMcs;
22124 CmLteTimingInfo frm = cell->crntTime;
22125 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22128 RgSchUlAlloc *alloc;
22129 TRC2(rgSCHCmnUlAdapRetxAlloc);
22131 /* Fetch PDCCH for msg3 */
22132 /* ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
22133 /* Introduced timing delta for UL control */
22134 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
22135 dlSf = rgSCHUtlSubFrmGet(cell, frm);
22136 pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
/* No PDCCH resource available — cannot signal the adaptive retx. */
22137 if (pdcch == NULLP)
22142 /* Fetch UL Alloc for msg3 */
22143 if (numSb <= hole->num)
22145 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
/* Alloc failure path: return the PDCCH so it is not leaked. */
22150 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22151 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
22152 "UL Alloc fail for msg3 retx for rnti: %d\n",
22153 proc->reTxAlloc.rnti);
22157 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
22158 alloc->grnt.iMcs = iMcs;
22159 alloc->grnt.datSz = proc->reTxAlloc.tbSz;
22162 //RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
22164 /* Fill UL Alloc for msg3 */
22165 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
22166 alloc->grnt.nDmrs = 0;
22167 alloc->grnt.hop = 0;
22168 alloc->grnt.delayBit = 0;
22169 alloc->grnt.isRtx = TRUE;
22170 proc->ulSfIdx = cellUl->schdIdx;
22172 proc->schdTime = cellUl->schdTime;
22173 alloc->grnt.hqProcId = proc->procId;
/* Restore the grant parameters captured at rgSCHCmnUlUpdAllocRetx time. */
22174 alloc->grnt.dciFrmt = proc->reTxAlloc.dciFrmt;
22175 alloc->grnt.numLyr = proc->reTxAlloc.numLyr;
22176 alloc->grnt.vrbgStart = proc->reTxAlloc.vrbgStart;
22177 alloc->grnt.numVrbg = proc->reTxAlloc.numVrbg;
22178 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
22179 alloc->grnt.modOdr = proc->reTxAlloc.modOdr;
22181 /* TODO : Hardcoding these as of now */
22182 alloc->grnt.hop = 0;
22183 alloc->grnt.SCID = 0;
22184 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
22185 alloc->grnt.PMI = 0;
22186 alloc->grnt.uciOnxPUSCH = 0;
22188 alloc->rnti = proc->reTxAlloc.rnti;
22189 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
22190 alloc->ue = proc->reTxAlloc.ue;
22191 alloc->pdcch = pdcch;
22192 alloc->forMsg3 = proc->reTxAlloc.forMsg3;
22193 alloc->raCb = proc->reTxAlloc.raCb;
22194 alloc->hqProc = proc;
22195 alloc->isAdaptive = TRUE;
22197 sf->totPrb += alloc->grnt.numRb;
22199 /* FIX : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
/* Msg3 path: record grant and the time UE will actually transmit. */
22202 alloc->raCb->msg3Grnt= alloc->grnt;
22204 /* To the crntTime, add the time at which UE will
22205 * actually send MSG3 */
22206 alloc->raCb->msg3AllocTime = cell->crntTime;
22207 RGSCH_INCR_SUB_FRAME(alloc->raCb->msg3AllocTime, RG_SCH_CMN_MIN_RETXMSG3_RECP_INTRVL);
22209 alloc->raCb->msg3AllocTime = cellUl->schdTime;
22211 rgSCHCmnUlAdapRetx(alloc, proc);
22212 /* Fill PDCCH with alloc info */
22213 pdcch->rnti = alloc->rnti;
22214 pdcch->dci.dciFormat = TFU_DCI_FORMAT_0;
22215 pdcch->dci.u.format0Info.hoppingEnbld = alloc->grnt.hop;
22216 pdcch->dci.u.format0Info.rbStart = alloc->grnt.rbStart;
22217 pdcch->dci.u.format0Info.numRb = alloc->grnt.numRb;
22218 pdcch->dci.u.format0Info.mcs = alloc->grnt.iMcsCrnt;
22219 pdcch->dci.u.format0Info.ndi = alloc->hqProc->ndi;
22220 pdcch->dci.u.format0Info.nDmrs = alloc->grnt.nDmrs;
22221 pdcch->dci.u.format0Info.tpcCmd = alloc->grnt.tpc;
22225 /* ulIdx setting for cfg 0 shall be appropriately fixed thru ccpu00109015 */
22226 pdcch->dci.u.format0Info.ulIdx = RG_SCH_ULIDX_MSB;
22227 pdcch->dci.u.format0Info.dai = RG_SCH_MAX_DAI_IDX;
22230 pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_0];
/* Non-Msg3 (UE) path: fill the UE's allocation info instead. */
22234 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue,cell);
22236 alloc->ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
22239 ue->ul.nPrb = alloc->grnt.numRb;
22241 ueUl->alloc.alloc = alloc;
22242 /* FIx: Removed the call to rgSCHCmnUlAdapRetx */
22243 rgSCHCmnUlUeFillAllocInfo(cell, alloc->ue);
22244 /* Setting csireq as false for Adaptive Retx*/
22245 ueUl->alloc.alloc->pdcch->dci.u.format0Info.cqiReq = RG_SCH_APCQI_NO;
22246 pdcch->dciNumOfBits = alloc->ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
22248 /* Reset as retransmission is done */
22249 proc->isRetx = FALSE;
22251 else /* Intg fix */
/* Hole too small for the required subbands: release PDCCH, log, fail. */
22253 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22254 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
22255 "Num SB not suffiecient for adap retx for rnti: %d",
22256 proc->reTxAlloc.rnti);
22262 /* Fix: syed Adaptive Msg3 Retx crash. */
22264 * @brief Releases all Adaptive Retx HqProcs which failed for
22265 * allocations in this scheduling occasion.
22269 * Function : rgSCHCmnUlSfRlsRetxProcs
22272 * @param[in] RgSchCellCb *cell
22273 * @param[in] RgSchUlSf *sf
22277 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs
22283 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs(cell, sf)
22290 RgSchUlHqProcCb *proc;
22291 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22293 TRC2(rgSCHCmnUlSfRlsRetxProcs);
22295 cp = &(cellUl->reTxLst);
/* Drain the cell's adaptive-retx list, unlinking each HARQ proc. */
22299 proc = (RgSchUlHqProcCb *)node->node;
22301 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
22302 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22303 proc->reTxLnk.node = (PTR)NULLP;
22310 * @brief Attempts allocation for UEs for which retransmissions
22315 * Function : rgSCHCmnUlSfReTxAllocs
22317 * Attempts allocation for UEs for which retransmissions
22320 * @param[in] RgSchCellCb *cell
22321 * @param[in] RgSchUlSf *sf
22325 PRIVATE Void rgSCHCmnUlSfReTxAllocs
22331 PRIVATE Void rgSCHCmnUlSfReTxAllocs(cell, sf)
22338 RgSchUlHqProcCb *proc;
22341 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
22342 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22343 TRC2(rgSCHCmnUlSfReTxAllocs);
22345 cp = &(cellUl->reTxLst);
22349 proc = (RgSchUlHqProcCb *)node->node;
22350 ue = proc->reTxAlloc.ue;
22352 /*ccpu00106104 MOD added check for AckNackRep */
22353 /*added check for acknack so that adaptive retx considers ue
22354 inactivity due to ack nack repetition*/
/* Skip UEs that are measuring or in ACK/NACK repetition — they are
 * temporarily unable to transmit. */
22355 if((ue != NULLP) &&
22356 ((ue->measGapCb.isMeasuring == TRUE)||
22357 (ue->ackNakRepCb.isAckNakRep == TRUE)))
22361 /* Fix for ccpu00123917: Check if maximum allocs per UL sf have been exhausted */
22362 if (((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
22363 || (sf->allocDb->count == schCmnCell->ul.maxAllocPerUlSf))
22365 /* No more UL BW then return */
22368 /* perform adaptive retx for UE's */
/* On allocation failure, stop trying (break in full file). */
22369 if (rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole) == FALSE)
22373 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
22374 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22375 /* Fix: syed Adaptive Msg3 Retx crash. */
22376 proc->reTxLnk.node = (PTR)NULLP;
22382 * @brief Handles RB allocation for downlink.
22386 * Function : rgSCHCmnDlRbAlloc
22388 * Invoking Module Processing:
22389 * - This function is invoked for DL RB allocation
22391 * Processing Steps:
22392 * - If cell is frequency selecive,
22393 * - Call rgSCHDlfsAllocRb().
22395 * - Call rgSCHCmnNonDlfsRbAlloc().
22397 * @param[in] RgSchCellCb *cell
22398 * @param[in] RgSchDlRbAllocInfo *allocInfo
22403 PRIVATE Void rgSCHCmnDlRbAlloc
22406 RgSchCmnDlRbAllocInfo *allocInfo
22409 PRIVATE Void rgSCHCmnDlRbAlloc(cell, allocInfo)
22411 RgSchCmnDlRbAllocInfo *allocInfo;
22414 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
22415 TRC2(rgSCHCmnDlRbAlloc);
/* Dispatch: frequency-selective scheduler when enabled, else the
 * non-DLFS allocator. */
22417 if (cellSch->dl.isDlFreqSel)
22419 printf("5GTF_ERROR DLFS SCH Enabled\n");
22420 cellSch->apisDlfs->rgSCHDlfsAllocRb(cell, allocInfo);
22424 rgSCHCmnNonDlfsRbAlloc(cell, allocInfo);
22432 * @brief Determines number of RBGs and RBG subset sizes for the given DL
22433 * bandwidth and rbgSize
22436 * Function : rgSCHCmnDlGetRbgInfo
22439 * Processing Steps:
22440 * - Fill-up rbgInfo data structure for given DL bandwidth and rbgSize
22442 * @param[in] U8 dlTotalBw
22443 * @param[in] U8 dlSubsetBw
22444 * @param[in] U8 maxRaType1SubsetBw
22445 * @param[in] U8 rbgSize
22446 * @param[out] RgSchBwRbgInfo *rbgInfo
22450 PUBLIC Void rgSCHCmnDlGetRbgInfo
22454 U8 maxRaType1SubsetBw,
22456 RgSchBwRbgInfo *rbgInfo
22459 PUBLIC Void rgSCHCmnDlGetRbgInfo(dlTotalBw, dlSubsetBw, maxRaType1SubsetBw,
22463 U8 maxRaType1SubsetBw;
22465 RgSchBwRbgInfo *rbgInfo;
22468 #ifdef RGSCH_SPS_UNUSED
/* Precomputed helpers: index/size of the last (possibly short) RBG and
 * number of RA type 1 RBGs, used only in the SPS-unused build. */
22470 U8 lastRbgIdx = ((dlTotalBw + rbgSize - 1)/rbgSize) - 1;
22471 U8 currRbgSize = rbgSize;
22472 U8 subsetSizeIdx = 0;
22473 U8 subsetSize[RG_SCH_NUM_RATYPE1_SUBSETS] = {0};
22474 U8 lastRbgSize = rbgSize - (dlTotalBw - ((dlTotalBw/rbgSize) * rbgSize));
22475 U8 numRaType1Rbgs = (maxRaType1SubsetBw + rbgSize - 1)/rbgSize;
22478 /* Compute maximum number of SPS RBGs for the cell */
22479 rbgInfo->numRbgs = ((dlSubsetBw + rbgSize - 1)/rbgSize);
22481 #ifdef RGSCH_SPS_UNUSED
22482 /* Distribute RBGs across subsets except last RBG */
22483 for (;idx < numRaType1Rbgs - 1; ++idx)
22485 subsetSize[subsetSizeIdx] += currRbgSize;
22486 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22489 /* Computation for last RBG */
22490 if (idx == lastRbgIdx)
22492 currRbgSize = lastRbgSize;
22494 subsetSize[subsetSizeIdx] += currRbgSize;
22495 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22498 /* Update the computed sizes */
22499 #ifdef RGSCH_SPS_UNUSED
22500 rbgInfo->lastRbgSize = currRbgSize;
22502 rbgInfo->lastRbgSize = rbgSize -
22503 (dlSubsetBw - ((dlSubsetBw/rbgSize) * rbgSize));
22504 #ifdef RGSCH_SPS_UNUSED
22505 cmMemcpy((U8 *)rbgInfo->rbgSubsetSize, (U8 *) subsetSize, 4 * sizeof(U8));
/* numRbs is clamped so the RBG grid never exceeds the total bandwidth. */
22507 rbgInfo->numRbs = (rbgInfo->numRbgs * rbgSize > dlTotalBw) ?
22508 dlTotalBw:(rbgInfo->numRbgs * rbgSize);
22509 rbgInfo->rbgSize = rbgSize;
22513 * @brief Handles RB allocation for Resource allocation type 0
22517 * Function : rgSCHCmnDlRaType0Alloc
22519 * Invoking Module Processing:
22520 * - This function is invoked for DL RB allocation for resource allocation
22523 * Processing Steps:
22524 * - Determine the available positions in the rbgMask.
22525 * - Allocate RBGs in the available positions.
22526 * - Update RA Type 0, RA Type 1 and RA type 2 masks.
22528 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22529 * @param[in] U8 rbsReq
22530 * @param[in] RgSchBwRbgInfo *rbgInfo
22531 * @param[out] U8 *numAllocRbs
22532 * @param[out] RgSchDlSfAllocInfo *resAllocInfo
22533 * @param[in] Bool isPartialAlloc
22539 PUBLIC U8 rgSCHCmnDlRaType0Alloc
22541 RgSchDlSfAllocInfo *allocedInfo,
22543 RgSchBwRbgInfo *rbgInfo,
22545 RgSchDlSfAllocInfo *resAllocInfo,
22546 Bool isPartialAlloc
22549 PUBLIC U8 rgSCHCmnDlRaType0Alloc(allocedInfo, rbsReq, rbgInfo,
22550 numAllocRbs, resAllocInfo, isPartialAlloc)
22551 RgSchDlSfAllocInfo *allocedInfo;
22553 RgSchBwRbgInfo *rbgInfo;
22555 RgSchDlSfAllocInfo *resAllocInfo;
22556 Bool isPartialAlloc;
22559 /* Note: This function attempts allocation only for full allocation */
22560 U32 remNumRbs, rbgPosInRbgMask, ueRaType2Mask;
22561 U8 type2MaskIdx, cnt, rbIdx;
22563 U8 bestNumAvailRbs = 0;
22565 U8 numAllocRbgs = 0;
22566 U8 rbgSize = rbgInfo->rbgSize;
22567 U32 *rbgMask = &(resAllocInfo->raType0Mask);
22568 #ifdef RGSCH_SPS_UNUSED
22571 U32 *raType1Mask = resAllocInfo->raType1Mask;
22572 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22574 U32 *raType2Mask = resAllocInfo->raType2Mask;
22576 U32 allocedMask = allocedInfo->raType0Mask;
22578 maskSize = rbgInfo->numRbgs;
/* Count already-used RBGs; if every RBG is taken there is nothing left. */
22581 RG_SCH_CMN_DL_COUNT_ONES(allocedMask, maskSize, &usedRbs);
22582 if (maskSize == usedRbs)
22584 /* All RBGs are allocated, including the last one */
/* The last RBG may be shorter than rbgSize, so it is excluded here and
 * its true size added back below if it is free. */
22589 remNumRbs = (maskSize - usedRbs - 1) * rbgSize; /* vamsee: removed minus 1 */
22591 /* If last RBG is available, add last RBG size */
22592 if (!(allocedMask & (1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(maskSize - 1))))
22594 remNumRbs += rbgInfo->lastRbgSize;
22598 /* If complete allocation is needed, check if total requested RBs are available else
22599 * check the best available RBs */
22600 if (!isPartialAlloc)
22602 if (remNumRbs >= rbsReq)
22604 bestNumAvailRbs = rbsReq;
22609 bestNumAvailRbs = remNumRbs > rbsReq ? rbsReq : remNumRbs;
22612 /* Allocate for bestNumAvailRbs */
22613 if (bestNumAvailRbs)
/* Scan all RBGs except the last; claim each free one until satisfied. */
22615 for (rbg = 0; rbg < maskSize - 1; ++rbg)
22617 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22618 if (!(allocedMask & rbgPosInRbgMask))
22620 /* Update RBG mask */
22621 *rbgMask |= rbgPosInRbgMask;
22623 /* Compute RB index of the first RB of the RBG allocated */
22624 rbIdx = rbg * rbgSize;
/* Mirror this RBG's RBs into the RA type 1/2 masks so the other
 * allocation types see them as occupied. */
22626 for (cnt = 0; cnt < rbgSize; ++cnt)
22628 #ifdef RGSCH_SPS_UNUSED
22629 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22631 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22632 #ifdef RGSCH_SPS_UNUSED
22633 /* Update RBG mask for RA type 1 */
22634 raType1Mask[rbgSubset] |= ueRaType1Mask;
22635 raType1UsedRbs[rbgSubset]++;
22637 /* Update RA type 2 mask */
22638 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22641 *numAllocRbs += rbgSize;
22642 remNumRbs -= rbgSize;
22644 if (*numAllocRbs >= bestNumAvailRbs)
22650 /* If last RBG available and allocation is not completed, allocate
22652 if (*numAllocRbs < bestNumAvailRbs)
22654 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22655 *rbgMask |= rbgPosInRbgMask;
22656 *numAllocRbs += rbgInfo->lastRbgSize;
22658 /* Compute RB index of the first RB of the last RBG */
22659 rbIdx = ((rbgInfo->numRbgs - 1 ) * rbgSize ); /* removed minus 1 vamsee */
22661 for (cnt = 0; cnt < rbgInfo->lastRbgSize; ++cnt)
22663 #ifdef RGSCH_SPS_UNUSED
22664 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22666 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22667 #ifdef RGSCH_SPS_UNUSED
22668 /* Update RBG mask for RA type 1 */
22669 raType1Mask[rbgSubset] |= ueRaType1Mask;
22670 raType1UsedRbs[rbgSubset]++;
22672 /* Update RA type 2 mask */
22673 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22676 remNumRbs -= rbgInfo->lastRbgSize;
22679 /* Note: this should complete allocation, not checking for the
22683 RETVALUE(numAllocRbgs);
22686 #ifdef RGSCH_SPS_UNUSED
22688 * @brief Handles RB allocation for Resource allocation type 1
22692 * Function : rgSCHCmnDlRaType1Alloc
22694 * Invoking Module Processing:
22695 * - This function is invoked for DL RB allocation for resource allocation
22698 * Processing Steps:
22699 * - Determine the available positions in the subsets.
22700 * - Allocate RB in the available subset.
22701 * - Update RA Type1, RA type 0 and RA type 2 masks.
22703 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22704 * @param[in] U8 rbsReq
22705 * @param[in] RgSchBwRbgInfo *rbgInfo
22706 * @param[in] U8 startRbgSubset
22707 * @param[in] U8 *allocRbgSubset
22708 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22709 * @param[in] Bool isPartialAlloc
22712 * Number of allocated RBs
22716 PUBLIC U8 rgSCHCmnDlRaType1Alloc
22718 RgSchDlSfAllocInfo *allocedInfo,
22720 RgSchBwRbgInfo *rbgInfo,
22722 U8 *allocRbgSubset,
22723 RgSchDlSfAllocInfo *resAllocInfo,
22724 Bool isPartialAlloc
22727 PUBLIC U8 rgSCHCmnDlRaType1Alloc(allocedInfo, rbsReq,rbgInfo,startRbgSubset,
22728 allocRbgSubset, resAllocInfo, isPartialAlloc)
22729 RgSchDlSfAllocInfo *allocedInfo;
22731 RgSchBwRbgInfo *rbgInfo;
22733 U8 *allocRbgSubset;
22734 RgSchDlSfAllocInfo *resAllocInfo;
22735 Bool isPartialAlloc;
22738 /* Note: This function attempts only full allocation */
22739 U8 *rbgSubsetSzArr;
22740 U8 type2MaskIdx, subsetIdx, rbIdx, rbInSubset, rbgInSubset;
22741 U8 offset, rbg, maskSize, bestSubsetIdx;
22743 U8 bestNumAvailRbs = 0;
22744 U8 numAllocRbs = 0;
22745 U32 ueRaType2Mask, ueRaType0Mask, rbPosInSubset;
22746 U32 remNumRbs, allocedMask;
22748 U8 rbgSize = rbgInfo->rbgSize;
22749 U8 rbgSubset = startRbgSubset;
22750 U32 *rbgMask = &resAllocInfo->raType0Mask;
22751 U32 *raType1Mask = resAllocInfo->raType1Mask;
22752 U32 *raType2Mask = resAllocInfo->raType2Mask;
22753 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22754 U32 *allocMask = allocedInfo->raType1Mask;
22756 /* Initialize the subset size Array */
22757 rbgSubsetSzArr = rbgInfo->rbgSubsetSize;
22759 /* Perform allocation for RA type 1 */
/* Phase 1: round-robin over subsets starting at startRbgSubset to find
 * the first subset that satisfies rbsReq, or (if partial allowed) the
 * subset with the most free RBs. */
22760 for (subsetIdx = 0;subsetIdx < rbgSize; ++subsetIdx)
22762 allocedMask = allocMask[rbgSubset];
22763 maskSize = rbgSubsetSzArr[rbgSubset];
22765 /* Determine number of available RBs in the subset */
/* NOTE(review): usedRbs is indexed with subsetIdx while the mask/size
 * use rbgSubset — looks like a possible index mismatch; confirm against
 * the full file before changing. */
22766 usedRbs = allocedInfo->raType1UsedRbs[subsetIdx];
22767 remNumRbs = maskSize - usedRbs;
22769 if (remNumRbs >= rbsReq)
22771 bestNumAvailRbs = rbsReq;
22772 bestSubsetIdx = rbgSubset;
22775 else if (isPartialAlloc && (remNumRbs > bestNumAvailRbs))
22777 bestNumAvailRbs = remNumRbs;
22778 bestSubsetIdx = rbgSubset;
22781 rbgSubset = (rbgSubset + 1) % rbgSize;
22782 } /* End of for (each rbgsubset) */
/* Phase 2: walk the chosen subset's mask and claim free RBs, mirroring
 * each claimed RB into the RA type 0 and type 2 masks. */
22784 if (bestNumAvailRbs)
22786 /* Initialize alloced mask and subsetSize depending on the RBG
22787 * subset of allocation */
22789 maskSize = rbgSubsetSzArr[bestSubsetIdx];
22790 allocedMask = allocMask[bestSubsetIdx];
22791 RG_SCH_CMN_DL_GET_START_POS(allocedMask, maskSize,
22793 for (; startIdx < rbgSize; ++startIdx, ++startPos)
22795 for (rbInSubset = startPos; rbInSubset < maskSize;
22796 rbInSubset = rbInSubset + rbgSize)
22798 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
22799 if (!(allocedMask & rbPosInSubset))
22801 raType1Mask[bestSubsetIdx] |= rbPosInSubset;
22802 raType1UsedRbs[bestSubsetIdx]++;
22804 /* Compute RB index value for the RB being allocated */
22805 rbgInSubset = rbInSubset /rbgSize;
22806 offset = rbInSubset % rbgSize;
22807 rbg = (rbgInSubset * rbgSize) + bestSubsetIdx;
22808 rbIdx = (rbg * rbgSize) + offset;
22810 /* Update RBG mask for RA type 0 allocation */
22811 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22812 *rbgMask |= ueRaType0Mask;
22814 /* Update RA type 2 mask */
22815 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22816 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22818 /* Update the counters */
22821 if (numAllocRbs == bestNumAvailRbs)
22826 } /* End of for (each position in the subset mask) */
22827 if (numAllocRbs == bestNumAvailRbs)
22831 } /* End of for startIdx = 0 to rbgSize */
22833 *allocRbgSubset = bestSubsetIdx;
22834 } /* End of if (bestNumAvailRbs) */
22836 RETVALUE(numAllocRbs);
22840 * @brief Handles RB allocation for Resource allocation type 2
22844 * Function : rgSCHCmnDlRaType2Alloc
22846 * Invoking Module Processing:
22847 * - This function is invoked for DL RB allocation for resource allocation
22850 * Processing Steps:
22851 * - Determine the available positions in the mask
22852 * - Allocate best fit consecutive RBs.
22853 * - Update RA Type2, RA type 1 and RA type 0 masks.
22855 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22856 * @param[in] U8 rbsReq
22857 * @param[in] RgSchBwRbgInfo *rbgInfo
22858 * @param[out] U8 *rbStart
22859 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22860 * @param[in] Bool isPartialAlloc
22863 * Number of allocated RBs
22867 PUBLIC U8 rgSCHCmnDlRaType2Alloc
22869 RgSchDlSfAllocInfo *allocedInfo,
22871 RgSchBwRbgInfo *rbgInfo,
22873 RgSchDlSfAllocInfo *resAllocInfo,
22874 Bool isPartialAlloc
22877 PUBLIC U8 rgSCHCmnDlRaType2Alloc(allocedInfo, rbsReq, rbgInfo, rbStart,
22878 resAllocInfo, isPartialAlloc)
22879 RgSchDlSfAllocInfo *allocedInfo;
22881 RgSchBwRbgInfo *rbgInfo;
22883 RgSchDlSfAllocInfo *resAllocInfo;
22884 Bool isPartialAlloc;
22887 U8 numAllocRbs = 0;
22889 U8 rbgSize = rbgInfo->rbgSize;
22890 U32 *rbgMask = &resAllocInfo->raType0Mask;
22891 #ifdef RGSCH_SPS_UNUSED
22892 U32 *raType1Mask = resAllocInfo->raType1Mask;
22894 U32 *raType2Mask = resAllocInfo->raType2Mask;
22895 #ifdef RGSCH_SPS_UNUSED
22896 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22898 U32 *allocedMask = allocedInfo->raType2Mask;
22900 /* Note: This function attempts only full allocation */
/* Find the best-fit contiguous hole; fills rbStart/numAllocRbs and
 * updates the RA type 2 mask in one call. */
22901 rgSCHCmnDlGetBestFitHole(allocedMask, rbgInfo->numRbs,
22902 raType2Mask, rbsReq, rbStart, &numAllocRbs, isPartialAlloc);
22905 /* Update the allocation in RA type 0 and RA type 1 masks */
22906 U8 rbCnt = numAllocRbs;
22907 #ifdef RGSCH_SPS_UNUSED
/* Mirror each allocated RB into the RA type 0 (and, when SPS build,
 * RA type 1) masks so other allocators see it as occupied. */
22916 /* Update RBG mask for RA type 0 allocation */
22917 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22918 *rbgMask |= ueRaType0Mask;
22920 #ifdef RGSCH_SPS_UNUSED
22921 /* Update RBG mask for RA type 1 */
22922 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22923 raType1Mask[rbgSubset] |= ueRaType1Mask;
22924 raType1UsedRbs[rbgSubset]++;
22926 /* Update the counters */
22932 RETVALUE(numAllocRbs);
22936 * @brief Determines RA type 0 mask from given RB index.
22940 * Function : rgSCHCmnGetRaType0Mask
22943 * Processing Steps:
22944 * - Determine RA Type 0 mask for given rbIdx and rbg size.
22946 * @param[in] U8 rbIdx
22947 * @param[in] U8 rbgSize
22948 * @return U32 RA type 0 mask
/* rgSCHCmnGetRaType0Mask: returns a single-bit U32 mask identifying the
 * resource block group (rbIdx / rbgSize) that contains the given RB; bit
 * position is produced by RG_SCH_CMN_DL_GET_POS_FRM_LSB. */
22951 PRIVATE U32 rgSCHCmnGetRaType0Mask
22957 PRIVATE U32 rgSCHCmnGetRaType0Mask(rbIdx, rbgSize)
22963 U32 rbgPosInRbgMask = 0;
22965 rbg = rbIdx/rbgSize;
22966 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22968 RETVALUE(rbgPosInRbgMask);
22971 #ifdef RGSCH_SPS_UNUSED
22973 * @brief Determines RA type 1 mask from given RB index.
22977 * Function : rgSCHCmnGetRaType1Mask
22980 * Processing Steps:
22981 * - Determine RA Type 1 mask for given rbIdx and rbg size.
22983 * @param[in] U8 rbIdx
22984 * @param[in] U8 rbgSize
22985 * @param[out] U8 *type1Subset
22986 * @return U32 RA type 1 mask
/* rgSCHCmnGetRaType1Mask: computes the RB's position within its RA type 1
 * RBG subset and returns a single-bit mask for it; the subset index
 * (rbg % rbgSize) is written to *type1Subset. Compiled only when
 * RGSCH_SPS_UNUSED is defined. */
22989 PRIVATE U32 rgSCHCmnGetRaType1Mask
22996 PRIVATE U32 rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, type1Subset)
23002 U8 rbg, rbgSubset, rbgInSubset, offset, rbInSubset;
23005 rbg = rbIdx/rbgSize;
23006 rbgSubset = rbg % rbgSize;
23007 rbgInSubset = rbg/rbgSize;
23008 offset = rbIdx % rbgSize;
23009 rbInSubset = rbgInSubset * rbgSize + offset;
23010 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
23012 *type1Subset = rbgSubset;
23013 RETVALUE(rbPosInSubset);
23015 #endif /* RGSCH_SPS_UNUSED */
23017 * @brief Determines RA type 2 mask from given RB index.
23021 * Function : rgSCHCmnGetRaType2Mask
23024 * Processing Steps:
23025 * - Determine RA Type 2 mask for given rbIdx and rbg size.
23027 * @param[in] U8 rbIdx
23028 * @param[out] U8 *maskIdx
23029 * @return U32 RA type 2 mask
/* rgSCHCmnGetRaType2Mask: maps an RB index onto the 32-bit-word RA type 2
 * mask array -- *maskIdx receives the word index (rbIdx / 32) and the
 * return value is the single-bit mask for (rbIdx % 32) inside that word. */
23032 PRIVATE U32 rgSCHCmnGetRaType2Mask
23038 PRIVATE U32 rgSCHCmnGetRaType2Mask(rbIdx, maskIdx)
23045 *maskIdx = rbIdx / 32;
23046 rbPosInType2 = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbIdx % 32);
23048 RETVALUE(rbPosInType2);
23052 * @brief Performs resource allocation for a non-SPS UE in SPS bandwidth
23056 * Function : rgSCHCmnAllocUeInSpsBw
23059 * Processing Steps:
23060 * - Determine allocation for the UE.
23061 * - Use resource allocation type 0, 1 and 2 for allocation
23062 * within maximum SPS bandwidth.
23064 * @param[in] RgSchDlSf *dlSf
23065 * @param[in] RgSchCellCb *cell
23066 * @param[in] RgSchUeCb *ue
23067 * @param[in] RgSchDlRbAlloc *rbAllocInfo
23068 * @param[in] Bool isPartialAlloc
/* rgSCHCmnAllocUeInSpsBw: tries to serve a non-SPS UE out of the reserved
 * SPS bandwidth (cell->spsBwRbgInfo) for the given subframe. Dispatches on
 * rbAllocInfo->raType (type 0 / type 1 under RGSCH_SPS_UNUSED / type 2),
 * allocates a PDCCH, fills per-TB byte counts from rgTbSzTbl, records the
 * allocation back into rbAllocInfo and the subframe-wide masks, and
 * advances dlSf->spsAllocdBw.
 * NOTE(review): several brace/return lines are elided in this chunk; the
 * Bool return values for the early-exit paths are not visible here. */
23074 PUBLIC Bool rgSCHCmnAllocUeInSpsBw
23079 RgSchDlRbAlloc *rbAllocInfo,
23080 Bool isPartialAlloc
23083 PUBLIC Bool rgSCHCmnAllocUeInSpsBw(dlSf, cell, ue, rbAllocInfo, isPartialAlloc)
23087 RgSchDlRbAlloc *rbAllocInfo;
23088 Bool isPartialAlloc;
23091 U8 rbgSize = cell->rbgSize;
23092 U8 numAllocRbs = 0;
23093 U8 numAllocRbgs = 0;
23095 U8 idx, noLyr, iTbs;
23096 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
23097 RgSchDlSfAllocInfo *dlSfAlloc = &rbAllocInfo->dlSf->dlSfAllocInfo;
23098 RgSchBwRbgInfo *spsRbgInfo = &cell->spsBwRbgInfo;
23100 /* SPS_FIX : Check if this Hq proc is scheduled */
23101 if ((0 == rbAllocInfo->tbInfo[0].schdlngForTb) &&
23102 (0 == rbAllocInfo->tbInfo[1].schdlngForTb))
23107 /* Check if the requirement can be accommodated in SPS BW */
23108 if (dlSf->spsAllocdBw == spsRbgInfo->numRbs)
23110 /* SPS Bandwidth has been exhausted: no further allocations possible */
23113 if (!isPartialAlloc)
23115 if((dlSf->spsAllocdBw + rbAllocInfo->rbsReq) > spsRbgInfo->numRbs)
23121 /* Perform allocation for RA type 0 if rbsReq is multiple of RBG size (also
23122 * if RBG size = 1) */
23123 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
/* Round rbsReq up to a whole number of RBGs before the type 0 attempt. */
23125 rbAllocInfo->rbsReq += (rbgSize - rbAllocInfo->rbsReq % rbgSize);
23126 numAllocRbgs = rgSCHCmnDlRaType0Alloc(dlSfAlloc,
23127 rbAllocInfo->rbsReq, spsRbgInfo, &numAllocRbs,
23128 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23130 #ifdef RGSCH_SPS_UNUSED
23131 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23133 /* If no RBS could be allocated, attempt RA TYPE 1 */
23135 numAllocRbs = rgSCHCmnDlRaType1Alloc(dlSfAlloc,
23136 rbAllocInfo->rbsReq, spsRbgInfo, (U8)dlSfAlloc->nxtRbgSubset,
23137 &rbAllocInfo->allocInfo.raType1.rbgSubset,
23138 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23142 dlSfAlloc->nxtRbgSubset =
23143 (rbAllocInfo->allocInfo.raType1.rbgSubset + 1 ) % rbgSize;
23147 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23149 numAllocRbs = rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23150 rbAllocInfo->rbsReq, spsRbgInfo,
23151 &rbStart, &rbAllocInfo->resAllocInfo, isPartialAlloc);
23158 if (!(rbAllocInfo->pdcch =
23159 rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi,\
23160 rbAllocInfo->dciFormat, FALSE)))
23162 /* Note: Returning TRUE since PDCCH might be available for another UE */
23166 /* Update Tb info for each scheduled TB */
23167 iTbs = rbAllocInfo->tbInfo[0].iTbs;
23168 noLyr = rbAllocInfo->tbInfo[0].noLyr;
23169 rbAllocInfo->tbInfo[0].bytesAlloc =
23170 rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;
23172 if (rbAllocInfo->tbInfo[1].schdlngForTb)
23174 iTbs = rbAllocInfo->tbInfo[1].iTbs;
23175 noLyr = rbAllocInfo->tbInfo[1].noLyr;
23176 rbAllocInfo->tbInfo[1].bytesAlloc =
23177 rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;;
23180 /* Update rbAllocInfo with the allocation information */
23181 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23183 rbAllocInfo->allocInfo.raType0.dlAllocBitMask =
23184 rbAllocInfo->resAllocInfo.raType0Mask;
23185 rbAllocInfo->allocInfo.raType0.numDlAlloc = numAllocRbgs;
23187 #ifdef RGSCH_SPS_UNUSED
23188 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23190 rbAllocInfo->allocInfo.raType1.dlAllocBitMask =
23191 rbAllocInfo->resAllocInfo.raType1Mask[rbAllocInfo->allocInfo.raType1.rbgSubset];
23192 rbAllocInfo->allocInfo.raType1.numDlAlloc = numAllocRbs;
23193 rbAllocInfo->allocInfo.raType1.shift = 0;
23196 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23198 rbAllocInfo->allocInfo.raType2.isLocal = TRUE;
23199 rbAllocInfo->allocInfo.raType2.rbStart = rbStart;
23200 rbAllocInfo->allocInfo.raType2.numRb = numAllocRbs;
23203 rbAllocInfo->rbsAlloc = numAllocRbs;
23204 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
23206 /* Update allocation masks for RA types 0, 1 and 2 in DL SF */
23208 /* Update type 0 allocation mask */
23209 dlSfAlloc->raType0Mask |= rbAllocInfo->resAllocInfo.raType0Mask;
23210 #ifdef RGSCH_SPS_UNUSED
23211 /* Update type 1 allocation masks */
23212 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
23214 dlSfAlloc->raType1Mask[idx] |= rbAllocInfo->resAllocInfo.raType1Mask[idx];
23215 dlSfAlloc->raType1UsedRbs[idx] +=
23216 rbAllocInfo->resAllocInfo.raType1UsedRbs[idx];
23219 /* Update type 2 allocation masks */
23220 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
23222 dlSfAlloc->raType2Mask[idx] |= rbAllocInfo->resAllocInfo.raType2Mask[idx];
23225 dlSf->spsAllocdBw += numAllocRbs;
23229 /***********************************************************
23231 * Func : rgSCHCmnDlGetBestFitHole
23234 * Desc : Converts the best fit hole into allocation and returns the
23235 * allocation information.
23245 **********************************************************/
/* rgSCHCmnDlGetBestFitHole: scans allocMask (32-bit words, MSB-first bit
 * order: bit (31 - pos) of word maskIdx is RB maskIdx*32 + pos) for a run
 * of free RBs of length rbsReq. On an exact fit, writes the start RB to
 * *allocStart, rbsReq to *allocNumRbs, and ORs the hole's bits into
 * crntAllocMask. Otherwise, when isPartialAlloc is TRUE, falls back to the
 * largest hole found (bestStartPos/bestAvailNumRbs/bestMask). *allocNumRbs
 * stays 0 when nothing was allocated.
 * NOTE(review): the cmMemcpy/cmMemset calls copy a fixed 4 * sizeof(U32);
 * this presumably equals RG_SCH_NUM_RATYPE2_32BIT_MASK words -- confirm. */
23247 PRIVATE Void rgSCHCmnDlGetBestFitHole
23251 U32 *crntAllocMask,
23255 Bool isPartialAlloc
23258 PRIVATE Void rgSCHCmnDlGetBestFitHole (allocMask, numMaskRbs,
23259 crntAllocMask, rbsReq, allocStart, allocNumRbs, isPartialAlloc)
23262 U32 *crntAllocMask;
23266 Bool isPartialAlloc;
23269 U8 maskSz = (numMaskRbs + 31)/32;
23270 U8 maxMaskPos = (numMaskRbs % 32);
23271 U8 maskIdx, maskPos;
23272 U8 numAvailRbs = 0;
23273 U8 bestAvailNumRbs = 0;
23274 S8 bestStartPos = -1;
23276 U32 tmpMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23277 U32 bestMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23279 *allocNumRbs = numAvailRbs;
23282 for (maskIdx = 0; maskIdx < maskSz; ++maskIdx)
/* Last word may be only partially populated; clamp the scan limit. */
23285 if (maskIdx == (maskSz - 1))
23287 if (numMaskRbs % 32)
23289 maxMaskPos = numMaskRbs % 32;
23292 for (maskPos = 0; maskPos < maxMaskPos; ++maskPos)
23294 if (!(allocMask[maskIdx] & (1 << (31 - maskPos))))
23296 tmpMask[maskIdx] |= (1 << (31 - maskPos));
23297 if (startPos == -1)
23299 startPos = maskIdx * 32 + maskPos;
23302 if (numAvailRbs == rbsReq)
23304 *allocStart = (U8)startPos;
23305 *allocNumRbs = rbsReq;
/* Hole ended before reaching rbsReq: remember it if it is the best so far,
 * then reset the running-hole state. */
23311 if (numAvailRbs > bestAvailNumRbs)
23313 bestAvailNumRbs = numAvailRbs;
23314 bestStartPos = startPos;
23315 cmMemcpy((U8 *)bestMask, (U8 *) tmpMask, 4 * sizeof(U32));
23319 cmMemset((U8 *)tmpMask, 0, 4 * sizeof(U32));
23322 if (*allocNumRbs == rbsReq)
23328 if (*allocNumRbs == rbsReq)
23330 /* Convert the hole into allocation */
23331 cmMemcpy((U8 *)crntAllocMask, (U8 *) tmpMask, 4 * sizeof(U32));
23336 if (bestAvailNumRbs && isPartialAlloc)
23338 /* Partial allocation could have been done */
23339 *allocStart = (U8)bestStartPos;
23340 *allocNumRbs = bestAvailNumRbs;
23341 /* Convert the hole into allocation */
23342 cmMemcpy((U8 *)crntAllocMask, (U8 *) bestMask, 4 * sizeof(U32));
23348 #endif /* LTEMAC_SPS */
23350 /***************************************************************************
23352 * NON-DLFS Allocation functions
23354 * *************************************************************************/
23358 * @brief Function to find out code rate
23362 * Function : rgSCHCmnFindCodeRate
23364 * Processing Steps:
23366 * @param[in] RgSchCellCb *cell
23367 * @param[in] RgSchDlSf *dlSf
23368 * @param[in,out] RgSchDlRbAlloc *allocInfo
/* rgSCHCmnFindCodeRate: per the header, determines the effective code rate
 * for the allocation at tbInfo[idx].
 * NOTE(review): the function body is almost entirely elided in this chunk
 * (only the trailing comment survives) -- consult the full source. */
23372 PRIVATE Void rgSCHCmnFindCodeRate
23376 RgSchDlRbAlloc *allocInfo,
23380 PRIVATE Void rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,idx)
23383 RgSchDlRbAlloc *allocInfo;
23392 /* Adjust the Imcs and bytes allocated also with respect to the adjusted
23393 RBs - Here we will find out the Imcs by identifying first Highest
23394 number of bits compared to the original bytes allocated. */
23396 * @brief Adjust IMCS according to tbSize and ITBS
23400 * Function : rgSCHCmnNonDlfsPbchTbImcsAdj
23402 * Processing Steps:
23403 * - Adjust Imcs according to tbSize and ITBS.
23405 * @param[in,out] RgSchDlRbAlloc *allocInfo
23406 * @param[in] U8 *idx
/* rgSCHCmnNonDlfsPbchTbImcsAdj: after extra RBs were added for PBCH/PSS/SSS
 * overlap, walks the iTBS values downward until the TB size for the final
 * RB count (noRbs) no longer exceeds the bytes originally required for
 * rbsReq RBs, then writes back bytesReq, iTbs and the matching iMCS for
 * tbInfo[idx]. Returns early (no adjustment) when rbsReq is 0. */
23410 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj
23413 RgSchDlRbAlloc *allocInfo,
23418 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj(cell,allocInfo, idx, rbsReq)
23420 RgSchDlRbAlloc *allocInfo;
23430 RgSchDlSf *dlSf = allocInfo->dlSf;
23432 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
23433 noLyrs = allocInfo->tbInfo[idx].noLyr;
/* For RA type 0 the RB count is rounded to whole RBGs (minus the last-RBG
 * deficit); otherwise the requested RB count is used as-is. */
23435 if((allocInfo->raType == RG_SCH_CMN_RA_TYPE0))
23437 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + dlSf->lstRbgDfct), cell->rbgSize);
23438 noRbs = (noRbgs * cell->rbgSize) - dlSf->lstRbgDfct;
23442 noRbs = allocInfo->rbsReq;
23445 /* This line will help in case if tbs is zero and reduction in MCS is not possible */
23446 if (allocInfo->rbsReq == 0 )
23450 origBytesReq = rgTbSzTbl[noLyrs - 1][tbs][rbsReq - 1]/8;
23452 /* Find out the ITbs & Imcs by identifying first Highest
23453 number of bits compared to the original bytes allocated.*/
23456 if(((rgTbSzTbl[noLyrs - 1][0][noRbs - 1])/8) < origBytesReq)
23458 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyrs - 1], tbs);
23459 while(((rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1])/8) > origBytesReq)
23468 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1]/8;
23469 allocInfo->tbInfo[idx].iTbs = tbs;
23470 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23475 /* Function to adjust TB size. */
23477 * @brief Function to adjust the tbsize in case of subframe 0 & 5 when
23478 * we were not able to do RB alloc adjustment by adding extra required Rbs
23482 * Function : rgSCHCmnNonDlfsPbchTbSizeAdj
23484 * Processing Steps:
23486 * @param[in,out] RgSchDlRbAlloc *allocInfo
23487 * @param[in] U8 numOvrlapgPbchRb
23488 * @param[in] U8 idx
23489 * @param[in] U8 pbchSsRsSym
/* rgSCHCmnNonDlfsPbchTbSizeAdj: when no extra RBs could be added, lowers
 * the TB size instead -- deducts the bits lost to PBCH/PSS/SSS in the
 * overlapping RBs (numOvrlapgPbchRb * pbchSsRsSym REs, 6 bits per RE per
 * the formula) from bytesReq, then steps iTBS down until the table value
 * fits, writing back bytesReq, iTbs and iMCS for tbInfo[idx]. */
23493 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj
23495 RgSchDlRbAlloc *allocInfo,
23496 U8 numOvrlapgPbchRb,
23502 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,idx,bytesReq)
23503 RgSchDlRbAlloc *allocInfo;
23504 U8 numOvrlapgPbchRb;
23510 U32 reducedTbs = 0;
23514 noLyrs = allocInfo->tbInfo[idx].noLyr;
23516 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
/* NOTE(review): if the subtrahend exceeds bytesReq this U32 arithmetic
 * wraps around -- presumably callers guarantee it cannot; confirm. */
23518 reducedTbs = bytesReq - (((U32)numOvrlapgPbchRb * (U32)pbchSsRsSym * 6)/8);
23520 /* find out the ITbs & Imcs by identifying first Highest
23521 number of bits compared with reduced bits considering the bits that are
23522 reserved for PBCH/PSS/SSS */
23523 if(((rgTbSzTbl[noLyrs - 1][0][allocInfo->rbsReq - 1])/8) < reducedTbs)
23525 while(((rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1])/8) > reducedTbs)
23534 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1]/8;
23535 allocInfo->tbInfo[idx].iTbs = tbs;
23536 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23541 /* Function to find the number of additional RBs available in the DL BW. */
23543 * @brief Function to find out how many additional rbs are available
23544 * in the entire bw which can be allocated to a UE
23547 * Function : rgSCHCmnFindNumAddtlRbsAvl
23549 * Processing Steps:
23550 * - Calculates number of additional rbs available
23552 * @param[in] RgSchCellCb *cell
23553 * @param[in] RgSchDlSf *dlSf
23554 * @param[in,out] RgSchDlRbAlloc *allocInfo
23555 * @param[out] U8 addtlRbsAvl
/* rgSCHCmnFindNumAddtlRbsAvl: returns how many RBs beyond allocInfo->rbsReq
 * are still free in the subframe -- the type0End..type2End RBG span (minus
 * the last-RBG deficit) for RA type 0, or bw - bwAlloced for RA type 2.
 * Returns 0 for any other RA type. */
23559 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl
23563 RgSchDlRbAlloc *allocInfo
23566 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl(cell,dlSf,allocInfo)
23569 RgSchDlRbAlloc *allocInfo;
23572 U8 addtlRbsAvl = 0;
23574 TRC2(rgSCHCmnFindNumAddtlRbsAvl)
23576 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23578 addtlRbsAvl = (((dlSf->type0End - dlSf->type2End + 1)*\
23579 cell->rbgSize) - dlSf->lstRbgDfct) - allocInfo->rbsReq;
23581 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23583 addtlRbsAvl = (dlSf->bw - dlSf->bwAlloced) - allocInfo->rbsReq;
23586 RETVALUE(addtlRbsAvl);
23589 /* Function to find the number of overlapping PBCH RBs. */
23591 * @brief Function to find out how many of the requested RBs are
23592 * falling in the center 6 RBs of the downlink bandwidth.
23595 * Function : rgSCHCmnFindNumPbchOvrlapRbs
23597 * Processing Steps:
23598 * - Calculates number of overlapping rbs
23600 * @param[in] RgSchCellCb *cell
23601 * @param[in] RgSchDlSf *dlSf
23602 * @param[in,out] RgSchDlRbAlloc *allocInfo
23603 * @param[out] U8* numOvrlapgPbchRb
/* rgSCHCmnFindNumPbchOvrlapRbs: computes how many of the rbsReq RBs about
 * to be allocated (starting at dlSf->bwAlloced) fall inside the centre
 * PBCH region [cell->pbchRbStart, cell->pbchRbEnd]; result is clamped to
 * the 6-RB PBCH width and returned via *numOvrlapgPbchRb (0 when the
 * allocation is entirely outside the region). */
23607 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs
23611 RgSchDlRbAlloc *allocInfo,
23612 U8 *numOvrlapgPbchRb
23615 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,numOvrlapgPbchRb)
23618 RgSchDlRbAlloc *allocInfo;
23619 U8 *numOvrlapgPbchRb;
23622 *numOvrlapgPbchRb = 0;
23623 TRC2(rgSCHCmnFindNumPbchOvrlapRbs)
23624 /*Find if we have already crossed the start boundary for PBCH 6 RBs,
23625 * if yes then lets find the number of RBs which are getting overlapped
23626 * with this allocation.*/
23627 if(dlSf->bwAlloced <= (cell->pbchRbStart))
23629 /*We have not crossed the start boundary of PBCH RBs. Now we need
23630 * to know that if take this allocation then how much PBCH RBs
23631 * are overlapping with this allocation.*/
23632 /* Find out the overlapping RBs in the centre 6 RBs */
23633 if((dlSf->bwAlloced + allocInfo->rbsReq) > cell->pbchRbStart)
23635 *numOvrlapgPbchRb = (dlSf->bwAlloced + allocInfo->rbsReq) - (cell->pbchRbStart);
23636 if(*numOvrlapgPbchRb > 6)
23637 *numOvrlapgPbchRb = 6;
23640 else if ((dlSf->bwAlloced > (cell->pbchRbStart)) &&
23641 (dlSf->bwAlloced < (cell->pbchRbEnd)))
23643 /*We have already crossed the start boundary of PBCH RBs.We need to
23644 * find that if we take this allocation then how much of the RBs for
23645 * this allocation will overlap with PBCH RBs.*/
23646 /* Find out the overlapping RBs in the centre 6 RBs */
23647 if(dlSf->bwAlloced + allocInfo->rbsReq < (cell->pbchRbEnd))
23649 /*If we take this allocation then also we are not crossing the
23650 * end boundary of PBCH 6 RBs.*/
23651 *numOvrlapgPbchRb = allocInfo->rbsReq;
23655 /*If we take this allocation then we are crossing the
23656 * end boundary of PBCH 6 RBs.*/
23657 *numOvrlapgPbchRb = (cell->pbchRbEnd) - dlSf->bwAlloced;
23664 * @brief Performs RB allocation adjustment if the requested RBs are
23665 * falling in the center 6 RBs of the downlink bandwidth.
23668 * Function : rgSCHCmnNonDlfsPbchRbAllocAdj
23670 * Processing Steps:
23671 * - Allocate consecutively available RBs.
23673 * @param[in] RgSchCellCb *cell
23674 * @param[in,out] RgSchDlRbAlloc *allocInfo
23675 * @param[in] U8 pbchSsRsSym
/* rgSCHCmnNonDlfsPbchRbAllocAdj: compensates an allocation that overlaps
 * the centre PBCH/PSS/SSS RBs. First it tops up rbsReq with extra RBs to
 * make good the resource elements lost to the reserved symbols (and, if
 * those extra RBs themselves fall in the PBCH region, a second round of
 * extra RBs). If the top-up could not be fully granted, it falls back to
 * iMCS and/or TB-size adjustment via rgSCHCmnNonDlfsPbchTbImcsAdj /
 * rgSCHCmnNonDlfsPbchTbSizeAdj.
 * NOTE(review): the isBcchPcch parameter and several locals are declared
 * on elided lines; branch nesting below is partially hidden. */
23679 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj
23682 RgSchDlRbAlloc *allocInfo,
23687 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo,pbchSsRsSym)
23689 RgSchDlRbAlloc *allocInfo;
23694 RgSchDlSf *dlSf = allocInfo->dlSf;
23695 U8 numOvrlapgPbchRb = 0;
23696 U8 numOvrlapgAdtlPbchRb = 0;
23698 U8 addtlRbsReq = 0;
23699 U8 moreAddtlRbsReq = 0;
23700 U8 addtlRbsAdd = 0;
23701 U8 moreAddtlRbsAdd = 0;
23709 TRC2(rgSCHCmnNonDlfsPbchRbAllocAdj);
23712 origRbsReq = allocInfo->rbsReq;
23713 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23715 totSym = (cell->isCpDlExtend) ? RGSCH_TOT_NUM_SYM_EXTCP : RGSCH_TOT_NUM_SYM_NORCP;
23717 /* Additional RBs are allocated by considering the loss due to
23718 the reserved symbols for CFICH, PBCH, PSS, SSS and cell specific RS */
/* Ceiling division: lost REs / REs per RB, rounded up. */
23720 divResult = (numOvrlapgPbchRb * pbchSsRsSym)/totSym;
23721 if((numOvrlapgPbchRb * pbchSsRsSym) % totSym)
23725 addtlRbsReq = divResult;
23727 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, addtlRbsReq, addtlRbsAdd)
23729 /*Now RBs requires is original requested RBs + these additional RBs to make
23730 * up for PSS/SSS/BCCH.*/
23731 allocInfo->rbsReq = allocInfo->rbsReq + addtlRbsAdd;
23733 /*Check if with these additional RBs we have taken up, these are also falling
23734 * under PBCH RBs range, if yes then we would need to account for
23735 * PSS/BSS/BCCH for these additional RBs too.*/
23736 if(addtlRbsAdd && ((dlSf->bwAlloced + allocInfo->rbsReq - addtlRbsAdd) < (cell->pbchRbEnd)))
23738 if((dlSf->bwAlloced + allocInfo->rbsReq) <= (cell->pbchRbEnd))
23740 /*With additional RBs taken into account, we are not crossing the
23741 * PBCH RB end boundary.Thus here we need to account just for
23742 * overlapping PBCH RBs for these additional RBs.*/
23743 divResult = (addtlRbsAdd * pbchSsRsSym)/totSym;
23744 if((addtlRbsAdd * pbchSsRsSym) % totSym)
23749 moreAddtlRbsReq = divResult;
23751 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23753 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
23758 /*Here we have crossed the PBCH RB end boundary, thus we need to take
23759 * into account the overlapping RBs for additional RBs which will be
23760 * subset of addtlRbs.*/
23761 numOvrlapgAdtlPbchRb = (cell->pbchRbEnd) - ((dlSf->bwAlloced + allocInfo->rbsReq) - addtlRbsAdd);
23763 divResult = (numOvrlapgAdtlPbchRb * pbchSsRsSym)/totSym;
23764 if((numOvrlapgAdtlPbchRb * pbchSsRsSym) % totSym)
23769 moreAddtlRbsReq = divResult;
23771 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23773 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
/* BCCH/PCCH allocations keep their fixed iMCS/NPrb: undo the top-up. */
23776 if (isBcchPcch == TRUE)
23781 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23784 /* This case might be for Imcs value 6 and NPrb = 1 case - Not
23785 Adjusting either RBs or Imcs or Bytes Allocated */
23786 allocInfo->rbsReq = allocInfo->rbsReq - addtlRbsAdd - moreAddtlRbsAdd;
23788 else if(tbs && ((0 == addtlRbsAdd) && (moreAddtlRbsAdd == 0)))
23790 /*In case of a situation where we the entire bandwidth is already occupied
23791 * and we dont have room to add additional Rbs then in order to decrease the
23792 * code rate we reduce the tbsize such that we reduce the present calculated
23793 * tbsize by number of bytes that would be occupied by PBCH/PSS/SSS in overlapping
23794 * rbs and find the nearest tbsize which would be less than this deduced value*/
23796 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23798 noLyr = allocInfo->tbInfo[0].noLyr;
23799 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyr - 1], tbs);
23800 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23802 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,bytesReq);
23804 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23806 noLyr = allocInfo->tbInfo[1].noLyr;
23807 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23808 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,bytesReq);
23812 else if(tbs && ((addtlRbsAdd != addtlRbsReq) ||
23813 (addtlRbsAdd && (moreAddtlRbsReq != moreAddtlRbsAdd))))
23815 /*In case of a situation where we were not able to add required number of
23816 * additional RBs then we adjust the Imcs based on original RBs requested.
23817 * Doing this would compensate for the few extra Rbs we have added but in order
23818 * to compensate for the number of RBs we couldn't add we again do the TBSize adjustment*/
23820 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23822 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23824 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23827 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23828 numOvrlapgPbchRb = numOvrlapgPbchRb - (addtlRbsAdd + moreAddtlRbsAdd);
23830 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,allocInfo->tbInfo[0].bytesReq);
23832 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23834 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,allocInfo->tbInfo[1].bytesReq);
23840 /*We hit this code when we were able to add the required additional RBS
23841 * hence we should adjust the IMcs based on orignals RBs requested*/
23843 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23845 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23847 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23852 } /* end of rgSCHCmnNonDlfsPbchRbAllocAdj */
23856 * @brief Performs RB allocation for frequency non-selective cell.
23860 * Function : rgSCHCmnNonDlfsCmnRbAlloc
23862 * Processing Steps:
23863 * - Allocate consecutively available RBs for BCCH/PCCH/RAR.
23865 * @param[in] RgSchCellCb *cell
23866 * @param[in, out] RgSchDlRbAlloc *allocInfo
/* rgSCHCmnNonDlfsCmnRbAlloc: non-DLFS RA type 2 (contiguous) allocation for
 * common channels. Clears the per-allocation masks, optionally tries the
 * SPS bandwidth first (LTEMAC_SPS path), allocates a common-search-space
 * PDCCH with DCI format 1A, accounts for PBCH/PSS/SSS overlap in
 * subframes 0 and 5 via rgSCHCmnNonDlfsPbchRbAllocAdj, then commits the
 * type 2 allocation and updates the subframe masks.
 * NOTE(review): return statements and several brace lines are elided in
 * this chunk; the S16 status values are not visible here. */
23872 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc
23875 RgSchDlRbAlloc *allocInfo
23878 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc(cell, allocInfo)
23880 RgSchDlRbAlloc *allocInfo;
23886 U8 pbchSsRsSym = 0;
23889 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
23891 RgSchDlSf *dlSf = allocInfo->dlSf;
23894 U8 spsRbsAlloc = 0;
23895 RgSchDlSfAllocInfo *dlSfAlloc = &allocInfo->dlSf->dlSfAllocInfo;
23897 TRC2(rgSCHCmnNonDlfsCmnRbAlloc);
23899 allocInfo->tbInfo[0].noLyr = 1;
23902 /* Note: Initialize the masks to 0, this might not be needed since allocInfo
23903 * is initialized to 0 at the beginning of allocation */
23904 allocInfo->resAllocInfo.raType0Mask = 0;
23905 cmMemset((U8*)allocInfo->resAllocInfo.raType1Mask, 0,
23906 RG_SCH_NUM_RATYPE1_32BIT_MASK * sizeof (U32));
23907 cmMemset((U8*)allocInfo->resAllocInfo.raType2Mask, 0,
23908 RG_SCH_NUM_RATYPE2_32BIT_MASK * sizeof (U32));
23910 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
23911 (dlSf->bwAlloced == dlSf->bw))
23913 if(dlSf->bwAlloced == dlSf->bw)
23919 if (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced))
23922 if ((allocInfo->tbInfo[0].imcs < 29) && (dlSf->bwAlloced < dlSf->bw))
23924 if(allocInfo->tbInfo[0].imcs < 29)
23927 /* set the remaining RBs for the requested UE */
23928 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
23929 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23930 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[0][tbs][allocInfo->rbsReq - 1]/8;
23935 /* Attempt RA Type 2 allocation in SPS Bandwidth */
23936 if (dlSf->spsAllocdBw < cell->spsBwRbgInfo.numRbs)
23939 rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23940 allocInfo->rbsReq, &cell->spsBwRbgInfo, &rbStart,
23941 &allocInfo->resAllocInfo, FALSE);
23942 /* rbsAlloc assignment moved from line 16671 to here to avoid
23943 * compilation error. Recheck */
/* NOTE(review): spsRbsAlloc is still 0 here in the visible code, so this
 * add is a no-op unless an elided line assigns it -- confirm. */
23944 dlSf->spsAllocdBw += spsRbsAlloc;
23947 #endif /* LTEMAC_SPS */
23955 /* Update allocation information */
23956 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
23957 if (allocInfo->pdcch == NULLP)
23961 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
23962 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
23963 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
23964 allocInfo->allocInfo.raType2.isLocal = TRUE;
23968 allocInfo->allocInfo.raType2.rbStart = rbStart;
23969 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
23970 allocInfo->rbsAlloc = allocInfo->rbsReq;
23981 if(!(dlSf->sfNum == 5))
23983 /* case for subframes 1 to 9 except 5 */
23985 allocInfo->allocInfo.raType2.rbStart = rbStart;
23987 /*Fix for ccpu00123918*/
23988 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
23993 pbchFrame = 1; /* case for subframe 5 */
23994 /* In subframe 5, symbols are reserved for PSS and SSS and CFICH
23995 and Cell Specific Reference Signals */
23996 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PSS_SSS_SYM) *
23997 RGSCH_NUM_SC_IN_RB + cell->numCellRSPerSf);
24003 /* In subframe 0, symbols are reserved for PSS, SSS, PBCH, CFICH and
24004 and Cell Specific Reference signals */
24005 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PBCH_SYM +
24006 RGSCH_NUM_PSS_SSS_SYM) * RGSCH_NUM_SC_IN_RB +
24007 cell->numCellRSPerSf);
24008 } /* end of outer else */
24011 (((dlSf->bwAlloced + allocInfo->rbsReq) - cell->pbchRbStart) > 0)&&
24012 (dlSf->bwAlloced < cell->pbchRbEnd))
24014 if(allocInfo->tbInfo[0].imcs < 29)
24016 rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo, pbchSsRsSym, TRUE);
24028 /*Fix for ccpu00123918*/
24029 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24030 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24031 allocInfo->rbsAlloc = allocInfo->rbsReq;
24033 /* LTE_ADV_FLAG_REMOVED_START */
24035 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
24037 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
24038 allocInfo->allocInfo.raType2.rbStart, \
24039 allocInfo->allocInfo.raType2.numRb);
24044 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
24045 allocInfo->allocInfo.raType2.rbStart, \
24046 allocInfo->allocInfo.raType2.numRb);
24052 /* LTE_ADV_FLAG_REMOVED_END */
24053 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24060 /* Update type 0, 1 and 2 masks */
24061 dlSfAlloc->raType0Mask |= allocInfo->resAllocInfo.raType0Mask;
24062 #ifdef RGSCH_SPS_UNUSED
24063 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
24065 dlSfAlloc->raType1Mask[idx] |=
24066 allocInfo->resAllocInfo.raType1Mask[idx];
24067 dlSfAlloc->raType1UsedRbs[idx] +=
24068 allocInfo->resAllocInfo.raType1UsedRbs[idx];
24071 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
24073 dlSfAlloc->raType2Mask[idx] |=
24074 allocInfo->resAllocInfo.raType2Mask[idx];
24084 * @brief Performs RB allocation for frequency non-selective cell.
24088 * Function : rgSCHCmnNonDlfsCmnRbAllocRar
24090 * Processing Steps:
24091 * - Allocate consecutively available RBs for BCCH/PCCH/RAR.
24093 * @param[in] RgSchCellCb *cell
24094 * @param[in, out] RgSchDlRbAlloc *allocInfo
/* rgSCHCmnNonDlfsCmnRbAllocRar: RAR variant of the common-channel RB
 * allocator. Legacy path: allocates a common PDCCH and an RA type 2
 * contiguous span starting at dlSf->type2Start. 5GTF path: allocates a
 * DCI format B1 PDCCH and a VRBG-based grant from beam 0 of the subframe.
 * BUGFIX(review): the K&R-style definition below previously named the
 * function rgSCHCmnNonDlfsCmnRbAlloc, clashing with the ANSI prototype
 * above and duplicating the symbol already defined earlier in this file;
 * corrected to rgSCHCmnNonDlfsCmnRbAllocRar. */
24100 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAllocRar
24103 RgSchDlRbAlloc *allocInfo
24106 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAllocRar(cell, allocInfo)
24108 RgSchDlRbAlloc *allocInfo;
24111 RgSchDlSf *dlSf = allocInfo->dlSf;
24112 TRC2(rgSCHCmnNonDlfsCmnRbAllocRar);
/* No room left in this subframe: bail out early. */
24115 if(dlSf->bwAlloced == dlSf->bw)
24120 allocInfo->tbInfo[0].noLyr = 1;
24122 /* Update allocation information */
24123 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
24124 if (allocInfo->pdcch == NULLP)
24128 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
24129 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
24130 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
24131 allocInfo->allocInfo.raType2.isLocal = TRUE;
24133 /*Fix for ccpu00123918*/
24134 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24135 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24136 allocInfo->rbsAlloc = allocInfo->rbsReq;
24138 /* LTE_ADV_FLAG_REMOVED_END */
24139 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
/* 5GTF path: common PDCCH with DCI format B1 at aggregation/CQI 13. */
24142 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, NULLP, dlSf, 13, TFU_DCI_FORMAT_B1, FALSE);
24143 if (allocInfo->pdcch == NULLP)
24147 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
24148 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
24150 printf("5GTF_ERROR vrbg allocated > 25\n");
24154 allocInfo->tbInfo[0].cmnGrnt.vrbgStart = beamInfo->vrbgStart;
24155 allocInfo->tbInfo[0].cmnGrnt.numVrbg = allocInfo->vrbgReq;
24157 /* Update allocation information */
24158 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
24160 allocInfo->tbInfo[0].cmnGrnt.xPDSCHRange = 1;
24161 allocInfo->tbInfo[0].cmnGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
24162 allocInfo->tbInfo[0].cmnGrnt.vrbgStart, allocInfo->tbInfo[0].cmnGrnt.numVrbg);
24164 allocInfo->tbInfo[0].cmnGrnt.rbStrt = (allocInfo->tbInfo[0].cmnGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
24165 allocInfo->tbInfo[0].cmnGrnt.numRb = (allocInfo->tbInfo[0].cmnGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
/* Advance the beam's VRBG cursor past this grant. */
24167 beamInfo->vrbgStart += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24168 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24169 allocInfo->tbInfo[0].cmnGrnt.rv = 0;
24170 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24173 printf("\n[%s],allocInfo->tbInfo[0].bytesAlloc:%u,vrbgReq:%u\n",
24174 __func__,allocInfo->tbInfo[0].bytesAlloc,allocInfo->vrbgReq);
24180 /* LTE_ADV_FLAG_REMOVED_START */
24183 * @brief To check if DL BW available for non-DLFS allocation.
24187 * Function : rgSCHCmnNonDlfsBwAvlbl
24189 * Processing Steps:
24190 * - Determine availability based on RA Type.
24192 * @param[in] RgSchCellCb *cell
24193 * @param[in] RgSchDlSf *dlSf
24194 * @param[in] RgSchDlRbAlloc *allocInfo
/*
 * rgSCHCmnNonDlfsSFRBwAvlbl
 * Purpose : Checks whether DL bandwidth is available for a non-DLFS
 *           allocation under SFR (Soft Frequency Reuse) and, on success,
 *           selects the SFR pool (*sfrpoolInfo) the RBs must come from.
 *           Cell-edge (CE) UEs are restricted to the CE pool; cell-centre
 *           (CC) UEs try the CC pool(s) first and may fall back to the CE
 *           pool. For new transmissions that cannot be fully satisfied,
 *           allocInfo->rbsReq and tbInfo[0].bytesReq are scaled down to
 *           what the chosen pool can still provide.
 * NOTE(review): this listing is elided (original line numbers are not
 * contiguous), so braces and RETVALUE() paths are not all visible here;
 * comments below describe only what the visible lines show.
 */
24201 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl
24204 RgSchSFRPoolInfo **sfrpoolInfo,
24206 RgSchDlRbAlloc *allocInfo,
24210 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl(cell, sfrpoolInfo, dlSf, allocInfo, isUeCellEdge)
24212 RgSchSFRPoolInfo **sfrpoolInfo;
24214 RgSchDlRbAlloc *allocInfo;
24222 RgSchSFRPoolInfo *sfrPool;
24223 RgSchSFRPoolInfo *sfrCEPool;
24227 RgSchSFRPoolInfo *poolWithMaxAvlblBw = NULLP;
24229 U32 addtnlPRBs = 0;
/* Guard: whole subframe bandwidth already consumed — nothing to allocate */
24231 if (dlSf->bw <= dlSf->bwAlloced)
24233 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
24234 "BW is fully allocated for subframe (%d) CRNTI:%d", dlSf->sfNum,allocInfo->rnti);
/* Guard: cell-centre pool exhausted */
24238 if (dlSf->sfrTotalPoolInfo.ccBwFull == TRUE)
24240 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24241 "BW is fully allocated for CC Pool CRNTI:%d",allocInfo->rnti);
/* Guard: cell-edge pool exhausted and this UE may only use the CE pool */
24245 if ((dlSf->sfrTotalPoolInfo.ceBwFull == TRUE) && (isUeCellEdge))
24247 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24248 "BW is fully allocated for CE Pool CRNTI:%d",allocInfo->rnti);
24252 /* We first check if the ue scheduled is a cell edge or cell centre and accordingly check the available
24253 memory in their pool. If the cell centre UE doesn't have Bw available in its pool, then it will check
24254 Bw availability in cell edge pool but the other way around is NOT possible. */
24257 l = &dlSf->sfrTotalPoolInfo.cePool;
24261 l = &dlSf->sfrTotalPoolInfo.ccPool;
24264 n = cmLListFirst(l);
/* ---------------- RA Type 0 (RBG-based) request ---------------- */
24268 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24270 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24272 /* MS_FIX for ccpu00123919 : Number of RBs in case of RETX should be same as that of initial transmission. */
24273 if(allocInfo->tbInfo[0].tbCb->txCntr)
24275 /* If RB assignment is being done for RETX. Then if reqRbs are a multiple of rbgSize then ignore lstRbgDfct. If reqRbs is
24276 * not a multiple of rbgSize then check if lstRbgDfct exists */
24277 if (allocInfo->rbsReq % cell->rbgSize == 0)
24279 if ((sfrPool->type2End == dlSf->type2End) && dlSf->lstRbgDfct)
24281 /* In this scenario we are wasting the last RBG for this dlSf */
24282 sfrPool->type0End--;
24283 sfrPool->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24285 dlSf->lstRbgDfct = 0;
24287 /*ABHINAV To check if these variables need to be taken care of*/
24289 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24294 if (dlSf->lstRbgDfct)
24296 /* Check if type0 allocation can cater to this RETX requirement */
24297 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24303 if (sfrPool->type2End != dlSf->type2End) /*Search again for some pool which has the END RBG of the BandWidth*/
24311 /* cannot allocate same number of required RBs */
24317 /*rg002.301 ccpu00120391 MOD condition is modified appropriately to find if rbsReq is less than available RBS*/
/* Request fits in the RBG span [type2End .. type0End] of this pool */
24318 if(allocInfo->rbsReq <= (((sfrPool->type0End - sfrPool->type2End + 1)*\
24319 cell->rbgSize) - dlSf->lstRbgDfct))
24321 *sfrpoolInfo = sfrPool;
/* Pool cannot fit even one more full RBG: advance to the next pool */
24326 if (sfrPool->bw <= sfrPool->bwAlloced + cell->rbgSize)
24328 n = cmLListNext(l);
24329 /* If the ue is cell centre then it will simply check the memory available in next pool.
24330 But if there are no more memory pools available, then cell centre Ue will try to look for memory in cell edge pool */
24332 if((!isUeCellEdge) && (!n->node))
24334 l = &dlSf->sfrTotalPoolInfo.cePool;
24335 n = cmLListFirst(l);
24341 /* MS_FIX: Number of RBs in case of RETX should be same as that of initial transmission */
/* New TX only: shrink the request to what this pool can still provide
 * and recompute the TB size from the truncated RB count */
24342 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24344 /*rg002.301 ccpu00120391 MOD setting the remaining RBs for the requested UE*/
24345 allocInfo->rbsReq = (((sfrPool->type0End - sfrPool->type2End + 1)*\
24346 cell->rbgSize) - dlSf->lstRbgDfct);
24347 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24348 noLyrs = allocInfo->tbInfo[0].noLyr;
24349 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24350 *sfrpoolInfo = sfrPool;
24355 n = cmLListNext(l);
24357 /* If the ue is cell centre then it will simply check the memory available in next pool.
24358 But if there are no more memory pools available, then cell centre Ue will try to look for memory in cell edge pool */
24359 if((!isUeCellEdge) && (!n->node))
24361 l = &dlSf->sfrTotalPoolInfo.cePool;
24362 n = cmLListFirst(l);
24368 // RETVALUE(FALSE);
/* ---------------- RA Type 2 (contiguous) request ---------------- */
24371 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24373 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24374 /* This is a Case where a UE was CC and had more RBs allocated than present in CE pool.
24375 In case this UE when it becomes CE with retx going on, then BW is not sufficient for Retx */
24376 if ((isUeCellEdge) &&
24377 (allocInfo->tbInfo[0].tbCb->txCntr != 0))
24379 if(allocInfo->rbsReq > (sfrPool->bw - sfrPool->bwAlloced))
24381 /* Adjust CE BW such that Retx alloc is successful */
24382 /* Check if merging CE with adjacent CC pool will be sufficient to process Retx */
24384 /* If no Type 0 allocations are made from this pool */
24385 if (sfrPool->type0End == (((sfrPool->poolendRB + 1) / cell->rbgSize) - 1))
/* Borrow PRBs from the adjacent CC pool if it is contiguous with this
 * CE pool and together they can satisfy the RETX request */
24387 if (sfrPool->adjCCPool &&
24388 (sfrPool->adjCCPool->type2Start == sfrPool->poolendRB + 1) &&
24389 (allocInfo->rbsReq <= ((sfrPool->bw - sfrPool->bwAlloced) +
24390 ((sfrPool->adjCCPool->bw - sfrPool->adjCCPool->bwAlloced)))))
24392 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24394 /* Adjusting CE Pool Info */
24395 sfrPool->bw += addtnlPRBs;
24396 sfrPool->type0End = ((sfrPool->poolendRB + addtnlPRBs + 1) /
24397 cell->rbgSize) - 1;
24399 /* Adjusting CC Pool Info */
24400 sfrPool->adjCCPool->type2Start += addtnlPRBs;
24401 sfrPool->adjCCPool->type2End = RGSCH_CEIL(sfrPool->adjCCPool->type2Start,
24403 sfrPool->adjCCPool->bw -= addtnlPRBs;
24404 *sfrpoolInfo = sfrPool;
24411 /* Check if CC pool is one of the following:
24412 * 1. |CE| + |CC "CCPool2Exists" = TRUE|
24413 * 2. |CC "CCPool2Exists" = FALSE| + |CE| + |CC "CCPool2Exists" = TRUE|
24415 if(TRUE == sfrPool->CCPool2Exists)
24417 l1 = &dlSf->sfrTotalPoolInfo.cePool;
24418 n1 = cmLListFirst(l1);
24419 sfrCEPool = (RgSchSFRPoolInfo*)(n1->node);
24420 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced))
24422 *sfrpoolInfo = sfrCEPool;
24425 else if(allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24427 *sfrpoolInfo = sfrPool;
24430 /* Check if CE and CC boundary has unallocated prbs */
24431 else if ((sfrPool->poolstartRB == sfrPool->type2Start) &&
24432 (sfrCEPool->type0End == ((sfrCEPool->poolendRB + 1) / cell->rbgSize) - 1))
24434 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced) +
24435 (sfrPool->bw - sfrPool->bwAlloced))
24437 /* Checking if BW can be allocated partly from CE pool and partly
24440 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24441 /* Updating CE and CC type2 parameters based on the RBs allocated
24442 * from these pools*/
24443 sfrPool->type2Start -= addtnlPRBs;
24444 sfrPool->type2End = RGSCH_CEIL(sfrPool->type2Start, cell->rbgSize);
24445 sfrPool->bw += addtnlPRBs;
/* CE pool fully drained by the borrow: mark it exhausted */
24446 if (addtnlPRBs == (sfrCEPool->bw - sfrCEPool->bwAlloced))
24448 sfrCEPool->bwAlloced = sfrCEPool->bw;
24449 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24453 sfrCEPool->bw -= addtnlPRBs;
24454 sfrCEPool->type0End = ((sfrCEPool->poolendRB + 1 - addtnlPRBs) / cell->rbgSize) - 1;
24456 *sfrpoolInfo = sfrPool;
24459 else if ( bwAvlbl <
24460 ((sfrCEPool->bw - sfrCEPool->bwAlloced) +
24461 (sfrPool->bw - sfrPool->bwAlloced)))
24463 /* All the Prbs from CE BW shall be allocated */
24464 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24466 sfrPool->type2Start = sfrCEPool->type2Start;
24467 sfrPool->bw += sfrCEPool->bw - sfrCEPool->bwAlloced;
24468 sfrCEPool->type2Start = sfrCEPool->poolendRB + 1;
24469 sfrCEPool->bwAlloced = sfrCEPool->bw;
24470 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24472 /* set the remaining RBs for the requested UE */
24473 allocInfo->rbsReq = (sfrPool->bw - sfrPool->bwAlloced);
24474 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24475 noLyrs = allocInfo->tbInfo[0].noLyr;
24476 allocInfo->tbInfo[0].bytesReq =
24477 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24478 *sfrpoolInfo = sfrPool;
24489 /* Checking if no. of RBs required can be allocated from
24491 * 1. If available return the SFR pool.
24492 * 2. Else update the RBs required parameter based on the
24493 * BW available in the pool
24494 * 3. Return FALSE if no B/W is available.
24496 if (allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24498 *sfrpoolInfo = sfrPool;
24503 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24505 if (bwAvlbl < sfrPool->bw - sfrPool->bwAlloced)
24509 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
/* Remember the pool with the largest remaining BW as the fallback
 * for a truncated new-TX allocation */
24511 bwAvlbl = sfrPool->bw - sfrPool->bwAlloced;
24512 poolWithMaxAvlblBw = sfrPool;
24514 n = cmLListNext(l);
/* CC UE out of CC pools: fall back to the CE pool list */
24516 if ((isUeCellEdge == FALSE) && (n == NULLP))
24518 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24520 l = &dlSf->sfrTotalPoolInfo.cePool;
24521 n = cmLListFirst(l);
24531 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24535 dlSf->sfrTotalPoolInfo.ccBwFull = TRUE;
24541 /* set the remaining RBs for the requested UE */
24542 allocInfo->rbsReq = poolWithMaxAvlblBw->bw -
24543 poolWithMaxAvlblBw->bwAlloced;
24544 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24545 noLyrs = allocInfo->tbInfo[0].noLyr;
24546 allocInfo->tbInfo[0].bytesReq =
24547 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24548 *sfrpoolInfo = poolWithMaxAvlblBw;
24555 n = cmLListNext(l);
24557 if ((isUeCellEdge == FALSE) && (n == NULLP))
24559 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24561 l = &dlSf->sfrTotalPoolInfo.cePool;
24562 n = cmLListFirst(l);
24577 #endif /* end of ifndef LTE_TDD*/
24578 /* LTE_ADV_FLAG_REMOVED_END */
24581 * @brief To check if DL BW available for non-DLFS allocation.
24585 * Function : rgSCHCmnNonDlfsBwAvlbl
24587 * Processing Steps:
24588 * - Determine availability based on RA Type.
24590 * @param[in] RgSchCellCb *cell
24591 * @param[in] RgSchDlSf *dlSf
24592 * @param[in] RgSchDlRbAlloc *allocInfo
/*
 * rgSCHCmnNonDlfsBwAvlbl
 * Purpose : Checks whether DL bandwidth is available in subframe dlSf for
 *           a non-DLFS (non-frequency-selective) allocation described by
 *           allocInfo, for RA Type 0 (RBG-based) or RA Type 2 (contiguous).
 *           For a new transmission that cannot be fully satisfied, the
 *           request (rbsReq / bytesReq) is scaled down to the remaining BW.
 * NOTE(review): elided listing — closing braces and the RETVALUE() paths
 * are not visible; comments describe only the visible lines.
 */
24599 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl
24603 RgSchDlRbAlloc *allocInfo
24606 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl(cell, dlSf, allocInfo)
24609 RgSchDlRbAlloc *allocInfo;
24614 U8 ignoredDfctRbg = FALSE;
24616 TRC2(rgSCHCmnNonDlfsBwAvlbl);
/* Guard: subframe bandwidth already fully consumed */
24617 if (dlSf->bw <= dlSf->bwAlloced)
24619 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId, "(%d:%d)FAILED CRNTI:%d",
24620 dlSf->bw, dlSf->bwAlloced,allocInfo->rnti);
/* ---------------- RA Type 0 (RBG-based) ---------------- */
24623 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24625 /* Fix for ccpu00123919 : Number of RBs in case of RETX should be same as
24626 * that of initial transmission. */
24627 if(allocInfo->tbInfo[0].tbCb->txCntr)
24629 /* If RB assignment is being done for RETX. Then if reqRbs are
24630 * a multiple of rbgSize then ignore lstRbgDfct. If reqRbs is
24631 * not a multiple of rbgSize then check if lstRbgDfct exists */
24632 if (allocInfo->rbsReq % cell->rbgSize == 0)
24634 if (dlSf->lstRbgDfct)
24636 /* In this scenario we are wasting the last RBG for this dlSf */
24639 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24640 /* Fix: MUE_PERTTI_DL */
/* Remember that the deficit RBG was sacrificed so it can be restored
 * below if the allocation ultimately fails */
24641 dlSf->lstRbgDfct = 0;
24642 ignoredDfctRbg = TRUE;
24648 if (dlSf->lstRbgDfct)
24650 /* Check if type0 allocation can cater to this RETX requirement */
24651 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24658 /* cannot allocate same number of required RBs */
24664 /* Condition is modified appropriately to find
24665 * if rbsReq is less than available RBS*/
24666 if(allocInfo->rbsReq <= (((dlSf->type0End - dlSf->type2End + 1)*\
24667 cell->rbgSize) - dlSf->lstRbgDfct))
24671 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24672 * allocation in TDD when requested RBs are more than available RBs*/
24675 /* MS_WORKAROUND for ccpu00122022 */
24676 if (dlSf->bw < dlSf->bwAlloced + cell->rbgSize)
24678 /* ccpu00132358- Re-assigning the values which were updated above
24679 * if it is RETX and Last RBG available*/
24680 if(ignoredDfctRbg == TRUE)
24683 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24684 dlSf->lstRbgDfct = 1;
24690 /* Fix: Number of RBs in case of RETX should be same as
24691 * that of initial transmission. */
/* New TX (and not an LAA TB): truncate the request to the remaining
 * RBG span and recompute the TB size accordingly */
24692 if(allocInfo->tbInfo[0].tbCb->txCntr == 0
24694 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24698 /* Setting the remaining RBs for the requested UE*/
24699 allocInfo->rbsReq = (((dlSf->type0End - dlSf->type2End + 1)*\
24700 cell->rbgSize) - dlSf->lstRbgDfct);
24701 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24702 noLyrs = allocInfo->tbInfo[0].noLyr;
24703 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24704 /* DwPts Scheduling Changes Start */
/* TDD special subframe (DwPTS) carries ~3/4 of the normal capacity */
24706 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24708 allocInfo->tbInfo[0].bytesReq =
24709 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24712 /* DwPts Scheduling Changes End */
24716 /* ccpu00132358- Re-assigning the values which were updated above
24717 * if it is RETX and Last RBG available*/
24718 if(ignoredDfctRbg == TRUE)
24721 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24722 dlSf->lstRbgDfct = 1;
24725 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "FAILED for CRNTI:%d",
24727 printf ("RB Alloc failed for LAA TB type 0\n");
/* ---------------- RA Type 2 (contiguous) ---------------- */
24733 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24735 if (allocInfo->rbsReq <= (dlSf->bw - dlSf->bwAlloced))
24739 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24740 * allocation in TDD when requested RBs are more than available RBs*/
24743 /* Fix: Number of RBs in case of RETX should be same as
24744 * that of initial transmission. */
24745 if((allocInfo->tbInfo[0].tbCb->txCntr == 0)
24747 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24751 /* set the remaining RBs for the requested UE */
24752 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
24753 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24754 noLyrs = allocInfo->tbInfo[0].noLyr;
24755 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24756 /* DwPts Scheduling Changes Start */
24758 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24760 allocInfo->tbInfo[0].bytesReq =
24761 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24764 /* DwPts Scheduling Changes End */
24768 printf ("RB Alloc failed for LAA TB type 2\n");
24769 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24772 /* Fix: Number of RBs in case of RETX should be same as
24773 * that of initial transmission. */
24777 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24780 /* LTE_ADV_FLAG_REMOVED_START */
24783 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24787 * Function : rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24789 * Processing Steps:
24791 * @param[in] RgSchCellCb *cell
24792 * @param[in] RgSchDlSf *dlSf
24793 * @param[in] U8 rbStrt
24794 * @param[in] U8 numRb
/*
 * rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
 * Purpose : After a Type-2 (contiguous) allocation for common channels,
 *           advances the subframe-level type2Start/type2End/bwAlloced
 *           book-keeping and then propagates the allocation into every
 *           overlapping SFR pool (CC pools first, then the CE pool).
 * cell  - cell control block (rbgSize used for RBG rounding)
 * dlSf  - DL subframe whose allocation state is updated
 * rbStrt- first allocated RB; numRb - number of allocated RBs
 */
24799 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24807 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
24816 RgSchSFRPoolInfo *sfrPool;
24817 TRC2(rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc);
24819 l = &dlSf->sfrTotalPoolInfo.ccPool;
/* Subframe-level pivots: next Type-2 start, RBG-aligned end, used BW */
24821 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24822 dlSf->bwAlloced += numRb;
24823 dlSf->type2Start += numRb;
24824 n = cmLListFirst(l);
24828 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24829 n = cmLListNext(l);
24831 /* If the pool contains some RBs allocated in this allocation, e.g: Pool is [30.50]. Pool->type2Start is 40 , dlSf->type2Start is 45. then update the variables in pool */
24832 if((sfrPool->poolendRB >= dlSf->type2Start) && (sfrPool->type2Start < dlSf->type2Start))
24834 sfrPool->type2End = dlSf->type2End;
24835 sfrPool->bwAlloced = dlSf->type2Start - sfrPool->poolstartRB;
24836 sfrPool->type2Start = dlSf->type2Start;
24840 /* If the pool contains all RBs allocated in this allocation*/
/* Allocation ran past this whole pool: mark the pool fully used */
24841 if(dlSf->type2Start > sfrPool->poolendRB)
24843 sfrPool->type2End = sfrPool->type0End + 1;
24844 sfrPool->bwAlloced = sfrPool->bw;
24845 sfrPool->type2Start = sfrPool->poolendRB + 1;
/* CC pool list exhausted: continue with the CE pool list */
24850 if (l != &dlSf->sfrTotalPoolInfo.cePool)
24852 l = &dlSf->sfrTotalPoolInfo.cePool;
24853 n = cmLListFirst(l);
24863 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24867 * Function : rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24869 * Processing Steps:
24871 * @param[in] RgSchCellCb *cell
24872 * @param[in] RgSchDlSf *dlSf
24873 * @param[in] U8 rbStrt
24874 * @param[in] U8 numRb
/*
 * rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
 * Purpose : DSFR variant of the Type-2 allocation update. Checks whether
 *           the allocated range [type2Start, type2Start+numRb) falls in a
 *           high-power CC range of one of the (up to two) CC pools; if so,
 *           marks the UE P-High and records the range in the subframe's
 *           RNTP bitmap via rgSCHCmnBuildRntpInfo(). Finally advances the
 *           subframe-level type2 pivots and bwAlloced.
 * Return  : S16 (ROK/RFAILED expected; RETVALUE lines are elided here).
 */
24879 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24888 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc(cell, ue, dlSf, rbStrt, numRb)
24898 RgSchSFRPoolInfo *sfrCCPool1 = NULL;
24899 RgSchSFRPoolInfo *sfrCCPool2 = NULL;
24902 TRC2(rgSCHCmnNonDlfsUpdDSFRTyp2Alloc);
24903 /* Move the type2End pivot forward */
24906 l = &dlSf->sfrTotalPoolInfo.ccPool;
24907 n = cmLListFirst(l);
24910 sfrCCPool1 = (RgSchSFRPoolInfo*)(n->node);
24912 if (sfrCCPool1 == NULLP)
24914 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24915 "sfrCCPool1 is NULL for CRNTI:%d",ue->ueId);
/* A second CC pool may or may not exist; both cases handled below */
24918 n = cmLListNext(l);
24921 sfrCCPool2 = (RgSchSFRPoolInfo*)(n->node);
24922 n = cmLListNext(l);
24924 if((sfrCCPool1) && (sfrCCPool2))
24926 /* Based on RNTP info, the CC user is assigned high power per subframe basis */
24927 if(((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24928 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb)) ||
24929 ((dlSf->type2Start >= sfrCCPool2->pwrHiCCRange.startRb) &&
24930 (dlSf->type2Start + numRb < sfrCCPool2->pwrHiCCRange.endRb)))
24932 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24934 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24935 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24938 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24939 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
/* Single CC pool case: only pool 1's high-power range applies */
24946 if((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24947 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb))
24949 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24951 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24952 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24955 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24956 "rgSCHCmnBuildRntpInfo() function returned RFAILED CRNTI:%d",ue->ueId);
/* Advance subframe-level Type-2 book-keeping */
24962 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24964 dlSf->bwAlloced += numRb;
24965 /*MS_FIX for ccpu00123918*/
24966 dlSf->type2Start += numRb;
24970 #endif /* end of ifndef LTE_TDD*/
24971 /* LTE_ADV_FLAG_REMOVED_END */
24973 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24977 * Function : rgSCHCmnNonDlfsUpdTyp2Alloc
24979 * Processing Steps:
24981 * @param[in] RgSchCellCb *cell
24982 * @param[in] RgSchDlSf *dlSf
24983 * @param[in] U8 rbStrt
24984 * @param[in] U8 numRb
/*
 * rgSCHCmnNonDlfsUpdTyp2Alloc
 * Purpose : Updates the subframe's Type-2 (contiguous RA) book-keeping
 *           after an allocation of numRb RBs starting at rbStrt:
 *           RBG-aligns type2End, adds numRb to bwAlloced and advances
 *           type2Start past the newly allocated range.
 */
24989 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc
24997 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
25004 TRC2(rgSCHCmnNonDlfsUpdTyp2Alloc);
25005 /* Move the type2End pivot forward */
25006 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25007 //#ifndef LTEMAC_SPS
25008 dlSf->bwAlloced += numRb;
25009 /*Fix for ccpu00123918*/
25010 dlSf->type2Start += numRb;
25016 * @brief To do DL allocation using TYPE0 RA.
25020 * Function : rgSCHCmnNonDlfsType0Alloc
25022 * Processing Steps:
25023 * - Perform TYPE0 allocation using the RBGs between
25024 * type0End and type2End.
25025 * - Build the allocation mask as per RBG positioning.
25026 * - Update the allocation parameters.
25028 * @param[in] RgSchCellCb *cell
25029 * @param[in] RgSchDlSf *dlSf
25030 * @param[in] RgSchDlRbAlloc *allocInfo
/*
 * rgSCHCmnNonDlfsType0Alloc
 * Purpose : Performs a TYPE0 (RBG-based) DL allocation for a UE using the
 *           RBGs between type0End and type2End: computes the number of
 *           RBGs/RBs, caps them against UE-category limits (max TB size,
 *           max RBs, aggregate TB bits), builds the RBG allocation bitmask,
 *           moves the type0End pivot backwards and fills in the per-TB
 *           bytesAlloc (RETX TBs keep their original bytesReq).
 * NOTE(review): the K&R parameter list shows "dlUe" but line 25059
 * derives dlUe from "ue" — the real parameter is presumably the UE control
 * block; the declaration lines are elided here. TODO confirm against the
 * full source.
 */
25035 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
25039 RgSchDlRbAlloc *allocInfo,
25043 PRIVATE Void rgSCHCmnNonDlfsType0Alloc(cell, dlSf, allocInfo, dlUe)
25046 RgSchDlRbAlloc *allocInfo;
25050 U32 dlAllocMsk = 0;
/* rbgFiller: size deficit of the last (short) RBG of the subframe */
25051 U8 rbgFiller = dlSf->lstRbgDfct;
25052 U8 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25053 //U8 noRbgs = (allocInfo->rbsReq + rbgFiller)/ cell->rbgSize;
25057 U32 tb1BytesAlloc = 0;
25058 U32 tb2BytesAlloc = 0;
25059 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25061 TRC2(rgSCHCmnNonDlfsType0Alloc);
25062 //if(noRbgs == 0) noRbgs = 1; /* Not required as ceiling is used above*/
25064 /* Fix for ccpu00123919*/
25065 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
/* Clip the request if it would exceed the subframe BW */
25066 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25072 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25075 /* Fix for ccpu00138701: Ceiling is used to derive num of RBGs, Therefore,
25076 * after this operation,checking Max TB size and Max RBs are not crossed
25077 * if it is crossed then decrement num of RBGs. */
25078 //if((noRbs + rbgFiller) % cell->rbgSize)
25079 if((noRbs > allocInfo->rbsReq) &&
25080 (allocInfo->rbsReq + rbgFiller) % cell->rbgSize)
25081 {/* considering ue category limitation
25082 * due to ceiling */
25085 if (rgSCHLaaIsLaaTB(allocInfo)== FALSE)
/* Predict resulting TB sizes for new-TX TBs only (RETX sizes are fixed) */
25088 if ((allocInfo->tbInfo[0].schdlngForTb) && (!allocInfo->tbInfo[0].tbCb->txCntr))
25090 iTbs = allocInfo->tbInfo[0].iTbs;
25091 noLyr = allocInfo->tbInfo[0].noLyr;
25092 tb1BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25095 if ((allocInfo->tbInfo[1].schdlngForTb) && (!allocInfo->tbInfo[1].tbCb->txCntr))
25097 iTbs = allocInfo->tbInfo[1].iTbs;
25098 noLyr = allocInfo->tbInfo[1].noLyr;
25099 tb2BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25103 /* Only Check for New Tx No need for Retx */
25104 if (tb1BytesAlloc || tb2BytesAlloc)
/* UE-category caps: aggregate TB bits, per-TB size and max RBs */
25106 if (( ue->dl.aggTbBits >= dlUe->maxTbBits) ||
25107 (tb1BytesAlloc >= dlUe->maxTbSz/8) ||
25108 (tb2BytesAlloc >= dlUe->maxTbSz/8) ||
25109 (noRbs >= dlUe->maxRb))
25115 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25119 /* type0End would have been initially (during subfrm Init) at the bit position
25120 * (cell->noOfRbgs - 1), 0 being the most significant.
25121 * Getting DlAllocMsk for noRbgs and at the appropriate position */
25122 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - dlSf->type0End));
25123 /* Move backwards the type0End pivot */
25124 dlSf->type0End -= noRbgs;
25125 /*Fix for ccpu00123919*/
25126 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25127 /* Update the bwAlloced field accordingly */
25128 //#ifndef LTEMAC_SPS /* ccpu00129474*/
25129 dlSf->bwAlloced += noRbs;
25131 /* Update Type0 Alloc Info */
25132 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25133 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25134 allocInfo->rbsAlloc = noRbs;
25136 /* Update Tb info for each scheduled TB */
25137 iTbs = allocInfo->tbInfo[0].iTbs;
25138 noLyr = allocInfo->tbInfo[0].noLyr;
25139 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25140 * RETX TB Size is same as Init TX TB Size */
25141 if (allocInfo->tbInfo[0].tbCb->txCntr)
25143 allocInfo->tbInfo[0].bytesAlloc =
25144 allocInfo->tbInfo[0].bytesReq;
25148 allocInfo->tbInfo[0].bytesAlloc =
25149 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25150 /* DwPts Scheduling Changes Start */
/* TDD special subframe: scale usable RBs to ~3/4 for DwPTS */
25152 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25154 allocInfo->tbInfo[0].bytesAlloc =
25155 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25158 /* DwPts Scheduling Changes End */
25161 if (allocInfo->tbInfo[1].schdlngForTb)
25163 iTbs = allocInfo->tbInfo[1].iTbs;
25164 noLyr = allocInfo->tbInfo[1].noLyr;
25165 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25166 * RETX TB Size is same as Init TX TB Size */
25167 if (allocInfo->tbInfo[1].tbCb->txCntr)
25169 allocInfo->tbInfo[1].bytesAlloc =
25170 allocInfo->tbInfo[1].bytesReq;
25174 allocInfo->tbInfo[1].bytesAlloc =
25175 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;;
25176 /* DwPts Scheduling Changes Start */
25178 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25180 allocInfo->tbInfo[1].bytesAlloc =
25181 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25184 /* DwPts Scheduling Changes End */
25188 /* The last RBG which can be smaller than the RBG size is considered
25189 * only for the first time allocation of TYPE0 UE */
25190 dlSf->lstRbgDfct = 0;
25196 * @brief To prepare RNTP value from the PRB allocation (P-High -> 1 and P-Low -> 0)
25200 * Function : rgSCHCmnBuildRntpInfo
25202 * Processing Steps:
25204 * @param[in] U8 *rntpPtr
25205 * @param[in] U8 startRb
25206 * @param[in] U8 numRb
/*
 * rgSCHCmnBuildRntpInfo
 * Purpose : Sets the RNTP (Relative Narrowband Tx Power) bitmap bits for
 *           the PRB range [startRb, startRb+nmbRb) in rntpPtr, i.e. marks
 *           those PRBs as P-High. The bitmap is byte-packed, 8 PRBs per
 *           byte; a range spanning several bytes is filled byte by byte.
 * Return  : S16 (ROK/RFAILED expected; RETVALUE lines are elided here).
 */
25211 PRIVATE S16 rgSCHCmnBuildRntpInfo
25220 PRIVATE S16 rgSCHCmnBuildRntpInfo(cell, rntpPtr, startRb, nmbRb, bw)
25228 U16 rbPtrStartIdx; /* Start Index of Octet Buffer to be filled */
25229 U16 rbPtrEndIdx; /* End Index of Octet Buffer to be filled */
25230 U16 rbBitLoc; /* Bit Location to be set as 1 in the current Byte */
25231 U16 nmbRbPerByte; /* PRB's to be set in the current Byte (in case of multiple Bytes) */
25233 TRC2(rgSCHCmnBuildRntpInfo);
25235 rbPtrStartIdx = (startRb)/8;
25236 rbPtrEndIdx = (startRb + nmbRb)/8;
25238 if (rntpPtr == NULLP)
25240 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
25241 "rgSCHCmnBuildRntpInfo():"
25242 "rntpPtr can't be NULLP (Memory Allocation Failed)");
25246 while(rbPtrStartIdx <= rbPtrEndIdx)
25248 rbBitLoc = (startRb)%8;
25250 /* case 1: startRb and endRb lies in same Byte */
25251 if (rbPtrStartIdx == rbPtrEndIdx)
25253 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25254 | (((1<<nmbRb)-1)<<rbBitLoc);
25257 /* case 2: startRb and endRb lies in different Byte */
/* Fill up to the end of the current byte, then move startRb/nmbRb
 * forward so the next iteration handles the following byte */
25258 if (rbPtrStartIdx != rbPtrEndIdx)
25260 nmbRbPerByte = 8 - rbBitLoc;
25261 nmbRb = nmbRb - nmbRbPerByte;
25262 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25263 | (((1<<nmbRbPerByte)-1)<<rbBitLoc);
25264 startRb = startRb + nmbRbPerByte;
25270 /* dsfr_pal_fixes ** 21-March-2013 ** SKS ** Adding Debug logs */
25272 /* dsfr_pal_fixes ** 25-March-2013 ** SKS ** Adding Debug logs to print RNTP */
25279 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
25283 * Function : rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25285 * Processing Steps:
25287 * @param[in] RgSchCellCb *cell
25288 * @param[in] RgSchDlSf *dlSf
25289 * @param[in] U8 rbStrt
25290 * @param[in] U8 numRb
/*
 * rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
 * Purpose : Updates both the subframe-level and the SFR-pool-level Type-2
 *           book-keeping after a contiguous allocation of numRb RBs at
 *           rbStrt. When DSFR is enabled, also decides whether this CC UE
 *           is transmitted at high power (pwrHiCCRange hit) and records the
 *           range in the RNTP bitmap via rgSCHCmnBuildRntpInfo().
 * Return  : S16 (ROK/RFAILED expected; RETVALUE lines are elided here).
 */
25295 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25300 RgSchSFRPoolInfo *sfrPool,
25305 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrPool, rbStrt, numRb)
25309 RgSchSFRPoolInfo *sfrPool;
25318 TRC2(rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc);
/* RBG-align the subframe and pool type2End pivots */
25319 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25320 sfrPool->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25323 dlSf->type2Start += numRb;
25324 dlSf->bwAlloced += numRb;
/* DSFR only: RNTP reporting for cell-centre UEs in the high-power range */
25326 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
25328 /* Based on RNTP info, the CC user is assigned high power per subframe basis */
25329 if(FALSE == ue->lteAdvUeCb.rgrLteAdvUeCfg.isUeCellEdge)
25331 if((sfrPool->type2Start >= sfrPool->pwrHiCCRange.startRb) &&
25332 (sfrPool->type2Start + numRb < sfrPool->pwrHiCCRange.endRb))
25334 ue->lteAdvUeCb.isCCUePHigh = TRUE;
25336 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25337 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25340 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25341 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
/* Cell-edge UE branch (presumably — the enclosing else is elided):
 * CE allocations are always marked in the RNTP bitmap */
25348 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25349 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25352 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25353 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
/* Advance the pool-level Type-2 book-keeping */
25358 sfrPool->type2Start += numRb;
25359 sfrPool->bwAlloced += numRb;
25366 * @brief To do DL allocation using TYPE0 RA.
25370 * Function : rgSCHCmnNonDlfsSFRPoolType0Alloc
25372 * Processing Steps:
25373 * - Perform TYPE0 allocation using the RBGs between type0End and type2End.
25374 * - Build the allocation mask as per RBG positioning.
25375 * - Update the allocation parameters.
25377 * @param[in] RgSchCellCb *cell
25378 * @param[in] RgSchDlSf *dlSf
25379 * @param[in] RgSchDlRbAlloc *allocInfo
/*
 * rgSCHCmnNonDlfsSFRPoolType0Alloc
 * Purpose : Performs a TYPE0 (RBG-based) DL allocation from a specific SFR
 *           pool: computes RBG/RB counts (accounting for the short last
 *           RBG when this pool ends at the subframe edge), builds the RBG
 *           bitmask from the pool's type0End pivot, moves the pivot
 *           backwards, and updates pool/subframe bwAlloced and the per-TB
 *           bytesAlloc (RETX TBs keep their original bytesReq).
 */
25384 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc
25388 RgSchSFRPoolInfo *poolInfo,
25389 RgSchDlRbAlloc *allocInfo
25392 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, poolInfo, allocInfo)
25395 RgSchSFRPoolInfo *poolInfo;
25396 RgSchDlRbAlloc *allocInfo;
25399 U32 dlAllocMsk = 0;
25406 TRC2(rgSCHCmnNonDlfsSFRPoolType0Alloc);
/* Only the pool that reaches the subframe edge owns the short last RBG */
25408 if (poolInfo->poolstartRB + poolInfo->bw == dlSf->bw)
25410 if (poolInfo->type0End == dlSf->bw/4)
25412 rbgFiller = dlSf->lstRbgDfct;
25413 /* The last RBG which can be smaller than the RBG size is considered
25414 * only for the first time allocation of TYPE0 UE */
25415 dlSf->lstRbgDfct = 0;
25419 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25421 /* Abhinav to-do start */
25422 /* MS_FIX for ccpu00123919*/
25423 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
/* Clip the request if it would exceed the subframe BW */
25424 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25430 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25432 /* Abhinav to-do end */
25436 /* type0End would have been initially (during subfrm Init) at the bit position
25437 * (cell->noOfRbgs - 1), 0 being the most significant.
25438 * Getting DlAllocMsk for noRbgs and at the appropriate position */
25439 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - poolInfo->type0End));
25440 /* Move backwards the type0End pivot */
25441 poolInfo->type0End -= noRbgs;
25442 /*MS_FIX for ccpu00123919*/
25443 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25444 /* Update the bwAlloced field accordingly */
25445 poolInfo->bwAlloced += noRbs + dlSf->lstRbgDfct;
25446 dlSf->bwAlloced += noRbs + dlSf->lstRbgDfct;
25448 /* Update Type0 Alloc Info */
25449 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25450 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25451 allocInfo->rbsAlloc = noRbs;
25453 /* Update Tb info for each scheduled TB */
25454 iTbs = allocInfo->tbInfo[0].iTbs;
25455 noLyr = allocInfo->tbInfo[0].noLyr;
25456 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25457 * RETX TB Size is same as Init TX TB Size */
25458 if (allocInfo->tbInfo[0].tbCb->txCntr)
25460 allocInfo->tbInfo[0].bytesAlloc =
25461 allocInfo->tbInfo[0].bytesReq;
25465 allocInfo->tbInfo[0].bytesAlloc =
25466 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25469 if (allocInfo->tbInfo[1].schdlngForTb)
25471 iTbs = allocInfo->tbInfo[1].iTbs;
25472 noLyr = allocInfo->tbInfo[1].noLyr;
25473 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25474 * RETX TB Size is same as Init TX TB Size */
25475 if (allocInfo->tbInfo[1].tbCb->txCntr)
25477 allocInfo->tbInfo[1].bytesAlloc =
25478 allocInfo->tbInfo[1].bytesReq;
25482 allocInfo->tbInfo[1].bytesAlloc =
25483 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;;
25487 /* The last RBG which can be smaller than the RBG size is considered
25488 * only for the first time allocation of TYPE0 UE */
25489 dlSf->lstRbgDfct = 0;
25494 * @brief Computes RNTP Info for a subframe.
25498 * Function : rgSCHCmnNonDlfsDsfrRntpComp
25500 * Processing Steps:
25501 * - Computes RNTP info from individual pools.
25503 * @param[in] RgSchDlSf *dlSf
/*
 * rgSCHCmnNonDlfsDsfrRntpComp
 * Purpose : Aggregates (ORs) the subframe's RNTP bitmap into the cell-level
 *           accumulator every TTI. After RG_SCH_MAX_RNTP_SAMPLES samples
 *           (1000 ms per the comment below), sends the aggregated RNTP to
 *           the application as an RGR_SFR load-information indication for
 *           neighbouring eNBs, then clears the accumulator.
 * Note    : "samples" is declared PRIVATE (maps to static — presumably),
 *           so its value persists across calls. TODO confirm the macro.
 */
25509 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp
25515 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp(cell, dlSf)
25520 PRIVATE U16 samples = 0;
25522 U16 bwBytes = (dlSf->bw-1)/8;
25523 RgrLoadInfIndInfo *rgrLoadInf;
25527 TRC2(rgSCHCmnNonDlfsDsfrRntpComp);
/* Number of bytes needed to hold one bit per PRB, rounded up */
25529 len = (dlSf->bw % 8 == 0) ? dlSf->bw/8 : dlSf->bw/8 + 1;
25531 /* RNTP info is ORed every TTI and the sample is stored in cell control block */
25532 for(i = 0; i <= bwBytes; i++)
25534 cell->rntpAggrInfo.val[i] |= dlSf->rntpInfo.val[i];
25536 samples = samples + 1;
25537 /* After every 1000 ms, the RNTP info will be sent to application to be further sent to all neighbouring eNB
25538 informing them about the load indication for cell edge users */
25539 if(RG_SCH_MAX_RNTP_SAMPLES == samples)
/* Ownership of rgrLoadInf passes to rgSCHUtlRgrLoadInfInd — presumably;
 * the failure-path cleanup lines are elided here */
25542 ret = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&rgrLoadInf,
25543 sizeof(RgrLoadInfIndInfo));
25546 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
25547 "allocate memory for sending LoadInfo");
25551 rgrLoadInf->u.rntpInfo.pres = cell->rntpAggrInfo.pres;
25552 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25553 rgrLoadInf->u.rntpInfo.len = len;
25555 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25556 rgrLoadInf->u.rntpInfo.val = cell->rntpAggrInfo.val;
25557 rgrLoadInf->cellId = cell->cellId;
25559 /* dsfr_pal_fixes ** 22-March-2013 ** SKS */
25560 rgrLoadInf->bw = dlSf->bw;
25561 rgrLoadInf->type = RGR_SFR;
25563 ret = rgSCHUtlRgrLoadInfInd(cell, rgrLoadInf);
25566 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsDsfrRntpComp():"
25567 "rgSCHUtlRgrLoadInfInd() returned RFAILED");
/* Reset the accumulator for the next sampling window */
25570 cmMemset(cell->rntpAggrInfo.val,0,len);
25574 /* LTE_ADV_FLAG_REMOVED_END */
25576 /* LTE_ADV_FLAG_REMOVED_START */
25578 * @brief Performs RB allocation per UE from a pool.
25582 * Function : rgSCHCmnSFRNonDlfsUeRbAlloc
25584 * Processing Steps:
25585 * - Allocate consecutively available RBs.
25587 * @param[in] RgSchCellCb *cell
25588 * @param[in] RgSchUeCb *ue
25589 * @param[in] RgSchDlSf *dlSf
25590 * @param[out] U8 *isDlBwAvail
/*
 * rgSCHCmnSFRNonDlfsUeRbAlloc
 * Purpose : Per-UE RB allocation for a frequency-non-selective cell with
 *           SFR: finds an SFR pool with available BW, allocates a PDCCH
 *           (with DTX handling), then performs the RA Type-2 or Type-0
 *           allocation from that pool and updates the allocation info.
 * isDlBwAvail - OUT: FALSE only when no pool has BW for this UE and other
 *               UEs cannot be scheduled either (see SFR_FIX comment below).
 * Return  : S16 (ROK/RFAILED expected; RETVALUE lines are elided here).
 */
25597 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc
25605 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25612 RgSchDlRbAlloc *allocInfo;
25613 RgSchCmnDlUe *dlUe;
25615 RgSchSFRPoolInfo *sfrpoolInfo = NULLP;
25617 TRC2(rgSCHCmnSFRNonDlfsUeRbAlloc);
25619 isUECellEdge = RG_SCH_CMN_IS_UE_CELL_EDGE(ue);
25621 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25622 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25623 *isDlBwAvail = TRUE;
25625 /*Find which pool is available for this UE*/
25626 if (rgSCHCmnNonDlfsSFRBwAvlbl(cell, &sfrpoolInfo, dlSf, allocInfo, isUECellEdge) != TRUE)
25628 /* SFR_FIX - If this is CE UE there may be BW available in CC Pool
25629 So CC UEs will be scheduled */
25632 *isDlBwAvail = TRUE;
25636 *isDlBwAvail = FALSE;
/* On DTX feedback, request a fresh PDCCH with the DTX flag set so the
 * retransmission uses a newly signalled grant */
25641 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX || dlUe->proc->tbInfo[1].isAckNackDtx)
25643 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25647 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25650 if (!(allocInfo->pdcch))
25652 /* Returning ROK since PDCCH might be available for another UE and further allocations could be done */
25657 allocInfo->rnti = ue->ueId;
/* ---------------- RA Type 2: contiguous RBs from the chosen pool ---- */
25660 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
25662 allocInfo->allocInfo.raType2.isLocal = TRUE;
25663 /* rg004.201 patch - ccpu00109921 fix end */
25664 /* MS_FIX for ccpu00123918*/
25665 allocInfo->allocInfo.raType2.rbStart = (U8)sfrpoolInfo->type2Start;
25666 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
25667 /* rg007.201 - Changes for MIMO feature addition */
25668 /* rg008.201 - Removed dependency on MIMO compile-time flag */
25669 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrpoolInfo, \
25670 allocInfo->allocInfo.raType2.rbStart, \
25671 allocInfo->allocInfo.raType2.numRb);
25672 allocInfo->rbsAlloc = allocInfo->rbsReq;
25673 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
/* ---------------- RA Type 0: RBG-based from the chosen pool --------- */
25675 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
25677 rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, sfrpoolInfo, allocInfo);
25681 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,0);
25682 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
25684 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,1);
25689 #if defined(LTEMAC_SPS)
25690 /* Update the sub-frame with new allocation */
25691 dlSf->bwAlloced += allocInfo->rbsReq;
25696 /* LTE_ADV_FLAG_REMOVED_END */
25697 #endif /* LTE_TDD */
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): per-UE allocation for a frequency
 * non-selective cell on the 5GTF path — allocates a PDCCH, then fills
 * the DL grant in VRBG units for the UE's beam and advances the beam's
 * VRBG cursor. */
25700 * @brief Performs RB allocation per UE for frequency non-selective cell.
25704 * Function : rgSCHCmnNonDlfsUeRbAlloc
25706 * Processing Steps:
25707 * - Allocate consecutively available RBs.
25709 * @param[in] RgSchCellCb *cell
25710 * @param[in] RgSchUeCb *ue
25711 * @param[in] RgSchDlSf *dlSf
25712 * @param[out] U8 *isDlBwAvail
25719 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc
25727 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25734 RgSchDlRbAlloc *allocInfo;
25735 RgSchCmnDlUe *dlUe;
25739 TRC2(rgSCHCmnNonDlfsUeRbAlloc);
25742 RgSch5gtfUeCb *ue5gtfCb = &(ue->ue5gtfCb);
25743 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[ue5gtfCb->BeamId]);
25745 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25746 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25747 *isDlBwAvail = TRUE;
/* Guard: beam already at/over its VRBG budget (MAX_5GTF_VRBG, log says 25). */
25749 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
25751 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25752 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
25754 printf("5GTF_ERROR vrbg allocated > 25\n");
25758 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX
25759 || dlUe->proc->tbInfo[1].isAckNackDtx)
25761 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25765 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25767 if (!(allocInfo->pdcch))
25769 /* Returning ROK since PDCCH might be available for another UE and
25770 * further allocations could be done */
25771 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25772 "5GTF_ERROR : PDCCH allocation failed :ue (%u)",
25774 printf("5GTF_ERROR PDCCH allocation failed\n");
25778 //maxPrb = RGSCH_MIN((allocInfo->vrbgReq * MAX_5GTF_VRBG_SIZE), ue5gtfCb->maxPrb);
25779 //maxPrb = RGSCH_MIN(maxPrb,
25780 //((beamInfo->totVrbgAvail - beamInfo->vrbgStart)* MAX_5GTF_VRBG_SIZE)));
25781 //TODO_SID Need to check for vrbg available after scheduling for same beam.
/* Grant filled in VRBG units; rbAssign is the RIV over MAX_5GTF_VRBG. */
25782 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
25783 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
25784 //TODO_SID: Setting for max TP
25785 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
25786 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
25787 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
25788 allocInfo->tbInfo[0].tbCb->dlGrnt.SCID = 0;
25789 allocInfo->tbInfo[0].tbCb->dlGrnt.dciFormat = allocInfo->dciFormat;
25790 //Filling temporarily
25791 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
25792 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
/* Advance the per-beam VRBG cursor/accounting past this grant. */
25794 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25795 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25796 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): walks the CCCH-SDU HARQ-process list
 * (retx list when isRetx, else tx list), tries a per-UE RB allocation
 * for each entry, and moves it to the scheduled or non-scheduled list.
 * On the first allocation failure, the remainder of the list is bulk
 * moved to the non-scheduled list. */
25804 * @brief Performs RB allocation for Msg4 for frequency non-selective cell.
25808 * Function : rgSCHCmnNonDlfsCcchSduAlloc
25810 * Processing Steps:
25811 * - For each element in the list, Call rgSCHCmnNonDlfsCcchSduRbAlloc().
25812 * - If allocation is successful, add the ueCb to scheduled list of CCCH
25814 * - else, add UeCb to non-scheduled list.
25816 * @param[in] RgSchCellCb *cell
25817 * @param[in, out] RgSchCmnCcchSduRbAlloc *allocInfo
25818 * @param[in] U8 isRetx
25823 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc
25826 RgSchCmnCcchSduRbAlloc *allocInfo,
25830 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc(cell, allocInfo, isRetx)
25832 RgSchCmnCcchSduRbAlloc *allocInfo;
25837 CmLListCp *ccchSduLst = NULLP;
25838 CmLListCp *schdCcchSduLst = NULLP;
25839 CmLListCp *nonSchdCcchSduLst = NULLP;
25840 CmLList *schdLnkNode = NULLP;
25841 CmLList *toBeSchdLnk = NULLP;
25842 RgSchDlSf *dlSf = allocInfo->ccchSduDlSf;
25843 RgSchUeCb *ueCb = NULLP;
25844 RgSchDlHqProcCb *hqP = NULLP;
25845 TRC2(rgSCHCmnNonDlfsCcchSduAlloc);
25849 /* Initialize re-transmitting lists */
25850 ccchSduLst = &(allocInfo->ccchSduRetxLst);
25851 schdCcchSduLst = &(allocInfo->schdCcchSduRetxLst);
25852 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduRetxLst);
25856 /* Initialize transmitting lists */
25857 ccchSduLst = &(allocInfo->ccchSduTxLst);
25858 schdCcchSduLst = &(allocInfo->schdCcchSduTxLst);
25859 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduTxLst);
25862 /* Perform allocaations for the list */
25863 toBeSchdLnk = cmLListFirst(ccchSduLst);
25864 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
25866 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25867 ueCb = hqP->hqE->ue;
25868 schdLnkNode = &hqP->schdLstLnk;
25869 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25870 ret = rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf);
25873 /* Allocation failed: Add remaining MSG4 nodes to non-scheduled
25874 * list and return */
/* Drain loop: everything from the failing node onward is parked on the
 * non-scheduled list (no further allocation attempts this TTI). */
25877 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25878 ueCb = hqP->hqE->ue;
25879 schdLnkNode = &hqP->schdLstLnk;
25880 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25881 cmLListAdd2Tail(nonSchdCcchSduLst, schdLnkNode);
25882 toBeSchdLnk = toBeSchdLnk->next;
25883 } while(toBeSchdLnk);
25887 /* Allocation successful: Add UE to the scheduled list */
25888 cmLListAdd2Tail(schdCcchSduLst, schdLnkNode);
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): single-UE RB allocation for a CCCH SDU on
 * a non-DLFS cell. Checks remaining subframe bandwidth, allocates a
 * DCI-1A PDCCH, then performs a localized RA type 2 allocation starting
 * at the subframe's type2Start and updates subframe accounting. */
25896 * @brief Performs RB allocation for CcchSdu for frequency non-selective cell.
25900 * Function : rgSCHCmnNonDlfsCcchSduRbAlloc
25902 * Processing Steps:
25904 * - Allocate consecutively available RBs
25906 * @param[in] RgSchCellCb *cell
25907 * @param[in] RgSchUeCb *ueCb
25908 * @param[in] RgSchDlSf *dlSf
25914 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc
25921 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf)
25927 RgSchDlRbAlloc *allocInfo;
25928 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
25930 TRC2(rgSCHCmnNonDlfsCcchSduRbAlloc);
25933 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb,cell);
25935 /* [ccpu00138802]-MOD-If Bw is less than required, return fail
25936 It will be allocated in next TTI */
/* Two visible guard variants (SPS vs plain); elided #ifdef lines
 * presumably select between them — confirm against full source. */
25938 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
25939 (dlSf->bwAlloced == dlSf->bw))
25941 if((dlSf->bwAlloced == dlSf->bw) ||
25942 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
25947 /* Retrieve PDCCH */
25948 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
25949 if (ueDl->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
25951 /* allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, dlSf, y, ueDl->cqi,
25952 * TFU_DCI_FORMAT_1A, TRUE);*/
25953 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, TRUE);
25957 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE);
25959 if (!(allocInfo->pdcch))
25961 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
25965 /* Update allocation information */
25966 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
25967 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
25968 allocInfo->allocInfo.raType2.isLocal = TRUE;
25970 /*Fix for ccpu00123918*/
25971 /* Push this harq process back to the free queue */
25972 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
25973 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
25974 allocInfo->rbsAlloc = allocInfo->rbsReq;
25975 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25976 /* Update the sub-frame with new allocation */
25978 /* LTE_ADV_FLAG_REMOVED_START */
/* With SFR enabled, common-channel type-2 updates go through the SFR
 * pool bookkeeping instead of the plain subframe update. */
25980 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
25982 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf,
25983 allocInfo->allocInfo.raType2.rbStart,
25984 allocInfo->allocInfo.raType2.numRb);
25987 #endif /* end of ifndef LTE_TDD*/
25989 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf,
25990 allocInfo->allocInfo.raType2.rbStart,
25991 allocInfo->allocInfo.raType2.numRb);
25994 /* LTE_ADV_FLAG_REMOVED_END */
25995 /* ccpu00131941 - bwAlloced is updated from SPS bandwidth */
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): RB allocation for a Msg4 transmission on
 * a non-DLFS cell. Two visible variants: a legacy RA type 2 path
 * (DCI 1A, type2Start based) and a 5GTF path (DCI B1, VRBG/beam based);
 * elided #ifdef lines presumably select between them. */
26003 * @brief Performs RB allocation for Msg4 for frequency non-selective cell.
26007 * Function : rgSCHCmnNonDlfsMsg4RbAlloc
26009 * Processing Steps:
26011 * - Allocate consecutively available RBs
26013 * @param[in] RgSchCellCb *cell
26014 * @param[in] RgSchRaCb *raCb
26015 * @param[in] RgSchDlSf *dlSf
26021 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc
26028 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf)
26034 RgSchDlRbAlloc *allocInfo;
26035 TRC2(rgSCHCmnNonDlfsMsg4RbAlloc);
26038 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_RACB(raCb);
/* 5GTF path: Msg4 always uses beam 0 of the subframe. */
26041 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
26042 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
26044 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
26045 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
26047 printf("5GTF_ERROR vrbg allocated > 25\n");
26052 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
26053 (dlSf->bwAlloced == dlSf->bw))
26055 if((dlSf->bwAlloced == dlSf->bw) ||
26056 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
26063 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
26064 if (raCb->dlHqE->msg4Proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
26066 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, TRUE);
26070 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, FALSE);
26072 if (!(allocInfo->pdcch))
26074 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
26079 /* SR_RACH_STATS : MSG4 TX Failed */
26080 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
26082 /* Update allocation information */
26083 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
26084 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
26085 allocInfo->allocInfo.raType2.isLocal = TRUE;
26088 /*Fix for ccpu00123918*/
26089 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
26090 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
26091 /* LTE_ADV_FLAG_REMOVED_START */
26093 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
26095 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
26096 allocInfo->allocInfo.raType2.rbStart, \
26097 allocInfo->allocInfo.raType2.numRb);
26100 #endif /* end of ifndef LTE_TDD */
26102 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
26103 allocInfo->allocInfo.raType2.rbStart, \
26104 allocInfo->allocInfo.raType2.numRb);
26106 /* LTE_ADV_FLAG_REMOVED_END */
26108 allocInfo->rbsAlloc = allocInfo->rbsReq;
26109 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26113 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
/* 5GTF grant: VRBG-based, mirrors rgSCHCmnNonDlfsUeRbAlloc. */
26115 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
26116 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
26118 /* Update allocation information */
26119 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26121 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
26122 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
26123 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
26125 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
26126 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
26129 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26130 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26131 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): walks the Msg4 HARQ-process list (retx
 * when isRetx, else tx), attempts per-RaCb allocation, and files each
 * entry on the scheduled or non-scheduled list; on first failure the
 * rest of the list is bulk moved to non-scheduled. Structurally parallel
 * to rgSCHCmnNonDlfsCcchSduAlloc. */
26139 * @brief Performs RB allocation for Msg4 lists of frequency non-selective cell.
26143 * Function : rgSCHCmnNonDlfsMsg4Alloc
26145 * Processing Steps:
26146 * - For each element in the list, Call rgSCHCmnNonDlfsMsg4RbAlloc().
26147 * - If allocation is successful, add the raCb to scheduled list of MSG4.
26148 * - else, add RaCb to non-scheduled list.
26150 * @param[in] RgSchCellCb *cell
26151 * @param[in, out] RgSchCmnMsg4RbAlloc *allocInfo
26152 * @param[in] U8 isRetx
26157 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc
26160 RgSchCmnMsg4RbAlloc *allocInfo,
26164 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc(cell, allocInfo, isRetx)
26166 RgSchCmnMsg4RbAlloc *allocInfo;
26171 CmLListCp *msg4Lst = NULLP;
26172 CmLListCp *schdMsg4Lst = NULLP;
26173 CmLListCp *nonSchdMsg4Lst = NULLP;
26174 CmLList *schdLnkNode = NULLP;
26175 CmLList *toBeSchdLnk = NULLP;
26176 RgSchDlSf *dlSf = allocInfo->msg4DlSf;
26177 RgSchRaCb *raCb = NULLP;
26178 RgSchDlHqProcCb *hqP = NULLP;
26179 TRC2(rgSCHCmnNonDlfsMsg4Alloc);
26183 /* Initialize re-transmitting lists */
26184 msg4Lst = &(allocInfo->msg4RetxLst);
26185 schdMsg4Lst = &(allocInfo->schdMsg4RetxLst);
26186 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4RetxLst);
26190 /* Initialize transmitting lists */
26191 msg4Lst = &(allocInfo->msg4TxLst);
26192 schdMsg4Lst = &(allocInfo->schdMsg4TxLst);
26193 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4TxLst);
26196 /* Perform allocaations for the list */
26197 toBeSchdLnk = cmLListFirst(msg4Lst);
26198 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26200 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26201 raCb = hqP->hqE->raCb;
26202 schdLnkNode = &hqP->schdLstLnk;
26203 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26204 ret = rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf);
26207 /* Allocation failed: Add remaining MSG4 nodes to non-scheduled
26208 * list and return */
/* Drain loop: park the failing node and all following nodes. */
26211 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26212 raCb = hqP->hqE->raCb;
26213 schdLnkNode = &hqP->schdLstLnk;
26214 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26215 cmLListAdd2Tail(nonSchdMsg4Lst, schdLnkNode);
26216 toBeSchdLnk = toBeSchdLnk->next;
26217 } while(toBeSchdLnk);
26221 /* Allocation successful: Add UE to the scheduled list */
26222 cmLListAdd2Tail(schdMsg4Lst, schdLnkNode);
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): allocates dedicated (per-UE) RBs for a
 * list of HARQ processes; successful entries go to schdHqPLst, failures
 * to nonSchdHqPLst. isDlBwAvail (set by the per-UE allocator) gates the
 * early bulk move of the remaining entries. */
26233 * @brief Performs RB allocation for the list of UEs of a frequency
26234 * non-selective cell.
26238 * Function : rgSCHCmnNonDlfsDedRbAlloc
26240 * Processing Steps:
26241 * - For each element in the list, Call rgSCHCmnNonDlfsUeRbAlloc().
26242 * - If allocation is successful, add the ueCb to scheduled list of UEs.
26243 * - else, add ueCb to non-scheduled list of UEs.
26245 * @param[in] RgSchCellCb *cell
26246 * @param[in, out] RgSchCmnUeRbAlloc *allocInfo
26247 * @param[in] CmLListCp *ueLst,
26248 * @param[in, out] CmLListCp *schdHqPLst,
26249 * @param[in, out] CmLListCp *nonSchdHqPLst
26254 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc
26257 RgSchCmnUeRbAlloc *allocInfo,
26259 CmLListCp *schdHqPLst,
26260 CmLListCp *nonSchdHqPLst
26263 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc(cell, allocInfo, ueLst,
26264 schdHqPLst, nonSchdHqPLst)
26266 RgSchCmnUeRbAlloc *allocInfo;
26268 CmLListCp *schdHqPLst;
26269 CmLListCp *nonSchdHqPLst;
26273 CmLList *schdLnkNode = NULLP;
26274 CmLList *toBeSchdLnk = NULLP;
26275 RgSchDlSf *dlSf = allocInfo->dedDlSf;
26276 RgSchUeCb *ue = NULLP;
26277 RgSchDlHqProcCb *hqP = NULLP;
26279 TRC2(rgSCHCmnNonDlfsDedRbAlloc);
26282 /* Perform allocaations for the list */
26283 toBeSchdLnk = cmLListFirst(ueLst);
26284 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26286 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26288 schdLnkNode = &hqP->schdLstLnk;
26289 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26291 ret = rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, &isDlBwAvail);
26294 /* Allocation failed: Add remaining UEs to non-scheduled
26295 * list and return */
/* Drain loop: no DL bandwidth left — park all remaining processes. */
26298 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26300 schdLnkNode = &hqP->schdLstLnk;
26301 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26302 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
26303 toBeSchdLnk = toBeSchdLnk->next;
26304 } while(toBeSchdLnk);
26310 #if defined (TENB_STATS) && defined (RG_5GTF)
26311 cell->tenbStats->sch.dl5gtfRbAllocPass++;
26313 /* Allocation successful: Add UE to the scheduled list */
26314 cmLListAdd2Tail(schdHqPLst, schdLnkNode);
26318 #if defined (TENB_STATS) && defined (RG_5GTF)
26319 cell->tenbStats->sch.dl5gtfRbAllocFail++;
26321 /* Allocation failed : Add UE to the non-scheduled list */
26322 printf("5GTF_ERROR Dl rb alloc failed adding nonSchdHqPLst\n");
26323 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): top-level per-TTI RB allocation driver for
 * a frequency non-selective cell. Order of service: Msg4 retx, Msg4 tx,
 * CCCH-SDU retx, CCCH-SDU tx, RA responses, then dedicated tx+retx /
 * retx / tx lists; finally DSFR RNTP compilation when DSFR is enabled. */
26331 * @brief Handles RB allocation for frequency non-selective cell.
26335 * Function : rgSCHCmnNonDlfsRbAlloc
26337 * Invoking Module Processing:
26338 * - SCH shall invoke this if downlink frequency selective is disabled for
26339 * the cell for RB allocation.
26340 * - MAX C/I/PFS/RR shall provide the requiredBytes, required RBs
26341 * estimate and subframe for each allocation to be made to SCH.
26343 * Processing Steps:
26344 * - Allocate sequentially for common channels.
26345 * - For transmitting and re-transmitting UE list.
26347 * - Perform wide-band allocations for UE in increasing order of
26349 * - Determine Imcs for the allocation.
26350 * - Determine RA type.
26351 * - Determine DCI format.
26353 * @param[in] RgSchCellCb *cell
26354 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
26359 PUBLIC Void rgSCHCmnNonDlfsRbAlloc
26362 RgSchCmnDlRbAllocInfo *allocInfo
26365 PUBLIC Void rgSCHCmnNonDlfsRbAlloc(cell, allocInfo)
26367 RgSchCmnDlRbAllocInfo *allocInfo;
26371 RgSchDlRbAlloc *reqAllocInfo;
26372 TRC2(rgSCHCmnNonDlfsRbAlloc);
26374 /* Allocate for MSG4 retransmissions */
26375 if (allocInfo->msg4Alloc.msg4RetxLst.count)
26377 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc RetxLst\n");
26378 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), TRUE);
26381 /* Allocate for MSG4 transmissions */
26382 /* Assuming all the nodes in the list need allocations: rbsReq is valid */
26383 if (allocInfo->msg4Alloc.msg4TxLst.count)
26385 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc txLst\n");
26386 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), FALSE);
26389 /* Allocate for CCCH SDU (received after guard timer expiry)
26390 * retransmissions */
26391 if (allocInfo->ccchSduAlloc.ccchSduRetxLst.count)
26393 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26394 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), TRUE);
26397 /* Allocate for CCCD SDU transmissions */
26398 /* Allocate for CCCH SDU (received after guard timer expiry) transmissions */
26399 if (allocInfo->ccchSduAlloc.ccchSduTxLst.count)
26401 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26402 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), FALSE);
26406 /* Allocate for Random access response */
26407 for (raRspCnt = 0; raRspCnt < RG_SCH_CMN_MAX_CMN_PDCCH; ++raRspCnt)
26409 /* Assuming that the requests will be filled in sequentially */
26410 reqAllocInfo = &(allocInfo->raRspAlloc[raRspCnt]);
26411 if (!reqAllocInfo->rbsReq)
26415 printf("5GTF_ERROR calling RAR rgSCHCmnNonDlfsCmnRbAlloc\n");
26416 // if ((rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo)) != ROK)
26417 if ((rgSCHCmnNonDlfsCmnRbAllocRar(cell, reqAllocInfo)) != ROK)
26423 /* Allocate for RETX+TX UEs */
26424 if(allocInfo->dedAlloc.txRetxHqPLst.count)
26426 printf("5GTF_ERROR TX RETX rgSCHCmnNonDlfsDedRbAlloc\n");
26427 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26428 &(allocInfo->dedAlloc.txRetxHqPLst),
26429 &(allocInfo->dedAlloc.schdTxRetxHqPLst),
26430 &(allocInfo->dedAlloc.nonSchdTxRetxHqPLst));
26433 if((allocInfo->dedAlloc.retxHqPLst.count))
26435 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26436 &(allocInfo->dedAlloc.retxHqPLst),
26437 &(allocInfo->dedAlloc.schdRetxHqPLst),
26438 &(allocInfo->dedAlloc.nonSchdRetxHqPLst));
26441 /* Allocate for transmitting UEs */
26442 if((allocInfo->dedAlloc.txHqPLst.count))
26444 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26445 &(allocInfo->dedAlloc.txHqPLst),
26446 &(allocInfo->dedAlloc.schdTxHqPLst),
26447 &(allocInfo->dedAlloc.nonSchdTxHqPLst));
/* Diagnostic: selected UE count vs the per-subframe cap; the two
 * RGSCHDBGERRNEW variants differ only in %ld vs %d for the list counts,
 * chosen by the ALIGN_64BIT platform switch. */
26450 RgSchCmnCell *cmnCell = RG_SCH_CMN_GET_CELL(cell);
26451 if ((allocInfo->dedAlloc.txRetxHqPLst.count +
26452 allocInfo->dedAlloc.retxHqPLst.count +
26453 allocInfo->dedAlloc.txHqPLst.count) >
26454 cmnCell->dl.maxUePerDlSf)
26456 #ifndef ALIGN_64BIT
26457 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26458 " scheduler exceed maximumUePerDlSf(%u)tx-retx %ld retx %ld tx %ld\n",
26459 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26460 allocInfo->dedAlloc.retxHqPLst.count,
26461 allocInfo->dedAlloc.txHqPLst.count));
26463 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26464 " scheduler exceed maximumUePerDlSf(%u)tx-retx %d retx %d tx %d\n",
26465 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26466 allocInfo->dedAlloc.retxHqPLst.count,
26467 allocInfo->dedAlloc.txHqPLst.count));
26472 /* LTE_ADV_FLAG_REMOVED_START */
26473 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
26475 printf("5GTF_ERROR RETX rgSCHCmnNonDlfsDsfrRntpComp\n");
26476 rgSCHCmnNonDlfsDsfrRntpComp(cell, allocInfo->dedAlloc.dedDlSf);
26478 /* LTE_ADV_FLAG_REMOVED_END */
26479 #endif /* LTE_TDD */
26483 /***********************************************************
26485 * Func : rgSCHCmnCalcRiv
26487 * Desc : This function calculates RIV.
26493 * File : rg_sch_utl.c
26495 **********************************************************/
26498 PUBLIC U32 rgSCHCmnCalcRiv
26505 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26512 PUBLIC U32 rgSCHCmnCalcRiv
26519 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26526 U8 numRbMinus1 = numRb - 1;
26529 TRC2(rgSCHCmnCalcRiv);
26531 if (numRbMinus1 <= bw/2)
26533 riv = bw * numRbMinus1 + rbStart;
26537 riv = bw * (bw - numRbMinus1) + (bw - rbStart - 1);
26540 } /* rgSCHCmnCalcRiv */
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): TDD helper — allocates cell->rachRspLst
 * (one entry per DL subframe of the UL/DL config) and copies, per DL
 * subframe, the RACH-response scheduling windows and the deletion info
 * from the rachRspLst[][] table. Iterates sfnIdx backwards from
 * raArrSz-1; presumably so earlier radio frames overwrite later ones —
 * confirm against full source. */
26544 * @brief This function allocates and copies the RACH response scheduling
26545 * related information into cell control block.
26549 * Function: rgSCHCmnDlCpyRachInfo
26550 * Purpose: This function allocates and copies the RACH response
26551 * scheduling related information into cell control block
26552 * for each DL subframe.
26555 * Invoked by: Scheduler
26557 * @param[in] RgSchCellCb* cell
26558 * @param[in] RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES]
26559 * @param[in] U8 raArrSz
26564 PRIVATE S16 rgSCHCmnDlCpyRachInfo
26567 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
26571 PRIVATE S16 rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz)
26573 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES];
26577 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
26586 TRC2(rgSCHCmnDlCpyRachInfo);
26588 /* Allocate RACH response information for each DL
26589 * subframe in a radio frame */
26590 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cell->rachRspLst,
26591 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1] *
26592 sizeof(RgSchTddRachRspLst));
26598 for(sfnIdx=raArrSz-1; sfnIdx>=0; sfnIdx--)
26600 for(subfrmIdx=0; subfrmIdx < RGSCH_NUM_SUB_FRAMES; subfrmIdx++)
/* subfrmIdx is remapped through the highest-DL-subframe table; the
 * sentinel RGSCH_NUM_SUB_FRAMES marks "no DL subframe" (loop control
 * lines for that case are elided here). */
26602 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
26603 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
26608 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx],subfrmIdx);
26610 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26612 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddNumDlSubfrmTbl[ulDlCfgIdx],subfrmIdx);
26613 sfNum = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][subfrmIdx]-1;
26614 numRfs = cell->rachRspLst[sfNum].numRadiofrms;
26615 /* For each DL subframe in which RACH response can
26616 * be sent is updated */
26619 cell->rachRspLst[sfNum].rachRsp[numRfs].sfnOffset =
26620 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset;
26621 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26623 cell->rachRspLst[sfNum].rachRsp[numRfs].\
26624 subframe[sfcount] =
26625 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].\
26628 cell->rachRspLst[sfNum].rachRsp[numRfs].numSubfrms =
26629 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26630 cell->rachRspLst[sfNum].numRadiofrms++;
26633 /* Copy the subframes to be deleted at ths subframe */
26635 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
26638 cell->rachRspLst[sfNum].delInfo.sfnOffset =
26639 rachRspLst[sfnIdx][subfrmIdx].delInfo.sfnOffset;
26640 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26642 cell->rachRspLst[sfNum].delInfo.subframe[sfcount] =
26643 rachRspLst[sfnIdx][subfrmIdx].delInfo.subframe[sfcount];
26645 cell->rachRspLst[sfNum].delInfo.numSubfrms =
26646 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): derives the iTbs to use for a codeword
 * from the CQI→TBS table for the current CFI, adjusted by the link
 * adaptation delta (deltaiTbs, scaled by 100) and clamped to the cell's
 * maxDlItbs. Special subframes with CFI 3 (or CFI 1 on <=10 RB BW) are
 * evaluated as CFI 2. On a CFI change the new iTbs is cached in
 * cwInfo[].iTbs, lastCfi is updated and deltaiTbs reset. */
26654 * @brief This function determines the iTbs based on the new CFI,
26655 * CQI and BLER based delta iTbs
26659 * Function: rgSchCmnFetchItbs
26660 * Purpose: Fetch the new iTbs when CFI changes.
26662 * @param[in] RgSchCellCb *cell
26663 * @param[in] RgSchCmnDlUe *ueDl
26664 * @param[in] U8 cqi
26671 PRIVATE S32 rgSchCmnFetchItbs
26674 RgSchCmnDlUe *ueDl,
26682 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, subFrm, cqi, cfi, cwIdx, noLyr)
26684 RgSchCmnDlUe *ueDl;
26693 PRIVATE S32 rgSchCmnFetchItbs
26696 RgSchCmnDlUe *ueDl,
26703 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, cqi, cfi, cwIdx, noLyr)
26705 RgSchCmnDlUe *ueDl;
26714 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
26717 TRC2(rgSchCmnFetchItbs);
26720 /* Special Handling for Spl Sf when CFI is 3 as
26721 * CFI in Spl Sf will be max 2 */
26722 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
26724 if((cellDl->currCfi == 3) ||
26725 ((cell->bwCfg.dlTotalBw <= 10) && (cellDl->currCfi == 1)))
26727 /* Use CFI 2 in this case */
/* deltaiTbs is a x100 fixed-point correction added to the table value. */
26728 iTbs = (ueDl->laCb[cwIdx].deltaiTbs +
26729 ((*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][2]))[cqi])* 100)/100;
26731 RG_SCH_CHK_ITBS_RANGE(iTbs, RGSCH_NUM_ITBS - 1);
26735 iTbs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1];
26737 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
26739 else /* CFI Changed. Update with new iTbs Reset the BLER*/
26742 S32 tmpiTbs = (*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][cfi]))[cqi];
26744 iTbs = (ueDl->laCb[cwIdx].deltaiTbs + tmpiTbs*100)/100;
26746 RG_SCH_CHK_ITBS_RANGE(iTbs, tmpiTbs);
26748 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
/* Cache the recomputed iTbs and reset LA state for the new CFI. */
26750 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1] = iTbs;
26752 ueDl->lastCfi = cfi;
26753 ueDl->laCb[cwIdx].deltaiTbs = 0;
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): single-TB/single-CW fresh-transmission
 * sizing for TM 1/2/6/7. Chooses the DCI format (B2 for rank-2 5GTF UEs,
 * else B1, with a legacy rgSCHCmnSlctPdcchFrmt path also visible),
 * computes the RB demand via rgSCHCmnDlAlloc1CwTxRb, then registers the
 * UE in the cell-wide TX allocation info and records rbsReq/vrbgReq. */
26760 * @brief This function determines the RBs and Bytes required for BO
26761 * transmission for UEs configured with TM 1/2/6/7.
26765 * Function: rgSCHCmnDlAllocTxRb1Tb1Cw
26766 * Purpose: Allocate TB1 on CW1.
26768 * Reference Parameter effBo is filled with alloced bytes.
26769 * Returns RFAILED if BO not satisfied at all.
26771 * Invoked by: rgSCHCmnDlAllocTxRbTM1/2/6/7
26773 * @param[in] RgSchCellCb *cell
26774 * @param[in] RgSchDlSf *subFrm
26775 * @param[in] RgSchUeCb *ue
26776 * @param[in] U32 bo
26777 * @param[out] U32 *effBo
26778 * @param[in] RgSchDlHqProcCb *proc
26779 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26784 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw
26791 RgSchDlHqProcCb *proc,
26792 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26795 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26801 RgSchDlHqProcCb *proc;
26802 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26805 RgSchDlRbAlloc *allocInfo;
26808 TRC2(rgSCHCmnDlAllocTxRb1Tb1Cw);
26811 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
26813 if (ue->ue5gtfCb.rank == 2)
26815 allocInfo->dciFormat = TFU_DCI_FORMAT_B2;
26819 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26822 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26823 allocInfo->raType);
26825 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
26826 bo, &numRb, effBo);
26827 if (ret == RFAILED)
26829 /* If allocation couldn't be made then return */
26832 /* Adding UE to RbAllocInfo TX Lst */
26833 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
26834 /* Fill UE alloc Info */
26835 allocInfo->rbsReq = numRb;
26836 allocInfo->dlSf = subFrm;
/* Demand expressed in VRBG units for the 5GTF grant path. */
26838 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): single-TB/single-CW retransmission sizing
 * for TM 1/2/6/7. Reuses the TX DCI-format selection, computes the retx
 * RB demand via rgSCHCmnDlAlloc1CwRetxRb; on failure the (ue, proc) pair
 * is parked on the non-scheduled retx list for the specific scheduler,
 * otherwise it is registered for retx and rbsReq/vrbgReq recorded. */
26846 * @brief This function determines the RBs and Bytes required for BO
26847 * retransmission for UEs configured with TM 1/2/6/7.
26851 * Function: rgSCHCmnDlAllocRetxRb1Tb1Cw
26852 * Purpose: Allocate TB1 on CW1.
26854 * Reference Parameter effBo is filled with alloced bytes.
26855 * Returns RFAILED if BO not satisfied at all.
26857 * Invoked by: rgSCHCmnDlAllocRetxRbTM1/2/6/7
26859 * @param[in] RgSchCellCb *cell
26860 * @param[in] RgSchDlSf *subFrm
26861 * @param[in] RgSchUeCb *ue
26862 * @param[in] U32 bo
26863 * @param[out] U32 *effBo
26864 * @param[in] RgSchDlHqProcCb *proc
26865 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26870 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw
26877 RgSchDlHqProcCb *proc,
26878 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26881 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26887 RgSchDlHqProcCb *proc;
26888 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26891 RgSchDlRbAlloc *allocInfo;
26894 TRC2(rgSCHCmnDlAllocRetxRb1Tb1Cw);
26897 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
26900 /* 5GTF: RETX DCI format same as TX */
26901 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26902 &allocInfo->raType);
26905 /* Get the Allocation in terms of RBs that are required for
26906 * this retx of TB1 */
26907 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
26909 if (ret == RFAILED)
26911 /* Allocation couldn't be made for Retx */
26912 /* Fix : syed If TxRetx allocation failed then add the UE along with the proc
26913 * to the nonSchdTxRetxUeLst and let spfc scheduler take care of it during
26915 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
26918 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
26919 /* Fill UE alloc Info */
26920 allocInfo->rbsReq = numRb;
26921 allocInfo->dlSf = subFrm;
/* Demand expressed in VRBG units for the 5GTF grant path. */
26923 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
/* NOTE(review): line-numbered paste with elided lines; code kept
 * byte-identical, comments only. */
/* Purpose (from visible code): TM1 fresh-transmission hook — a thin
 * wrapper that delegates directly to the shared single-TB/single-CW
 * allocator rgSCHCmnDlAllocTxRb1Tb1Cw. (Header text says "TM 2";
 * the function name says TM1 — the text is presumably a copy-paste
 * slip, confirm against full source.) */
26931 * @brief This function determines the RBs and Bytes required for BO
26932 * transmission for UEs configured with TM 2.
26936 * Function: rgSCHCmnDlAllocTxRbTM1
26939 * Reference Parameter effBo is filled with alloced bytes.
26940 * Returns RFAILED if BO not satisfied at all.
26942 * Invoked by: rgSCHCmnDlAllocTxRb
26944 * @param[in] RgSchCellCb *cell
26945 * @param[in] RgSchDlSf *subFrm
26946 * @param[in] RgSchUeCb *ue
26947 * @param[in] U32 bo
26948 * @param[out] U32 *effBo
26949 * @param[in] RgSchDlHqProcCb *proc
26950 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26955 PRIVATE Void rgSCHCmnDlAllocTxRbTM1
26962 RgSchDlHqProcCb *proc,
26963 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26966 PRIVATE Void rgSCHCmnDlAllocTxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26972 RgSchDlHqProcCb *proc;
26973 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26976 TRC2(rgSCHCmnDlAllocTxRbTM1);
26977 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
26983 * @brief This function determines the RBs and Bytes required for BO
26984 * retransmission for UEs configured with TM 1.
26988 * Function: rgSCHCmnDlAllocRetxRbTM1
26991 * Reference Parameter effBo is filled with alloced bytes.
26992 * Returns RFAILED if BO not satisfied at all.
26994 * Invoked by: rgSCHCmnDlAllocRetxRb
26996 * @param[in] RgSchCellCb *cell
26997 * @param[in] RgSchDlSf *subFrm
26998 * @param[in] RgSchUeCb *ue
26999 * @param[in] U32 bo
27000 * @param[out] U32 *effBo
27001 * @param[in] RgSchDlHqProcCb *proc
27002 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27007 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1
27014 RgSchDlHqProcCb *proc,
27015 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27018 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27024 RgSchDlHqProcCb *proc;
27025 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27028 TRC2(rgSCHCmnDlAllocRetxRbTM1);
/* TM1 thin wrapper: delegates to the common 1-TB/1-CW RETX allocator. */
27029 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27035 * @brief This function determines the RBs and Bytes required for BO
27036 * transmission for UEs configured with TM 2.
27040 * Function: rgSCHCmnDlAllocTxRbTM2
27043 * Reference Parameter effBo is filled with alloced bytes.
27044 * Returns RFAILED if BO not satisfied at all.
27046 * Invoked by: rgSCHCmnDlAllocTxRb
27048 * @param[in] RgSchCellCb *cell
27049 * @param[in] RgSchDlSf *subFrm
27050 * @param[in] RgSchUeCb *ue
27051 * @param[in] U32 bo
27052 * @param[out] U32 *effBo
27053 * @param[in] RgSchDlHqProcCb *proc
27054 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27059 PRIVATE Void rgSCHCmnDlAllocTxRbTM2
27066 RgSchDlHqProcCb *proc,
27067 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27070 PRIVATE Void rgSCHCmnDlAllocTxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27076 RgSchDlHqProcCb *proc;
27077 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27080 TRC2(rgSCHCmnDlAllocTxRbTM2);
/* TM2 thin wrapper: same 1-TB/1-CW TX allocation path as TM1. */
27081 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27087 * @brief This function determines the RBs and Bytes required for BO
27088 * retransmission for UEs configured with TM 2.
27092 * Function: rgSCHCmnDlAllocRetxRbTM2
27095 * Reference Parameter effBo is filled with alloced bytes.
27096 * Returns RFAILED if BO not satisfied at all.
27098 * Invoked by: rgSCHCmnDlAllocRetxRb
27100 * @param[in] RgSchCellCb *cell
27101 * @param[in] RgSchDlSf *subFrm
27102 * @param[in] RgSchUeCb *ue
27103 * @param[in] U32 bo
27104 * @param[out] U32 *effBo
27105 * @param[in] RgSchDlHqProcCb *proc
27106 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27111 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2
27118 RgSchDlHqProcCb *proc,
27119 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27122 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27128 RgSchDlHqProcCb *proc;
27129 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27132 TRC2(rgSCHCmnDlAllocRetxRbTM2);
/* TM2 thin wrapper: same 1-TB/1-CW RETX allocation path as TM1. */
27133 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27139 * @brief This function determines the RBs and Bytes required for BO
27140 * transmission for UEs configured with TM 3.
27144 * Function: rgSCHCmnDlAllocTxRbTM3
27147 * Reference Parameter effBo is filled with alloced bytes.
27148 * Returns RFAILED if BO not satisfied at all.
27150 * Invoked by: rgSCHCmnDlAllocTxRb
27152 * @param[in] RgSchCellCb *cell
27153 * @param[in] RgSchDlSf *subFrm
27154 * @param[in] RgSchUeCb *ue
27155 * @param[in] U32 bo
27156 * @param[out] U32 *effBo
27157 * @param[in] RgSchDlHqProcCb *proc
27158 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27163 PRIVATE Void rgSCHCmnDlAllocTxRbTM3
27170 RgSchDlHqProcCb *proc,
27171 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27174 PRIVATE Void rgSCHCmnDlAllocTxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27180 RgSchDlHqProcCb *proc;
27181 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27185 TRC2(rgSCHCmnDlAllocTxRbTM3);
27187 /* Both TBs free for TX allocation */
/* Fresh TX for TM3: hand off to the TxTx path which may schedule 2 CWs. */
27188 rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo,\
27189 proc, cellWdAllocInfo);
27196 * @brief This function determines the RBs and Bytes required for BO
27197 * retransmission for UEs configured with TM 3.
27201 * Function: rgSCHCmnDlAllocRetxRbTM3
27204 * Reference Parameter effBo is filled with alloced bytes.
27205 * Returns RFAILED if BO not satisfied at all.
27207 * Invoked by: rgSCHCmnDlAllocRetxRb
27209 * @param[in] RgSchCellCb *cell
27210 * @param[in] RgSchDlSf *subFrm
27211 * @param[in] RgSchUeCb *ue
27212 * @param[in] U32 bo
27213 * @param[out] U32 *effBo
27214 * @param[in] RgSchDlHqProcCb *proc
27215 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27220 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3
27227 RgSchDlHqProcCb *proc,
27228 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27231 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27237 RgSchDlHqProcCb *proc;
27238 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27242 TRC2(rgSCHCmnDlAllocRetxRbTM3);
/* Dispatch on HARQ TB states: both NACKED -> RetxRetx, else TxRetx. */
27244 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
27245 (proc->tbInfo[1].state == HQ_TB_NACKED))
27248 printf ("RETX RB TM3 nack for both hqp %d cell %d \n", proc->procId, proc->hqE->cell->cellId);
27250 /* Both TBs require RETX allocation */
27251 rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo,\
27252 proc, cellWdAllocInfo);
27256 /* One of the TBs need RETX allocation. Other TB may/maynot
27257 * be available for new TX allocation. */
27258 rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo,\
27259 proc, cellWdAllocInfo);
27267 * @brief This function performs the DCI format selection in case of
27268 * Transmit Diversity scheme where there can be more
27269 * than 1 option for DCI format selection.
27273 * Function: rgSCHCmnSlctPdcchFrmt
27274 * Purpose: 1. If DLFS is enabled, then choose TM specific
27275 * DCI format for Transmit diversity. All the
27276 * TM Specific DCI Formats support Type0 and/or
27277 * Type1 resource allocation scheme. DLFS
27278 * supports only Type-0&1 Resource allocation.
27279 * 2. If DLFS is not enabled, select a DCI format
27280 * which is of smaller size. Since Non-DLFS
27281 * scheduler supports all Resource allocation
27282 * schemes, selection is based on efficiency.
27284 * Invoked by: DL UE Allocation by Common Scheduler.
27286 * @param[in] RgSchCellCb *cell
27287 * @param[in] RgSchUeCb *ue
27288 * @param[out] U8 *raType
27289 * @return TfuDciFormat
27293 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt
27300 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt(cell, ue, raType)
27306 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
27308 TRC2(rgSCHCmnSlctPdcchFrmt);
27310 /* ccpu00140894- Selective DCI Format and RA type should be selected only
27311 * after TX Mode transition is completed*/
/* DLFS enabled and TX-mode transition complete: use the TM-specific
 * ("spfc") format/RA-type entry; otherwise fall back to the preferred
 * ("prfrd") entry. Both tables are indexed by (txMode - 1). */
27312 if ((cellSch->dl.isDlFreqSel) && (ue->txModeTransCmplt))
27314 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciRAType;
27315 RETVALUE(rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciFrmt);
27319 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciRAType;
27320 RETVALUE(rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciFrmt);
27326 * @brief This function handles Retx allocation in case of TM3 UEs
27327 * where both the TBs were NACKED previously.
27331 * Function: rgSCHCmnDlTM3RetxRetx
27332 * Purpose: If forceTD flag enabled
27333 * TD for TB1 on CW1.
27335 * DCI Frmt 2A and RA Type 0
27336 * RI layered SM of both TBs on 2 CWs
27337 * Add UE to cell Alloc Info.
27338 * Fill UE alloc Info.
27341 * Successful allocation is indicated by non-zero effBo value.
27343 * Invoked by: rgSCHCmnDlAllocRbTM3
27345 * @param[in] RgSchCellCb *cell
27346 * @param[in] RgSchDlSf *subFrm
27347 * @param[in] RgSchUeCb *ue
27348 * @param[in] U32 bo
27349 * @param[out] U32 *effBo
27350 * @param[in] RgSchDlHqProcCb *proc
27351 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27356 PRIVATE Void rgSCHCmnDlTM3RetxRetx
27363 RgSchDlHqProcCb *proc,
27364 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27367 PRIVATE Void rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27373 RgSchDlHqProcCb *proc;
27374 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27378 RgSchDlRbAlloc *allocInfo;
27385 TRC2(rgSCHCmnDlTM3RetxRetx);
27388 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27390 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27392 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
27393 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
/* Try a 2-CW retx RB allocation; swpFlg/numRb are outputs. */
27395 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27397 if (ret == RFAILED)
27399 /* Allocation couldn't be made for Retx */
27400 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
27403 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27404 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27405 #ifdef FOUR_TX_ANTENNA
27406 /*Chandra: For 4X4 MIM RETX with noTxLyrs=3, CW0 should be 1-LyrTB and CW1 should
27407 * have 2-LyrTB as per Table 6.3.3.2-1 of 36.211 */
27408 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27411 proc->cwSwpEnabled = TRUE;
/* Index into the TM3 row (0) of the precoding-info function table by
 * antenna-port count (2 ports -> 0, 4 ports -> 1). */
27414 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27415 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
/* SPS HARQ procs are tracked separately; only non-SPS go on the retx list. */
27419 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27422 /* Adding UE to allocInfo RETX Lst */
27423 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27425 /* Fill UE alloc Info scratch pad */
27426 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27427 precInfo, noTxLyrs, subFrm);
27434 * @brief This function handles Retx allocation in case of TM4 UEs
27435 * where both the TBs were NACKED previously.
27439 * Function: rgSCHCmnDlTM4RetxRetx
27440 * Purpose: If forceTD flag enabled
27441 * TD for TB1 on CW1.
27443 * DCI Frmt 2 and RA Type 0
27445 * 1 layer SM of TB1 on CW1.
27447 * RI layered SM of both TBs on 2 CWs
27448 * Add UE to cell Alloc Info.
27449 * Fill UE alloc Info.
27452 * Successful allocation is indicated by non-zero effBo value.
27454 * Invoked by: rgSCHCmnDlAllocRbTM4
27456 * @param[in] RgSchCellCb *cell
27457 * @param[in] RgSchDlSf *subFrm
27458 * @param[in] RgSchUeCb *ue
27459 * @param[in] U32 bo
27460 * @param[out] U32 *effBo
27461 * @param[in] RgSchDlHqProcCb *proc
27462 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27467 PRIVATE Void rgSCHCmnDlTM4RetxRetx
27474 RgSchDlHqProcCb *proc,
27475 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27478 PRIVATE Void rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27484 RgSchDlHqProcCb *proc;
27485 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27489 RgSchDlRbAlloc *allocInfo;
27491 Bool swpFlg = FALSE;
27493 #ifdef FOUR_TX_ANTENNA
27498 TRC2(rgSCHCmnDlTM4RetxRetx);
27501 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27503 /* Irrespective of RI Schedule both CWs */
/* TM4 uses DCI format 2 (vs 2A for TM3). */
27504 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
27505 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
27507 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27509 if (ret == RFAILED)
27511 /* Allocation couldn't be made for Retx */
27512 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
/* Total layers = sum of layers previously used by the two TBs. */
27515 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27517 #ifdef FOUR_TX_ANTENNA
27518 /*Chandra: For 4X4 MIM RETX with noTxLyrs=3, CW0 should be 1-LyrTB and CW1
27519 * should have 2-LyrTB as per Table 6.3.3.2-1 of 36.211 */
27520 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27523 proc->cwSwpEnabled = TRUE;
/* TM4 row (1) of the precoding-info function table; column by port count. */
27525 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27526 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
27530 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27533 /* Adding UE to allocInfo RETX Lst */
27534 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27536 /* Fill UE alloc Info scratch pad */
27537 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27538 precInfo, noTxLyrs, subFrm);
27546 * @brief This function determines Transmission attributes
27547 * incase of Spatial multiplexing for TX and RETX TBs.
27551 * Function: rgSCHCmnDlSMGetAttrForTxRetx
27552 * Purpose: 1. Reached here for a TM3/4 UE's HqP whose one of the TBs is
27553 * NACKED and the other TB is either NACKED or WAITING.
27554 * 2. Select the NACKED TB for RETX allocation.
27555 * 3. Allocation preference for RETX TB by mapping it to a better
27556 * CW (better in terms of efficiency).
27557 * 4. Determine the state of the other TB.
27558 * Determine if swapFlag were to be set.
27559 * Swap flag would be set if Retx TB is cross
27561 * 5. If UE has new data available for TX and if the other TB's state
27562 * is ACKED then set furtherScope as TRUE.
27564 * Invoked by: rgSCHCmnDlTM3[4]TxRetx
27566 * @param[in] RgSchUeCb *ue
27567 * @param[in] RgSchDlHqProcCb *proc
27568 * @param[out] RgSchDlHqTbCb **retxTb
27569 * @param[out] RgSchDlHqTbCb **txTb
27570 * @param[out] Bool *frthrScp
27571 * @param[out] Bool *swpFlg
27576 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx
27579 RgSchDlHqProcCb *proc,
27580 RgSchDlHqTbCb **retxTb,
27581 RgSchDlHqTbCb **txTb,
27586 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, frthrScp,\
27589 RgSchDlHqProcCb *proc;
27590 RgSchDlHqTbCb **retxTb;
27591 RgSchDlHqTbCb **txTb;
27596 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,proc->hqE->cell);
27597 RgSchDlRbAlloc *allocInfo;
27599 TRC2(rgSCHCmnDlSMGetAttrForTxRetx);
/* Branch on which TB is NACKED: that TB becomes the retx TB, the
 * other the (potential) fresh-TX TB. */
27601 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27603 *retxTb = &proc->tbInfo[0];
27604 *txTb = &proc->tbInfo[1];
27605 /* TENB_BRDCM_TM4- Currently disabling swapflag for TM3/TM4, since
27606 * HqFeedback processing does not consider a swapped hq feedback */
/* For TM4, mark CW swap if the better CW index is the other codeword. */
27607 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 1))
27610 proc->cwSwpEnabled = TRUE;
/* Further scope for a new TX only if the other TB is ACKED and the UE
 * has new data pending (per the alloc scratch pad). */
27612 if (proc->tbInfo[1].state == HQ_TB_ACKED)
27614 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27615 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
/* Mirror case: TB2 is the NACKED/retx TB. */
27620 *retxTb = &proc->tbInfo[1];
27621 *txTb = &proc->tbInfo[0];
27622 /* TENB_BRDCM_TM4 - Currently disabling swapflag for TM3/TM4, since
27623 * HqFeedback processing does not consider a swapped hq feedback */
27624 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 0))
27627 proc->cwSwpEnabled = TRUE;
27629 if (proc->tbInfo[0].state == HQ_TB_ACKED)
27631 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27632 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
27640 * @brief Determine Precoding information for TM3 2 TX Antenna.
27644 * Function: rgSCHCmnDlTM3PrecInf2
27647 * Invoked by: rgSCHCmnDlGetAttrForTM3
27649 * @param[in] RgSchUeCb *ue
27650 * @param[in] U8 numTxLyrs
27651 * @param[in] Bool bothCwEnbld
27656 PRIVATE U8 rgSCHCmnDlTM3PrecInf2
27664 PRIVATE U8 rgSCHCmnDlTM3PrecInf2(ue, numTxLyrs, bothCwEnbld)
/* NOTE(review): body elided in this listing; presumably returns a fixed
 * precoding-info value for the 2-antenna TM3 case — confirm in full source. */
27671 TRC2(rgSCHCmnDlTM3PrecInf2);
27678 * @brief Determine Precoding information for TM4 2 TX Antenna.
27682 * Function: rgSCHCmnDlTM4PrecInf2
27683 * Purpose: To determine a logic of deriving precoding index
27684 * information from 36.212 table 5.3.3.1.5-4
27686 * Invoked by: rgSCHCmnDlGetAttrForTM4
27688 * @param[in] RgSchUeCb *ue
27689 * @param[in] U8 numTxLyrs
27690 * @param[in] Bool bothCwEnbld
27695 PRIVATE U8 rgSCHCmnDlTM4PrecInf2
27703 PRIVATE U8 rgSCHCmnDlTM4PrecInf2(ue, numTxLyrs, bothCwEnbld)
27710 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27713 TRC2(rgSCHCmnDlTM4PrecInf2);
/* Case 1: reported rank matches the number of TX layers. */
27715 if (ueDl->mimoInfo.ri == numTxLyrs)
27717 if (ueDl->mimoInfo.ri == 2)
27719 /* PrecInfo corresponding to 2 CW
/* puschFdbkVld: PMI came via PUSCH feedback -> different index base. */
27721 if (ue->mimoInfo.puschFdbkVld)
27727 precIdx = ueDl->mimoInfo.pmi - 1;
27732 /* PrecInfo corresponding to 1 CW
27734 if (ue->mimoInfo.puschFdbkVld)
27740 precIdx = ueDl->mimoInfo.pmi + 1;
/* Case 2: rank exceeds layers — pick the matrix column with MAX CQI. */
27744 else if (ueDl->mimoInfo.ri > numTxLyrs)
27746 /* In case of choosing among the columns of a
27747 * precoding matrix, choose the column corresponding
27748 * to the MAX-CQI */
27749 if (ue->mimoInfo.puschFdbkVld)
27755 precIdx = (ueDl->mimoInfo.pmi- 1)* 2 + 1;
/* Case 3: rank below layers. */
27758 else /* if RI < numTxLyrs */
27760 precIdx = (ueDl->mimoInfo.pmi < 2)? 0:1;
27767 * @brief Determine Precoding information for TM3 4 TX Antenna.
27771 * Function: rgSCHCmnDlTM3PrecInf4
27772 * Purpose: To determine a logic of deriving precoding index
27773 * information from 36.212 table 5.3.3.1.5A-2
27775 * Invoked by: rgSCHCmnDlGetAttrForTM3
27777 * @param[in] RgSchUeCb *ue
27778 * @param[in] U8 numTxLyrs
27779 * @param[in] Bool bothCwEnbld
27784 PRIVATE U8 rgSCHCmnDlTM3PrecInf4
27792 PRIVATE U8 rgSCHCmnDlTM3PrecInf4(ue, numTxLyrs, bothCwEnbld)
27801 TRC2(rgSCHCmnDlTM3PrecInf4);
/* 2-CW case: index derives directly from the layer count. */
27805 precIdx = numTxLyrs - 2;
27807 else /* one 1 CW transmission */
27816 * @brief Determine Precoding information for TM4 4 TX Antenna.
27820 * Function: rgSCHCmnDlTM4PrecInf4
27821 * Purpose: To determine a logic of deriving precoding index
27822 * information from 36.212 table 5.3.3.1.5-5
27824 * Invoked by: rgSCHCmnDlGetAttrForTM4
27826 * @param[in] RgSchUeCb *ue
27827 * @param[in] U8 numTxLyrs
27828 * @param[in] Bool bothCwEnbld
27833 PRIVATE U8 rgSCHCmnDlTM4PrecInf4
27841 PRIVATE U8 rgSCHCmnDlTM4PrecInf4(cell, ue, numTxLyrs, bothCwEnbld)
27848 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27849 U8 precInfoBaseIdx, precIdx;
27851 TRC2(rgSCHCmnDlTM4PrecInf4);
/* Base index: 16 when PMI arrived via PUSCH feedback, else the PMI itself. */
27853 precInfoBaseIdx = (ue->mimoInfo.puschFdbkVld)? (16):
27854 (ueDl->mimoInfo.pmi);
/* 2-CW case: 17 entries per layer-count row (table 5.3.3.1.5-5). */
27857 precIdx = precInfoBaseIdx + (numTxLyrs-2)*17;
27859 else /* one 1 CW transmission */
27861 precInfoBaseIdx += 1;
27862 precIdx = precInfoBaseIdx + (numTxLyrs-1)*17;
27869 * @brief This function determines Transmission attributes
27870 * incase of TM3 scheduling.
27874 * Function: rgSCHCmnDlGetAttrForTM3
27875 * Purpose: Determine retx TB and tx TB based on TB states.
27876 * If forceTD enabled
27877 * perform only retx TB allocation.
27878 * If retxTB == TB2 then DCI Frmt = 2A, RA Type = 0.
27879 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
27881 * perform retxTB allocation on CW1.
27883 * Determine further Scope and Swap Flag attributes
27884 * assuming a 2 CW transmission of RetxTB and new Tx TB.
27885 * If no further scope for new TX allocation
27886 * Allocate only retx TB using 2 layers if
27887 * this TB was previously transmitted using 2 layers AND
27888 * number of Tx antenna ports == 4.
27889 * otherwise do single layer precoding.
27891 * Invoked by: rgSCHCmnDlTM3TxRetx
27893 * @param[in] RgSchUeCb *ue
27894 * @param[in] RgSchDlHqProcCb *proc
27895 * @param[out] U8 *numTxLyrs
27896 * @param[out] Bool *isTraDiv
27897 * @param[out] U8 *prcdngInf
27898 * @param[out] U8 *raType
27903 PRIVATE Void rgSCHCmnDlGetAttrForTM3
27907 RgSchDlHqProcCb *proc,
27909 TfuDciFormat *dciFrmt,
27911 RgSchDlHqTbCb **retxTb,
27912 RgSchDlHqTbCb **txTb,
27918 PRIVATE Void rgSCHCmnDlGetAttrForTM3(cell, ue, proc, numTxLyrs, dciFrmt,\
27919 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
27922 RgSchDlHqProcCb *proc;
27924 TfuDciFormat *dciFrmt;
27926 RgSchDlHqTbCb **retxTb;
27927 RgSchDlHqTbCb **txTb;
27933 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27936 TRC2(rgSCHCmnDlGetAttrForTM3);
27938 /* Avoiding Tx-Retx for LAA cell as firstSchedTime is associated with
27940 /* Integration_fix: SPS Proc shall always have only one Cw */
/* Force single-CW handling for SPS procs, forceTD UEs, or LAA SCells. */
27942 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
27943 (ueDl->mimoInfo.forceTD))
27945 ||(TRUE == rgSCHLaaSCellEnabled(cell))
27949 if ((ueDl->mimoInfo.forceTD)
27951 || (TRUE == rgSCHLaaSCellEnabled(cell))
27956 /* Transmit Diversity. Format based on dlfsEnabled
27957 * No further scope */
27958 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27960 *retxTb = &proc->tbInfo[0];
27961 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
/* retxTB is TB2: must use format 2A / RA type 0. */
27965 *retxTb = &proc->tbInfo[1];
27966 *dciFrmt = TFU_DCI_FORMAT_2A;
27967 *raType = RG_SCH_CMN_RA_TYPE0;
27975 /* Determine the 2 TB transmission attributes */
27976 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
27980 /* Prefer allocation of RETX TB over 2 layers rather than combining
27981 * it with a new TX. */
27982 if ((ueDl->mimoInfo.ri == 2)
27983 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
27985 /* Allocate TB on CW1, using 2 Lyrs,
27986 * Format 2, precoding accordingly */
/* Combined layer count = retx TB layers + layers of the other (better) CW. */
27992 *numTxLyrs= ((*retxTb)->numLyrs + ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)].noLyr);
/* 3-layer cases need CW swap so the 2-layer TB lands on the right CW
 * (cf. 36.211 Table 6.3.3.2-1). */
27994 if((*retxTb)->tbIdx == 0 && ((*retxTb)->numLyrs == 2 ) && *numTxLyrs ==3)
27997 proc->cwSwpEnabled = TRUE;
27999 else if((*retxTb)->tbIdx == 1 && ((*retxTb)->numLyrs == 1) && *numTxLyrs ==3)
28002 proc->cwSwpEnabled = TRUE;
28006 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28007 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])\
28008 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28009 *dciFrmt = TFU_DCI_FORMAT_2A;
28010 *raType = RG_SCH_CMN_RA_TYPE0;
28012 else /* frthrScp == FALSE */
28014 if (cell->numTxAntPorts == 2)
28016 /* Transmit Diversity */
28018 if ((*retxTb)->tbIdx == 0)
28020 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28024 /* If retxTB is TB2 then use format 2A */
28025 *dciFrmt = TFU_DCI_FORMAT_2A;
28026 *raType = RG_SCH_CMN_RA_TYPE0;
28031 else /* NumAntPorts == 4 */
28033 if ((*retxTb)->numLyrs == 2)
28035 /* Allocate TB on CW1, using 2 Lyrs,
28036 * Format 2A, precoding accordingly */
28038 *dciFrmt = TFU_DCI_FORMAT_2A;
28039 *raType = RG_SCH_CMN_RA_TYPE0;
28040 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28041 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, *numTxLyrs, *frthrScp);
28046 /* Transmit Diversity */
28048 if ((*retxTb)->tbIdx == 0)
28050 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28054 /* If retxTB is TB2 then use format 2A */
28055 *dciFrmt = TFU_DCI_FORMAT_2A;
28056 *raType = RG_SCH_CMN_RA_TYPE0;
28070 * @brief This function determines Transmission attributes
28071 * incase of TM4 scheduling.
28075 * Function: rgSCHCmnDlGetAttrForTM4
28076 * Purpose: Determine retx TB and tx TB based on TB states.
28077 * If forceTD enabled
28078 * perform only retx TB allocation.
28079 * If retxTB == TB2 then DCI Frmt = 2, RA Type = 0.
28080 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
28082 * perform retxTB allocation on CW1.
28084 * Determine further Scope and Swap Flag attributes
28085 * assuming a 2 CW transmission of RetxTB and new Tx TB.
28086 * If no further scope for new TX allocation
28087 * Allocate only retx TB using 2 layers if
28088 * this TB was previously transmitted using 2 layers AND
28089 * number of Tx antenna ports == 4.
28090 * otherwise do single layer precoding.
28092 * Invoked by: rgSCHCmnDlTM4TxRetx
28094 * @param[in] RgSchUeCb *ue
28095 * @param[in] RgSchDlHqProcCb *proc
28096 * @param[out] U8 *numTxLyrs
28097 * @param[out] Bool *isTraDiv
28098 * @param[out] U8 *prcdngInf
28099 * @param[out] U8 *raType
28104 PRIVATE Void rgSCHCmnDlGetAttrForTM4
28108 RgSchDlHqProcCb *proc,
28110 TfuDciFormat *dciFrmt,
28112 RgSchDlHqTbCb **retxTb,
28113 RgSchDlHqTbCb **txTb,
28119 PRIVATE Void rgSCHCmnDlGetAttrForTM4(cell, ue, proc, numTxLyrs, dciFrmt,\
28120 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
28123 RgSchDlHqProcCb *proc;
28125 TfuDciFormat *dciFrmt;
28127 RgSchDlHqTbCb **retxTb;
28128 RgSchDlHqTbCb **txTb;
28134 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28137 TRC2(rgSCHCmnDlGetAttrForTM4);
28140 /* Integration_fix: SPS Proc shall always have only one Cw */
/* Force single-CW handling for SPS procs, forceTD UEs, or LAA SCells. */
28142 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28143 (ueDl->mimoInfo.forceTD))
28145 ||(TRUE == rgSCHLaaSCellEnabled(cell))
28149 if ((ueDl->mimoInfo.forceTD)
28151 || (TRUE == rgSCHLaaSCellEnabled(cell))
28156 /* Transmit Diversity. Format based on dlfsEnabled
28157 * No further scope */
28158 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28160 *retxTb = &proc->tbInfo[0];
28161 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
/* retxTB is TB2: must use format 2 / RA type 0. */
28165 *retxTb = &proc->tbInfo[1];
28166 *dciFrmt = TFU_DCI_FORMAT_2;
28167 *raType = RG_SCH_CMN_RA_TYPE0;
/* Rank-1 UE: single-layer precoding of the NACKED TB, format 2. */
28175 if (ueDl->mimoInfo.ri == 1)
28177 /* single layer precoding. Format 2.
28178 * No further scope */
28179 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28181 *retxTb = &proc->tbInfo[0];
28185 *retxTb = &proc->tbInfo[1];
28188 *dciFrmt = TFU_DCI_FORMAT_2;
28189 *raType = RG_SCH_CMN_RA_TYPE0;
28191 *prcdngInf = 0; /*When RI= 1*/
28195 /* Determine the 2 TB transmission attributes */
28196 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
28198 *dciFrmt = TFU_DCI_FORMAT_2;
28199 *raType = RG_SCH_CMN_RA_TYPE0;
28202 /* Prefer allocation of RETX TB over 2 layers rather than combining
28203 * it with a new TX. */
28204 if ((ueDl->mimoInfo.ri == 2)
28205 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
28207 /* Allocate TB on CW1, using 2 Lyrs,
28208 * Format 2, precoding accordingly */
/* TM4 row (1) of the precoding-info function table. */
28212 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28213 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])
28214 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28216 else /* frthrScp == FALSE */
28218 if (cell->numTxAntPorts == 2)
28220 /* single layer precoding. Format 2. */
28222 *prcdngInf = (getPrecInfoFunc[1][cell->numTxAntPorts/2 - 1])\
28223 (cell, ue, *numTxLyrs, *frthrScp);
28226 else /* NumAntPorts == 4 */
28228 if ((*retxTb)->numLyrs == 2)
28230 /* Allocate TB on CW1, using 2 Lyrs,
28231 * Format 2, precoding accordingly */
28233 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28234 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28235 (cell, ue, *numTxLyrs, *frthrScp);
28240 /* Allocate TB with 1 lyr precoding,
28241 * Format 2, precoding info accordingly */
28243 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28244 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28245 (cell, ue, *numTxLyrs, *frthrScp);
28256 * @brief This function handles Retx allocation in case of TM3 UEs
28257 * where previously one of the TBs was NACKED and the other
28258 * TB is either ACKED/WAITING.
28262 * Function: rgSCHCmnDlTM3TxRetx
28263 * Purpose: Determine the TX attributes for TM3 TxRetx Allocation.
28264 * If futher Scope for New Tx Allocation on other TB
28265 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28266 * Add UE to cell wide RetxTx List.
28268 * Perform only RETX alloc'n on CW1.
28269 * Add UE to cell wide Retx List.
28271 * effBo is set to a non-zero value if allocation is
28274 * Invoked by: rgSCHCmnDlAllocRbTM3
28276 * @param[in] RgSchCellCb *cell
28277 * @param[in] RgSchDlSf *subFrm
28278 * @param[in] RgSchUeCb *ue
28279 * @param[in] U32 bo
28280 * @param[out] U32 *effBo
28281 * @param[in] RgSchDlHqProcCb *proc
28282 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28287 PRIVATE Void rgSCHCmnDlTM3TxRetx
28294 RgSchDlHqProcCb *proc,
28295 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28298 PRIVATE Void rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28304 RgSchDlHqProcCb *proc;
28305 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28309 RgSchDlRbAlloc *allocInfo;
28311 RgSchDlHqTbCb *retxTb, *txTb;
28317 TRC2(rgSCHCmnDlTM3TxRetx);
28321 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28324 /* Determine the transmission attributes */
28325 rgSCHCmnDlGetAttrForTM3(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28326 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28327 &allocInfo->raType);
/* frthrScp TRUE: combined retx(CW)+fresh-TX(other CW) allocation. */
28332 printf ("TX RETX called from proc %d cell %d \n",proc->procId, cell->cellId);
28334 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28336 if (ret == RFAILED)
28338 /* Allocation couldn't be made for Retx */
28339 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28342 /* Adding UE to RbAllocInfo RETX-TX Lst */
28343 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
/* frthrScp FALSE: retx-only allocation on a single CW. */
28347 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28348 numTxLyrs, &numRb, effBo);
28349 if (ret == RFAILED)
28351 /* Allocation couldn't be made for Retx */
28352 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28356 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28359 /* Adding UE to allocInfo RETX Lst */
28360 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
/* Publish the computed attributes into the UE alloc scratch pad. */
28363 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28364 prcdngInf, numTxLyrs, subFrm);
28371 * @brief This function handles Retx allocation in case of TM4 UEs
28372 * where previously one of the TBs was NACKED and the other
28373 * TB is either ACKED/WAITING.
28377 * Function: rgSCHCmnDlTM4TxRetx
28378 * Purpose: Determine the TX attributes for TM4 TxRetx Allocation.
28379 * If futher Scope for New Tx Allocation on other TB
28380 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28381 * Add UE to cell wide RetxTx List.
28383 * Perform only RETX alloc'n on CW1.
28384 * Add UE to cell wide Retx List.
28386 * effBo is set to a non-zero value if allocation is
28389 * Invoked by: rgSCHCmnDlAllocRbTM4
28391 * @param[in] RgSchCellCb *cell
28392 * @param[in] RgSchDlSf *subFrm
28393 * @param[in] RgSchUeCb *ue
28394 * @param[in] U32 bo
28395 * @param[out] U32 *effBo
28396 * @param[in] RgSchDlHqProcCb *proc
28397 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28402 PRIVATE Void rgSCHCmnDlTM4TxRetx
28409 RgSchDlHqProcCb *proc,
28410 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28413 PRIVATE Void rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28419 RgSchDlHqProcCb *proc;
28420 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28424 RgSchDlRbAlloc *allocInfo;
28426 RgSchDlHqTbCb *retxTb, *txTb;
28432 TRC2(rgSCHCmnDlTM4TxRetx);
28435 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28438 /* Determine the transmission attributes */
28439 rgSCHCmnDlGetAttrForTM4(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28440 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28441 &allocInfo->raType);
/* frthrScp TRUE: combined retx(CW)+fresh-TX(other CW) allocation. */
28445 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28447 if (ret == RFAILED)
28449 /* Fix : syed If TxRetx allocation failed then add the UE along
28450 * with the proc to the nonSchdTxRetxUeLst and let spfc scheduler
28451 * take care of it during finalization. */
28452 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28455 /* Adding UE to RbAllocInfo RETX-TX Lst */
28456 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
/* frthrScp FALSE: retx-only allocation on a single CW. */
28460 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28461 numTxLyrs, &numRb, effBo);
28462 if (ret == RFAILED)
28464 /* Allocation couldn't be made for Retx */
28465 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28469 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28472 /* Adding UE to allocInfo RETX Lst */
28473 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
/* Publish the computed attributes into the UE alloc scratch pad. */
28476 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28477 prcdngInf, numTxLyrs, subFrm)
28484 * @brief This function handles fresh TX allocation in case of TM3 UEs
28485 * where previously both the TBs were ACKED or are free
28490 * Function: rgSCHCmnDlTM3TxTx
28491 * Purpose: Reached here for a TM3 UE's HqP's fresh allocation
28492 * where both the TBs are free for TX scheduling.
28493 * If forceTD flag is set
28494 * perform TD on CW1 with TB1.
28499 * RI layered precoding 2 TB on 2 CW.
28500 * Set precoding info.
28501 * Add UE to cellAllocInfo.
28502 * Fill ueAllocInfo.
28504 * effBo is set to a non-zero value if allocation is
28507 * Invoked by: rgSCHCmnDlAllocRbTM3
28509 * @param[in] RgSchCellCb *cell
28510 * @param[in] RgSchDlSf *subFrm
28511 * @param[in] RgSchUeCb *ue
28512 * @param[in] U32 bo
28513 * @param[out] U32 *effBo
28514 * @param[in] RgSchDlHqProcCb *proc
28515 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28520 PRIVATE Void rgSCHCmnDlTM3TxTx
28527 RgSchDlHqProcCb *proc,
28528 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28531 PRIVATE Void rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28537 RgSchDlHqProcCb *proc;
28538 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28541 RgSchCmnDlUe *ueDl;
28542 RgSchDlRbAlloc *allocInfo;
28549 TRC2(rgSCHCmnDlTM3TxTx);
28552 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28553 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28555 /* Integration_fix: SPS Proc shall always have only one Cw */
28557 #ifdef FOUR_TX_ANTENNA
28558 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28559 (ueDl->mimoInfo.forceTD) || proc->hasDcch) /*Chandra Avoid DCCH to be SM */
28561 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28562 (ueDl->mimoInfo.forceTD))
28565 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
/* TD path: format/RA type from the common PDCCH-format selector,
 * single-CW TX allocation of TB1, precInfo fixed to 0. */
28568 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28569 &allocInfo->raType);
28570 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28571 bo, &numRb, effBo);
28572 if (ret == RFAILED)
28574 /* If allocation couldn't be made then return */
28578 precInfo = 0; /* TD */
28580 else /* Precoding */
28582 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
28583 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28585 /* Spatial Multiplexing using 2 CWs */
28586 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28587 if (ret == RFAILED)
28589 /* If allocation couldn't be made then return */
/* SM path: layer count follows the reported rank (RI). */
28592 noTxLyrs = ueDl->mimoInfo.ri;
28593 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28594 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, getPrecInfoFunc[0], precInfoAntIdx);
28595 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
28599 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28602 /* Adding UE to RbAllocInfo TX Lst */
28603 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28605 /* Fill UE allocInfo scrath pad */
28606 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28607 precInfo, noTxLyrs, subFrm);
28614 * @brief This function handles fresh (first-time) TX allocation for a
28615 * TM4 UE's HARQ process where both TBs are free for transmission
28620 * Function: rgSCHCmnDlTM4TxTx
28621 * Purpose: Reached here for a TM4 UE's HqP's fresh allocation
28622 * where both the TBs are free for TX scheduling.
28623 * If forceTD flag is set
28624 * perform TD on CW1 with TB1.
28630 * Single layer precoding of TB1 on CW1.
28631 * Set precoding info.
28633 * RI layered precoding 2 TB on 2 CW.
28634 * Set precoding info.
28635 * Add UE to cellAllocInfo.
28636 * Fill ueAllocInfo.
28638 * effBo is set to a non-zero value if allocation is
28641 * Invoked by: rgSCHCmnDlAllocRbTM4
28643 * @param[in] RgSchCellCb *cell
28644 * @param[in] RgSchDlSf *subFrm
28645 * @param[in] RgSchUeCb *ue
28646 * @param[in] U32 bo
28647 * @param[out] U32 *effBo
28648 * @param[in] RgSchDlHqProcCb *proc
28649 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlTM4TxTx: fresh-TX RB allocation for a TM4 UE HARQ process in
 * which both TBs are free. Chooses Transmit Diversity, single-layer SM
 * (RI==1) or RI-layered 2-CW SM — all SM cases use DCI format 2 — then adds
 * the UE to the cell-wide TX list and fills the UE allocation scratch pad.
 * NOTE(review): this listing is elided (non-contiguous original line
 * numbers); braces/returns between some statements are not shown here.
 */
28654 PRIVATE Void rgSCHCmnDlTM4TxTx
28661 RgSchDlHqProcCb *proc,
28662 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
28665 PRIVATE Void rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28671 RgSchDlHqProcCb *proc;
28672 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28675 RgSchCmnDlUe *ueDl;
28676 RgSchDlRbAlloc *allocInfo;
28683 TRC2(rgSCHCmnDlTM4TxTx);
28686 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28687 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28689 /* Integration_fix: SPS Proc shall always have only one Cw */
28691 #ifdef FOUR_TX_ANTENNA
28692 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28693 (ueDl->mimoInfo.forceTD) || proc->hasDcch) /*Chandra Avoid DCCH to be SM */
28695 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28696 (ueDl->mimoInfo.forceTD))
28699 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
/* TD path: single-CW allocation on TB0; PDCCH format chosen dynamically */
28702 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28703 &allocInfo->raType);
28705 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28706 bo, &numRb, effBo);
28707 if (ret == RFAILED)
28709 /* If allocation couldn't be made then return */
28713 precInfo = 0; /* TD */
28715 else /* Precoding */
28717 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
28718 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28720 if (ueDl->mimoInfo.ri == 1)
28722 /* Single Layer SM using FORMAT 2 */
28723 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28724 bo, &numRb, effBo);
28725 if (ret == RFAILED)
28727 /* If allocation couldn't be made then return */
28731 precInfo = 0; /* PrecInfo as 0 for RI=1*/
28735 /* Spatial Multiplexing using 2 CWs */
28736 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28737 if (ret == RFAILED)
28739 /* If allocation couldn't be made then return */
/* Number of TX layers equals the reported rank indicator (RI) */
28742 noTxLyrs = ueDl->mimoInfo.ri;
/* Precoding-info lookup indexed by antenna-port count; row 1 is the TM4
 * table (TM3 uses row 0 — see rgSCHCmnDlTM3TxTx) */
28743 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28744 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
/* SPS HARQ processes are tracked separately; only non-SPS join the TX list */
28750 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28753 /* Adding UE to RbAllocInfo TX Lst */
28754 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28757 /* Fill UE allocInfo scratch pad */
28758 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28759 precInfo, noTxLyrs, subFrm);
28766 * @brief This function determines the RBs and Bytes required for BO
28767 * transmission for UEs configured with TM 4.
28771 * Function: rgSCHCmnDlAllocTxRbTM4
28772 * Purpose: Invokes the functionality particular to the
28773 * current state of the TBs of the "proc".
28775 * Reference Parameter effBo is filled with alloced bytes.
28776 * Returns RFAILED if BO not satisfied at all.
28778 * Invoked by: rgSCHCmnDlAllocTxRb
28780 * @param[in] RgSchCellCb *cell
28781 * @param[in] RgSchDlSf *subFrm
28782 * @param[in] RgSchUeCb *ue
28783 * @param[in] U32 bo
28784 * @param[out] U32 *effBo
28785 * @param[in] RgSchDlHqProcCb *proc
28786 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocTxRbTM4: fresh-TX RB/byte determination for a TM4 UE.
 * For a new transmission both TBs are free by definition, so this simply
 * delegates to the TxTx handler. effBo is filled by the callee.
 */
28791 PRIVATE Void rgSCHCmnDlAllocTxRbTM4
28798 RgSchDlHqProcCb *proc,
28799 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
28802 PRIVATE Void rgSCHCmnDlAllocTxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28808 RgSchDlHqProcCb *proc;
28809 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28812 TRC2(rgSCHCmnDlAllocTxRbTM4);
28814 /* Both TBs free for TX allocation */
28815 rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo,\
28816 proc, cellWdAllocInfo);
28823 * @brief This function determines the RBs and Bytes required for BO
28824 * retransmission for UEs configured with TM 4.
28828 * Function: rgSCHCmnDlAllocRetxRbTM4
28829 * Purpose: Invokes the functionality particular to the
28830 * current state of the TBs of the "proc".
28832 * Reference Parameter effBo is filled with alloced bytes.
28833 * Returns RFAILED if BO not satisfied at all.
28835 * Invoked by: rgSCHCmnDlAllocRetxRb
28837 * @param[in] RgSchCellCb *cell
28838 * @param[in] RgSchDlSf *subFrm
28839 * @param[in] RgSchUeCb *ue
28840 * @param[in] U32 bo
28841 * @param[out] U32 *effBo
28842 * @param[in] RgSchDlHqProcCb *proc
28843 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocRetxRbTM4: RETX RB/byte determination for a TM4 UE.
 * Dispatches on the HARQ TB states: both TBs NACKED -> RetxRetx handler,
 * otherwise one TB needs RETX (the other may be free for new TX) ->
 * TxRetx handler. effBo is filled by the callee.
 */
28848 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4
28855 RgSchDlHqProcCb *proc,
28856 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
28859 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28865 RgSchDlHqProcCb *proc;
28866 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28869 TRC2(rgSCHCmnDlAllocRetxRbTM4);
28871 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
28872 (proc->tbInfo[1].state == HQ_TB_NACKED))
28874 /* Both TBs require RETX allocation */
28875 rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo,\
28876 proc, cellWdAllocInfo);
28880 /* One of the TBs needs RETX allocation. The other TB may/may not
28881 * be available for new TX allocation. */
28882 rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo,\
28883 proc, cellWdAllocInfo);
28892 * @brief This function determines the RBs and Bytes required for BO
28893 * transmission for UEs configured with TM 5.
28897 * Function: rgSCHCmnDlAllocTxRbTM5
28900 * Reference Parameter effBo is filled with alloced bytes.
28901 * Returns RFAILED if BO not satisfied at all.
28903 * Invoked by: rgSCHCmnDlAllocTxRb
28905 * @param[in] RgSchCellCb *cell
28906 * @param[in] RgSchDlSf *subFrm
28907 * @param[in] RgSchUeCb *ue
28908 * @param[in] U32 bo
28909 * @param[out] U32 *effBo
28910 * @param[in] RgSchDlHqProcCb *proc
28911 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocTxRbTM5: TM5 (MU-MIMO) fresh-TX allocation stub.
 * TM5 is not supported by this scheduler; reaching here is an error,
 * logged only when debug error-class checking is compiled in.
 */
28916 PRIVATE Void rgSCHCmnDlAllocTxRbTM5
28923 RgSchDlHqProcCb *proc,
28924 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
28927 PRIVATE Void rgSCHCmnDlAllocTxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28933 RgSchDlHqProcCb *proc;
28934 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28937 TRC2(rgSCHCmnDlAllocTxRbTM5);
28938 #if (ERRCLASS & ERRCLS_DEBUG)
28939 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
28946 * @brief This function determines the RBs and Bytes required for BO
28947 * retransmission for UEs configured with TM 5.
28951 * Function: rgSCHCmnDlAllocRetxRbTM5
28954 * Reference Parameter effBo is filled with alloced bytes.
28955 * Returns RFAILED if BO not satisfied at all.
28957 * Invoked by: rgSCHCmnDlAllocRetxRb
28959 * @param[in] RgSchCellCb *cell
28960 * @param[in] RgSchDlSf *subFrm
28961 * @param[in] RgSchUeCb *ue
28962 * @param[in] U32 bo
28963 * @param[out] U32 *effBo
28964 * @param[in] RgSchDlHqProcCb *proc
28965 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocRetxRbTM5: TM5 (MU-MIMO) RETX allocation stub.
 * TM5 is not supported by this scheduler; reaching here is an error,
 * logged only when debug error-class checking is compiled in.
 */
28970 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5
28977 RgSchDlHqProcCb *proc,
28978 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
28981 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28987 RgSchDlHqProcCb *proc;
28988 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28991 TRC2(rgSCHCmnDlAllocRetxRbTM5);
28992 #if (ERRCLASS & ERRCLS_DEBUG)
28993 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
29001 * @brief This function determines the RBs and Bytes required for BO
29002 * transmission for UEs configured with TM 6.
29006 * Function: rgSCHCmnDlAllocTxRbTM6
29009 * Reference Parameter effBo is filled with alloced bytes.
29010 * Returns RFAILED if BO not satisfied at all.
29012 * Invoked by: rgSCHCmnDlAllocTxRb
29014 * @param[in] RgSchCellCb *cell
29015 * @param[in] RgSchDlSf *subFrm
29016 * @param[in] RgSchUeCb *ue
29017 * @param[in] U32 bo
29018 * @param[out] U32 *effBo
29019 * @param[in] RgSchDlHqProcCb *proc
29020 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocTxRbTM6: fresh-TX RB/byte determination for a TM6
 * (closed-loop rank-1 precoding) UE. Uses DCI format 1A (TD fallback) or
 * 1B with PMI/confirmation bits, allocates a single CW on TB0, and on
 * success records the UE in the cell-wide TX list and fills rbsReq/dlSf.
 * NOTE(review): this listing is elided (non-contiguous original line
 * numbers); braces/returns between some statements are not shown here.
 */
29025 PRIVATE Void rgSCHCmnDlAllocTxRbTM6
29032 RgSchDlHqProcCb *proc,
29033 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
29036 PRIVATE Void rgSCHCmnDlAllocTxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29042 RgSchDlHqProcCb *proc;
29043 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29046 RgSchDlRbAlloc *allocInfo;
29047 RgSchCmnDlUe *ueDl;
29051 TRC2(rgSCHCmnDlAllocTxRbTM6);
29054 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29055 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29057 if (ueDl->mimoInfo.forceTD)
/* Transmit-diversity fallback: localized RA type 2 with DCI 1A */
29059 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29060 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29064 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29065 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29066 /* Fill precoding information for FORMAT 1B */
29067 /* First 4 least significant bits to indicate PMI.
29068 * 4th most significant corresponds to pmi Confirmation.
29070 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29071 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
29073 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
29074 bo, &numRb, effBo);
29075 if (ret == RFAILED)
29077 /* If allocation couldn't be made then return */
/* SPS HARQ processes are tracked separately; only non-SPS join the TX list */
29082 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
29085 /* Adding UE to RbAllocInfo TX Lst */
29086 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
29088 /* Fill UE alloc Info */
29089 allocInfo->rbsReq = numRb;
29090 allocInfo->dlSf = subFrm;
29096 * @brief This function determines the RBs and Bytes required for BO
29097 * retransmission for UEs configured with TM 6.
29101 * Function: rgSCHCmnDlAllocRetxRbTM6
29104 * Reference Parameter effBo is filled with alloced bytes.
29105 * Returns RFAILED if BO not satisfied at all.
29107 * Invoked by: rgSCHCmnDlAllocRetxRb
29109 * @param[in] RgSchCellCb *cell
29110 * @param[in] RgSchDlSf *subFrm
29111 * @param[in] RgSchUeCb *ue
29112 * @param[in] U32 bo
29113 * @param[out] U32 *effBo
29114 * @param[in] RgSchDlHqProcCb *proc
29115 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocRetxRbTM6: RETX RB/byte determination for a TM6 UE.
 * Mirrors the TX path's DCI 1A/1B + PMI handling, but sizes the grant for
 * retransmission of TB0; on failure the UE is queued on the non-scheduled
 * RETX list, on success on the RETX list, then rbsReq/dlSf are filled.
 * NOTE(review): this listing is elided (non-contiguous original line
 * numbers); braces/returns between some statements are not shown here.
 */
29120 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6
29127 RgSchDlHqProcCb *proc,
29128 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
29131 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29137 RgSchDlHqProcCb *proc;
29138 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29141 RgSchDlRbAlloc *allocInfo;
29142 RgSchCmnDlUe *ueDl;
29146 TRC2(rgSCHCmnDlAllocRetxRbTM6);
29149 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29150 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29152 if (ueDl->mimoInfo.forceTD)
/* Transmit-diversity fallback: localized RA type 2 with DCI 1A */
29154 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29155 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29159 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29160 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29161 /* Fill precoding information for FORMAT 1B */
29162 /* First 4 least significant bits to indicate PMI.
29163 * 4th most significant corresponds to pmi Confirmation.
29165 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29166 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
29169 /* Get the Allocation in terms of RBs that are required for
29170 * this retx of TB1 */
29171 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
29173 if (ret == RFAILED)
29175 /* Allocation couldn't be made for Retx */
29176 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
29179 /* Adding UE to allocInfo RETX Lst */
29180 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
29181 /* Fill UE alloc Info */
29182 allocInfo->rbsReq = numRb;
29183 allocInfo->dlSf = subFrm;
29189 * @brief This function determines the RBs and Bytes required for BO
29190 * transmission for UEs configured with TM 7.
29194 * Function: rgSCHCmnDlAllocTxRbTM7
29197 * Reference Parameter effBo is filled with alloced bytes.
29198 * Returns RFAILED if BO not satisfied at all.
29200 * Invoked by: rgSCHCmnDlAllocTxRb
29202 * @param[in] RgSchCellCb *cell
29203 * @param[in] RgSchDlSf *subFrm
29204 * @param[in] RgSchUeCb *ue
29205 * @param[in] U32 bo
29206 * @param[out] U32 *effBo
29207 * @param[in] RgSchDlHqProcCb *proc
29208 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocTxRbTM7: fresh-TX RB/byte determination for a TM7
 * (single-antenna-port, port 5) UE — delegates to the generic
 * single-TB/single-CW TX allocator. effBo is filled by the callee.
 */
29213 PRIVATE Void rgSCHCmnDlAllocTxRbTM7
29220 RgSchDlHqProcCb *proc,
29221 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
29224 PRIVATE Void rgSCHCmnDlAllocTxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29230 RgSchDlHqProcCb *proc;
29231 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29234 TRC2(rgSCHCmnDlAllocTxRbTM7);
29235 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29241 * @brief This function determines the RBs and Bytes required for BO
29242 * retransmission for UEs configured with TM 7.
29246 * Function: rgSCHCmnDlAllocRetxRbTM7
29249 * Reference Parameter effBo is filled with alloced bytes.
29250 * Returns RFAILED if BO not satisfied at all.
29252 * Invoked by: rgSCHCmnDlAllocRetxRb
29254 * @param[in] RgSchCellCb *cell
29255 * @param[in] RgSchDlSf *subFrm
29256 * @param[in] RgSchUeCb *ue
29257 * @param[in] U32 bo
29258 * @param[out] U32 *effBo
29259 * @param[in] RgSchDlHqProcCb *proc
29260 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocRetxRbTM7: RETX RB/byte determination for a TM7 UE —
 * delegates to the generic single-TB/single-CW RETX allocator.
 * effBo is filled by the callee.
 */
29265 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7
29272 RgSchDlHqProcCb *proc,
29273 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
29276 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29282 RgSchDlHqProcCb *proc;
29283 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29286 TRC2(rgSCHCmnDlAllocRetxRbTM7);
29287 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29293 * @brief This function invokes the TM specific DL TX RB Allocation routine.
29297 * Function: rgSCHCmnDlAllocTxRb
29298 * Purpose: This function invokes the TM specific
29299 * DL TX RB Allocation routine.
29301 * Invoked by: Specific Schedulers
29303 * @param[in] RgSchCellCb *cell
29304 * @param[in] RgSchDlSf *subFrm
29305 * @param[in] RgSchUeCb *ue
29306 * @param[in] U32 bo
29307 * @param[out] U32 *effBo
29308 * @param[in] RgSchDlHqProcCb *proc
29309 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocTxRb: common entry for DL fresh-TX RB allocation.
 * Resets the UE's aggregate-TB-bits counter on a new TTI, snapshots the
 * bytes already requested for both TBs, dispatches to the TM-specific
 * allocator via dlAllocTxRbFunc[txMode-1], and then accounts the newly
 * added bytes (as bits) into ue->dl.aggTbBits with a scheduling timestamp.
 * NOTE(review): this listing is elided (non-contiguous original line
 * numbers); braces/returns between some statements are not shown here.
 */
29314 PUBLIC S16 rgSCHCmnDlAllocTxRb
29321 RgSchDlHqProcCb *proc,
29322 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
29325 PUBLIC S16 rgSCHCmnDlAllocTxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29331 RgSchDlHqProcCb *proc;
29332 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29335 U32 newSchBits = 0;
29336 U32 prevSchBits = 0;
29337 RgSchDlRbAlloc *allocInfo;
29339 TRC2(rgSCHCmnDlAllocTxRb);
/* New TTI for this UE: restart the per-TTI aggregate allocation counter */
29341 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29343 ue->dl.aggTbBits = 0;
29347 /* Calculate totals bits previously allocated */
29348 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29349 if (allocInfo->tbInfo[0].schdlngForTb)
29351 prevSchBits += allocInfo->tbInfo[0].bytesReq;
29353 if (allocInfo->tbInfo[1].schdlngForTb)
29355 prevSchBits += allocInfo->tbInfo[1].bytesReq;
29358 /* Call TM specific RB allocation routine */
29359 (dlAllocTxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29360 proc, cellWdAllocInfo);
29364 /* Calculate totals bits newly allocated */
29365 if (allocInfo->tbInfo[0].schdlngForTb)
29367 newSchBits += allocInfo->tbInfo[0].bytesReq;
29369 if (allocInfo->tbInfo[1].schdlngForTb)
29371 newSchBits += allocInfo->tbInfo[1].bytesReq;
/* Account only the delta; *SchBits hold bytes, *8 converts to bits */
29373 if (newSchBits > prevSchBits)
29375 ue->dl.aggTbBits += ((newSchBits - prevSchBits) * 8);
29376 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
29383 /* DwPTS Scheduling Changes Start */
29386 * @brief Retransmit decision for TDD. Retx is avoided in below cases
29387 * 1) DL Sf -> Spl Sf
29388 * 2) DL SF -> DL SF 0
29392 * Function: rgSCHCmnRetxAvoidTdd
29393 * Purpose: Avoid allocating RETX for cases 1, 2
29395 * Invoked by: rgSCHCmnRetxAvoidTdd
29397 * @param[in] RgSchDlSf *curSf
29398 * @param[in] RgSchCellCb *cell
29399 * @param[in] RgSchDlHqProcCb *proc
/*
 * rgSCHCmnRetxAvoidTdd: decides whether a TDD retransmission should be
 * avoided in the current subframe. A RETX is skipped when the original TX
 * used a subframe type with more data REs than the current one (DL SF ->
 * special SF, or DL SF -> DL SF 0), since the TB would not fit.
 * Returns TRUE to avoid the RETX, FALSE to allow it (per the header doc).
 * NOTE(review): this listing is elided (non-contiguous original line
 * numbers); braces/returns between some statements are not shown here.
 */
29404 PUBLIC Bool rgSCHCmnRetxAvoidTdd
29408 RgSchDlHqProcCb *proc
/* K&R-style definition variant (pre-ANSI build configuration) */
29411 PUBLIC Bool rgSCHCmnRetxAvoidTdd(curSf, cell, proc)
29414 RgSchDlHqProcCb *proc;
29417 RgSchTddSfType txSfType = 0;
29419 TRC2(rgSCHCmnRetxAvoidTdd);
29421 /* Get the RBs of TB that will be retransmitted */
29422 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29424 txSfType = proc->tbInfo[0].sfType;
29426 #ifdef XEON_SPECIFIC_CHANGES
29427 #ifndef XEON_TDD_SPCL
29428 /* Avoid re-transmission on Normal SF when the corresponding TB was transmitted on SPCL SF */
29429 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
29436 if (proc->tbInfo[1].state == HQ_TB_NACKED)
29438 /* Select the TxSf with the highest num of possible REs
29439 * In ascending order -> 1) SPL SF 2) DL_SF_0 3) DL_SF */
29440 txSfType = RGSCH_MAX(txSfType, proc->tbInfo[1].sfType);
29442 #ifdef XEON_SPECIFIC_CHANGES
29443 #ifndef XEON_TDD_SPCL
29444 /* Avoid re-transmission on Normal SF when the corresponding TB was transmitted on SPCL SF */
29445 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
/* Original TX subframe had more capacity than the current one: avoid */
29453 if (txSfType > curSf->sfType)
29464 /* DwPTS Scheduling Changes End */
29467 * @brief Avoid allocating RETX in case of collision
29468 * with reserved resources for BCH/PSS/SSS occasions.
29472 * Function: rgSCHCmnRetxAllocAvoid
29473 * Purpose: Avoid allocating RETX incase of collision
29474 * with reserved resources for BCH/PSS/SSS occassions
29476 * Invoked by: rgSCHCmnDlAllocRetxRb
29478 * @param[in] RgSchDlSf *subFrm
29479 * @param[in] RgSchUeCb *ue
29480 * @param[in] RgSchDlHqProcCb *proc
/*
 * rgSCHCmnRetxAllocAvoid: decides whether a RETX allocation would collide
 * with RBs reserved for PBCH/PSS/SSS. Uses the RETX TB's original grant
 * size (numRb) plus already-assigned subframe BW; the check applies only
 * in subframes 0 and 5 (sfNum % 5 == 0), where these signals are sent.
 * Returns TRUE to avoid the RETX on collision (per the header doc).
 * NOTE(review): this listing is elided (non-contiguous original line
 * numbers); braces/returns between some statements are not shown here.
 */
29485 PUBLIC Bool rgSCHCmnRetxAllocAvoid
29489 RgSchDlHqProcCb *proc
/* K&R-style definition variant (pre-ANSI build configuration) */
29492 PUBLIC Bool rgSCHCmnRetxAllocAvoid(subFrm, cell, proc)
29495 RgSchDlHqProcCb *proc;
29500 TRC2(rgSCHCmnRetxAllocAvoid);
/* Pick the grant size of whichever TB is pending retransmission */
29502 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29504 reqRbs = proc->tbInfo[0].dlGrnt.numRb;
29508 reqRbs = proc->tbInfo[1].dlGrnt.numRb;
29510 /* Consider the dlGrnt.numRb of the Retransmitting proc->tbInfo
29511 * and current available RBs to determine if this RETX TB
29512 * will collide with the BCH/PSS/SSS occasion */
29513 if (subFrm->sfNum % 5 == 0)
29515 if ((subFrm->bwAssigned < cell->pbchRbEnd) &&
29516 (((subFrm->bwAssigned + reqRbs) - cell->pbchRbStart) > 0))
29528 * @brief This function invokes the TM specific DL RETX RB Allocation routine.
29532 * Function: rgSCHCmnDlAllocRetxRb
29533 * Purpose: This function invokes the TM specific
29534 * DL RETX RB Allocation routine.
29536 * Invoked by: Specific Schedulers
29538 * @param[in] RgSchCellCb *cell
29539 * @param[in] RgSchDlSf *subFrm
29540 * @param[in] RgSchUeCb *ue
29541 * @param[in] U32 bo
29542 * @param[out] U32 *effBo
29543 * @param[in] RgSchDlHqProcCb *proc
29544 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/*
 * rgSCHCmnDlAllocRetxRb: common entry for DL RETX RB allocation.
 * Resets the UE's aggregate-TB-bits counter on a new TTI, bails out when
 * the subframe BW is exhausted, dispatches to the TM-specific RETX
 * allocator via dlAllocRetxRbFunc[txMode-1], then accounts the newly
 * requested bytes (as bits) into ue->dl.aggTbBits with a timestamp.
 * Unlike the TX path, no previous-bytes delta is taken here.
 * NOTE(review): this listing is elided (non-contiguous original line
 * numbers); braces/returns between some statements are not shown here.
 */
29549 PUBLIC S16 rgSCHCmnDlAllocRetxRb
29556 RgSchDlHqProcCb *proc,
29557 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
/* K&R-style definition variant (pre-ANSI build configuration) */
29560 PUBLIC S16 rgSCHCmnDlAllocRetxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29566 RgSchDlHqProcCb *proc;
29567 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29570 U32 newSchBits = 0;
29571 RgSchDlRbAlloc *allocInfo;
29573 TRC2(rgSCHCmnDlAllocRetxRb);
/* New TTI for this UE: restart the per-TTI aggregate allocation counter */
29575 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29577 ue->dl.aggTbBits = 0;
29581 /* Check for DL BW exhaustion */
29582 if (subFrm->bw <= subFrm->bwAssigned)
29586 /* Call TM specific RB allocation routine */
29587 (dlAllocRetxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29588 proc, cellWdAllocInfo);
29592 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29593 /* Calculate totals bits newly allocated */
29594 if (allocInfo->tbInfo[0].schdlngForTb)
29596 newSchBits += allocInfo->tbInfo[0].bytesReq;
29598 if (allocInfo->tbInfo[1].schdlngForTb)
29600 newSchBits += allocInfo->tbInfo[1].bytesReq;
/* bytesReq holds bytes; *8 converts to bits */
29602 ue->dl.aggTbBits += (newSchBits * 8);
29603 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
29611 * @brief This function determines the RBs and Bytes required for
29612 * Transmission on 1 CW.
29616 * Function: rgSCHCmnDlAlloc1CwTxRb
29617 * Purpose: This function determines the RBs and Bytes required
29618 * for Transmission of DL SVC BO on 1 CW.
29619 * Also, takes care of SVC by SVC allocation by tracking
29620 * previous SVCs allocations.
29621 * Returns RFAILED if BO not satisfied at all.
29623 * Invoked by: DL UE Allocation
29625 * @param[in] RgSchCellCb *cell
29626 * @param[in] RgSchDlSf *subFrm
29627 * @param[in] RgSchUeCb *ue
29628 * @param[in] RgSchDlHqTbCb *tbInfo
29629 * @param[in] U32 bo
29630 * @param[out] U8 *numRb
29631 * @param[out] U32 *effBo
/*
 * rgSCHCmnDlAlloc1CwTxRb: RB/byte sizing for a single-CW fresh TX.
 * Tracks cumulative BO across per-SVC calls via ueDl->outStndAlloc so
 * effBo reflects only the increment added by this call. This build is on
 * the 5GTF path: iTbs/maxTbSz/maxRb come from ue->ue5gtfCb (mcs, rank)
 * and numRb is fixed at MAX_5GTF_PRBS rather than derived from REs.
 * NOTE(review): this listing is elided (non-contiguous original line
 * numbers); braces/returns between some statements are not shown here.
 */
29636 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb
29641 RgSchDlHqTbCb *tbInfo,
/* K&R-style definition variant (pre-ANSI build configuration) */
29647 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, tbInfo, bo, numRb, effBo)
29651 RgSchDlHqTbCb *tbInfo;
29660 RgSchCmnDlUe *ueDl;
29661 RgSchDlRbAlloc *allocInfo;
29664 /* Correcting wrap around issue.
29665 * This change has been done at multiple places in this function.*/
29667 TRC2(rgSCHCmnDlAlloc1CwTxRb);
29670 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29671 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
/* Snapshot of bytes requested before this SVC's BO is added */
29672 oldReq = ueDl->outStndAlloc;
29675 //TODO_SID: Currently setting max Tb size wrt to 5GTF TM3
29676 iTbs = ue->ue5gtfCb.mcs;
29677 ueDl->maxTbSz = MAX_5GTF_TB_SIZE * ue->ue5gtfCb.rank;
29678 ueDl->maxRb = MAX_5GTF_PRBS;
29680 ueDl->outStndAlloc += bo;
29681 /* consider Cumulative amount of this BO and bytes so far allocated */
29682 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbSz/8);
29683 /* Get the number of REs needed for this bo. */
29684 //noRes = ((bo * 8 * 1024) / eff);
29686 /* Get the number of RBs needed for this transmission */
29687 /* Number of RBs = No of REs / No of REs per RB */
29688 //tempNumRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
29689 tempNumRb = MAX_5GTF_PRBS;
/* TB size capped by the 5GTF TBS table entry scaled by rank */
29690 tbSz = RGSCH_MIN(bo, (rgSch5gtfTbSzTbl[iTbs]/8) * ue->ue5gtfCb.rank);
29692 /* DwPts Scheduling Changes End */
/* Report only the incremental bytes satisfied for this call's request */
29693 *effBo = RGSCH_MIN(tbSz - oldReq, reqBytes);
29696 //RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs);
29701 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbSz, \
29702 iTbs, imcs, tbInfo, ue->ue5gtfCb.rank);
29703 *numRb = (U8) tempNumRb;
29705 /* Update the subframe Allocated BW field */
29706 subFrm->bwAssigned = subFrm->bwAssigned + tempNumRb - allocInfo->rbsReq;
29713 * @brief This function is invoked in the event of any TB's allocation
29714 * being underutilized by the specific scheduler. Here we reduce iMcs
29715 * to increase redundancy and hence increase reception quality at UE.
29719 * Function: rgSCHCmnRdcImcsTxTb
29720 * Purpose: This function shall reduce the iMcs in accordance with
29721 * the total consumed bytes by the UE at allocation
29724 * Invoked by: UE DL Allocation finalization routine
29725 * of specific scheduler.
29727 * @param[in] RgSchDlRbAlloc *allocInfo
29728 * @param[in] U8 tbInfoIdx
29729 * @param[in] U32 cnsmdBytes
/*
 * rgSCHCmnRdcImcsTxTb: when a TB's allocation is under-utilized, step iTbs
 * down until the TB size from rgTbSzTbl no longer exceeds the consumed
 * bytes, then map the reduced iTbs back to iMcs in the TB's DL grant —
 * increasing redundancy for the same payload. Note the comment at the top
 * of the body says this functionality is no longer needed; the code below
 * it is retained (possibly compiled out — the listing is elided here).
 */
29734 PUBLIC Void rgSCHCmnRdcImcsTxTb
29736 RgSchDlRbAlloc *allocInfo,
/* K&R-style definition variant (pre-ANSI build configuration) */
29741 PUBLIC Void rgSCHCmnRdcImcsTxTb(allocInfo, tbInfoIdx, cnsmdBytes)
29742 RgSchDlRbAlloc *allocInfo;
29748 /*The below functionality is not needed.*/
29753 TRC2(rgSCHCmnRdcImcsTxTb);
29755 iTbs = allocInfo->tbInfo[tbInfoIdx].iTbs;
29756 noLyr = allocInfo->tbInfo[tbInfoIdx].noLyr;
29757 numRb = allocInfo->rbsAlloc;
/* Exact fit: allocation fully consumed, keep the current iMcs */
29760 if ((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) == cnsmdBytes)
29765 /* Get iTbs as suitable for the consumed bytes */
29766 while((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) > cnsmdBytes)
29770 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].\
29771 tbCb->dlGrnt.iMcs);
29777 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].tbCb->dlGrnt.iMcs);
29784 * @brief This function determines the RBs and Bytes required for
29785 * Transmission on 2 CWs.
29789 * Function: rgSCHCmnDlAlloc2CwTxRb
29790 * Purpose: This function determines the RBs and Bytes required
29791 * for Transmission of DL SVC BO on 2 CWs.
29792 * Also, takes care of SVC by SVC allocation by tracking
29793 * previous SVCs allocations.
29794 * Returns RFAILED if BO not satisfied at all.
29796 * Invoked by: TM3 and TM4 DL UE Allocation
29798 * @param[in] RgSchCellCb *cell
29799 * @param[in] RgSchDlSf *subFrm
29800 * @param[in] RgSchUeCb *ue
29801 * @param[in] RgSchDlHqProcCb *proc
29802 * @param[in] RgSchDlHqProcCb bo
29803 * @param[out] U8 *numRb
29804 * @param[out] U32 *effBo
/*
 * rgSCHCmnDlAlloc2CwTxRb: RB/byte sizing for a 2-CW (TM3/TM4 spatial
 * multiplexing) fresh TX. Outline of the visible logic:
 *   1. Cap against the UE's per-TTI limits (maxTbBits, maxTbSz, maxRb).
 *   2. Pick iTbs per CW — BLER-adjusted values when CFI is unchanged,
 *      otherwise re-fetched from CQI via rgSchCmnFetchItbs.
 *   3. Optionally step iTbs down for TDD SF0 (and SF3/SF8 on the
 *      CA_PHY_BRDCM_61765 build) where control/sync overhead is higher.
 *   4. Split BO across CWs weighted by per-CW spectral efficiency,
 *      convert to REs then RBs, clip to UE max and available subframe BW.
 *   5. Special handling for DwPTS (special-subframe data) TB sizing.
 *   6. Fill both TB infos (iTbs->iMcs) and return numRb/effBo.
 * Cumulative-BO tracking via ueDl->outStndAlloc makes effBo report only
 * this call's increment.
 * NOTE(review): this listing is elided (non-contiguous original line
 * numbers); braces/returns between many statements are not shown here.
 */
29809 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb
29814 RgSchDlHqProcCb *proc,
/* K&R-style definition variant (pre-ANSI build configuration) */
29820 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, numRbRef, effBo)
29824 RgSchDlHqProcCb *proc;
29836 RgSchCmnDlCell *cellDl;
29837 RgSchCmnDlUe *ueDl;
29838 RgSchDlRbAlloc *allocInfo;
29841 /* Fix: MUE_PERTTI_DL */
29843 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
29844 U8 cfi = cellSch->dl.currCfi;
29851 TRC2(rgSCHCmnDlAlloc2CwTxRb);
29854 cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
29855 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29856 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
/* Snapshot of bytes requested before this SVC's BO is added */
29857 oldReq = ueDl->outStndAlloc;
29860 if (ueDl->maxTbBits > ue->dl.aggTbBits)
29862 availBits = ueDl->maxTbBits - ue->dl.aggTbBits;
29864 /* check if we can further allocate to this UE */
29865 if ((ue->dl.aggTbBits >= ueDl->maxTbBits) ||
29866 (allocInfo->tbInfo[0].bytesReq >= ueDl->maxTbSz/8) ||
29867 (allocInfo->tbInfo[1].bytesReq >= ueDl->maxTbSz/8) ||
29868 (allocInfo->rbsReq >= ueDl->maxRb))
29870 RLOG_ARG0(L_DEBUG,DBG_CELLID,cell->cellId,
29871 "rgSCHCmnDlAllocRb(): UEs max allocation exceed");
29875 noLyr1 = ueDl->mimoInfo.cwInfo[0].noLyr;
29876 noLyr2 = ueDl->mimoInfo.cwInfo[1].noLyr;
29878 /* If there is no CFI change, continue to use the BLER based
29880 if (ueDl->lastCfi == cfi)
29882 iTbs1 = ueDl->mimoInfo.cwInfo[0].iTbs[noLyr1 - 1];
29883 iTbs2 = ueDl->mimoInfo.cwInfo[1].iTbs[noLyr2 - 1];
/* CFI changed: re-derive iTbs from CQI (TDD variant passes subFrm) */
29887 U8 cqi = ueDl->mimoInfo.cwInfo[0].cqi;
29889 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 0, noLyr1);
29891 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 0, noLyr1);
29894 cqi = ueDl->mimoInfo.cwInfo[1].cqi;
29896 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 1, noLyr2);
29898 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 1, noLyr2);
29902 /*ccpu00131191 and ccpu00131317 - Fix for RRC Reconfig failure
29903 * issue for VoLTE call */
29904 //if ((proc->hasDcch) || (TRUE == rgSCHLaaSCellEnabled(cell)))
29924 else if(!cellSch->dl.isDlFreqSel)
29927 /* for Tdd reduce iTbs only for SF0. SF5 contains only
29928 * SSS and can be ignored */
29929 if (subFrm->sfNum == 0)
29931 (iTbs1 > 1)? (iTbs1 -= 1) : (iTbs1 = 0);
29932 (iTbs2 > 1)? (iTbs2 -= 1) : (iTbs2 = 0);
29934 /* For SF 3 and 8 CRC is getting failed in DL.
29935 Need to do proper fix after the replay from
29937 #ifdef CA_PHY_BRDCM_61765
29938 if ((subFrm->sfNum == 3) || (subFrm->sfNum == 8))
29940 (iTbs1 > 2)? (iTbs1 -= 2) : (iTbs1 = 0);
29941 (iTbs2 > 2)? (iTbs2 -= 2) : (iTbs2 = 0);
/* Special (DwPTS) data subframe: use the SPS-specific CFI */
29949 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
29951 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
/* Per-CW spectral efficiency from the CQI->efficiency tables */
29955 eff1 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr1 - 1][cfi]))[iTbs1];
29956 eff2 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr2 - 1][cfi]))[iTbs2];
29959 bo = RGSCH_MIN(bo,availBits/8);
29960 ueDl->outStndAlloc += bo;
29961 /* consider Cumulative amount of this BO and bytes so far allocated */
29962 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbBits/8);
/* Split BO between CWs weighted by eff1/eff2, each bounded below by the
 * minimum grant header and above by maxTbSz */
29963 bo = RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff1)/(eff1+eff2)),
29965 RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff2)/(eff1+eff2)),
29966 (ueDl->maxTbSz)/8) +
29967 1; /* Add 1 to adjust the truncation at weighted averaging */
29968 /* Get the number of REs needed for this bo. */
29969 noRes = ((bo * 8 * 1024) / (eff1 + eff2));
29971 /* Get the number of RBs needed for this transmission */
29972 /* Number of RBs = No of REs / No of REs per RB */
29973 numRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
29974 /* Cannot exceed the maximum number of RBs per UE */
29975 if (numRb > ueDl->maxRb)
29977 numRb = ueDl->maxRb;
/* LAA build: adjust PRBs for 2-CW allocation, can reject the grant */
29982 if(RFAILED == rgSCHLaaCmn2CwAdjustPrb(allocInfo, boTmp, &numRb, ueDl, noLyr1, noLyr2, iTbs1, iTbs2))
/* Grow numRb while both TB sizes stay within limits and under the BO */
29985 while ((numRb <= ueDl->maxRb) &&
29986 (rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1] <= ueDl->maxTbSz) &&
29987 (rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1] <= ueDl->maxTbSz) &&
29988 ((rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8 +
29989 rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8) <= bo))
29995 availBw = subFrm->bw - subFrm->bwAssigned;
29996 /* Cannot exceed the total number of RBs in the cell */
29997 if ((S16)(numRb - allocInfo->rbsReq) > availBw)
29999 numRb = availBw + allocInfo->rbsReq;
30001 tb1Sz = rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8;
30002 tb2Sz = rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8;
30003 /* DwPts Scheduling Changes Start */
30005 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
30007 /* Max Rb for Special Sf is approximated as 4/3 of maxRb */
30008 rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, (U8*)&numRb, ueDl->maxRb*4/3,
30009 &iTbs1, &iTbs2, noLyr1,
30010 noLyr2, &tb1Sz, &tb2Sz, cfi);
30011 /* Check for available Bw */
30012 if ((S16)numRb - allocInfo->rbsReq > availBw)
30014 numRb = availBw + allocInfo->rbsReq;
30015 tb1Sz = rgTbSzTbl[noLyr1-1][iTbs1][RGSCH_MAX(numRb*3/4,1)-1]/8;
30016 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs2][RGSCH_MAX(numRb*3/4,1)-1]/8;
30020 /* DwPts Scheduling Changes End */
30021 /* Update the subframe Allocated BW field */
30022 subFrm->bwAssigned = subFrm->bwAssigned + numRb - \
/* Report only the incremental bytes satisfied for this call's request */
30025 *effBo = RGSCH_MIN((tb1Sz + tb2Sz) - oldReq, reqBytes);
30028 if (ROK != rgSCHLaaCmn2TBPrbCheck(allocInfo, tb1Sz, tb2Sz, boTmp, effBo, iTbs1, iTbs2, numRb, proc))
30034 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs1, imcs1);
30035 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs2, imcs2);
30036 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tb1Sz, \
30037 iTbs1, imcs1, &proc->tbInfo[0], noLyr1);
30038 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30039 iTbs2, imcs2, &proc->tbInfo[1], noLyr2);
30040 *numRbRef = (U8)numRb;
30048 * @brief This function determines the RBs and Bytes required for
30049 * Transmission & Retransmission on 2 CWs.
30053 * Function: rgSCHCmnDlAlloc2CwTxRetxRb
30054 * Purpose: This function determines the RBs and Bytes required
30055 * for Transmission & Retransmission on 2 CWs. Allocate
30056 * RETX TB on a better CW and restrict new TX TB by
30058 * Returns RFAILED if BO not satisfied at all.
30060 * Invoked by: TM3 and TM4 DL UE Allocation
30062 * @param[in] RgSchCellCb *cell
30063 * @param[in] RgSchDlSf *subFrm
30064 * @param[in] RgSchUeCb *ue
30065 * @param[in] RgSchDlHqTbCb *reTxTb
30066 * @param[in] RgSchDlHqTbCb *txTb
30067 * @param[out] U8 *numRb
30068 * @param[out] U32 *effBo
30073 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb
30078 RgSchDlHqTbCb *reTxTb,
30079 RgSchDlHqTbCb *txTb,
30084 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, reTxTb, txTb, numRb,\
30089 RgSchDlHqTbCb *reTxTb;
30090 RgSchDlHqTbCb *txTb;
30095 RgSchCmnDlUe *ueDl;
30096 RgSchDlRbAlloc *allocInfo;
30100 RgSchCmnDlUeCwInfo *otherCw;
30102 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
30103 U8 cfi = cellDl->currCfi;
30106 TRC2(rgSCHCmnDlAlloc2CwTxRetxRb);
/* NOTE(review): this extract has lines elided (braces/else branches missing);
 * comments below describe only the statements that are visible here. */
30108 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
30109 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
/* The new-TX TB goes on the codeword NOT selected as the "better" CW
 * (btrCwIdx); the RETX TB presumably rides on the better CW — confirm. */
30110 otherCw = &ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)];
30113 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30114 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
/* Bandwidth (in RBs) still unassigned in this subframe. */
30116 availBw = subFrm->bw - subFrm->bwAssigned;
/* RETX reuses the RB count of the original grant — no recomputation. */
30117 *numRb = reTxTb->dlGrnt.numRb;
30119 #ifdef XEON_TDD_SPCL
30120 *numRb = (reTxTb->initTxNumRbs);
/* Initial TX was in a DwPTS (special) subframe but this one is normal:
 * scale RBs by 3/4 — inverse of the 4/3 DwPTS compensation used elsewhere. */
30121 if(reTxTb->sfType == RG_SCH_SPL_SF_DATA && subFrm->sfType != RG_SCH_SPL_SF_DATA)
30123 *numRb = (reTxTb->initTxNumRbs*3/4);
30127 RLOG1(L_ERROR," Number of RBs [%d] are less than or equal to 3",*numRb);
/* Cannot fit the RETX grant in the remaining subframe bandwidth. */
30133 if ((S16)*numRb > availBw)
30137 /* Update the subframe Allocated BW field */
30138 subFrm->bwAssigned += *numRb;
30139 noLyr2 = otherCw->noLyr;
/* RETX keeps its original MCS; only the new-TX CW needs an iTbs lookup. */
30140 RG_SCH_CMN_GET_MCS_FOR_RETX(reTxTb, imcs1);
30142 /* If there is no CFI change, continue to use the BLER based
30144 if (ueDl->lastCfi == cfi)
30146 iTbs = otherCw->iTbs[noLyr2-1];
/* CFI changed: re-derive iTbs from the CW's CQI (two call variants kept
 * under different builds in the original — one passes subFrm). */
30151 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, otherCw->cqi, cfi,
30152 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
30154 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, otherCw->cqi, cfi,
30155 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
/* TB size for the new-TX CW from the TBS table; /8 converts bits to bytes
 * (table appears to hold bits — confirm against rgTbSzTbl definition). */
30158 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs][*numRb-1]/8;
30159 /* DwPts Scheduling Changes Start */
30162 /* DwPts Scheduling Changes End */
30163 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs2);
/* tbInfo[0] = RETX TB (iTbs 0: irrelevant for RETX), tbInfo[1] = new TX. */
30165 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], reTxTb->tbSz, \
30166 0, imcs1, reTxTb, reTxTb->numLyrs);
30168 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30169 iTbs, imcs2, txTb, noLyr2);
/* Effective BO served = RETX TB size + new-TX TB size (bytes). */
30171 *effBo = reTxTb->tbSz + tb2Sz;
30178 * @brief This function determines the RBs and Bytes required for BO
30179 * Retransmission on 2 CWs.
30183 * Function: rgSCHCmnDlAlloc2CwRetxRb
30184 * Purpose: This function determines the RBs and Bytes required
30185 * for BO Retransmission on 2 CWs. Allocate larger TB
30186 * on a better CW and check if the smaller TB can be
30187 * accommodated on the other CW.
30188 * Returns RFAILED if BO not satisfied at all.
30190 * Invoked by: Common Scheduler
30192 * @param[in] RgSchCellCb *cell
30193 * @param[in] RgSchDlSf *subFrm
30194 * @param[in] RgSchUeCb *ue
30195 * @param[in] RgSchDlHqProcCb *proc
30196 * @param[out] U8 *numRb
30197 * @param[out] Bool *swpFlg
30198 * @param[out] U32 *effBo
30203 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb
30208 RgSchDlHqProcCb *proc,
30214 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc,\
30215 numRb, swpFlg, effBo)
30219 RgSchDlHqProcCb *proc;
30225 RgSchDlRbAlloc *allocInfo;
30228 RgSchDlHqTbCb *lrgTbInfo, *othrTbInfo;
30230 TRC2(rgSCHCmnDlAlloc2CwRetxRb);
/* Allocates RBs for RETX of BOTH codewords of one HARQ process.
 * Both TBs reuse their original grant parameters (ccpu00123919). */
30232 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30235 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30236 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
/* tbInfo[0] is treated as the larger TB, tbInfo[1] as the other. */
30238 lrgTbInfo = &proc->tbInfo[0];
30239 othrTbInfo = &proc->tbInfo[1];
30240 *numRb = lrgTbInfo->dlGrnt.numRb;
30241 #ifdef XEON_TDD_SPCL
/* If either TB was first sent in a DwPTS (special) subframe, start from
 * its initial-TX RB count instead of the last grant. */
30242 if((lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA || othrTbInfo->sfType == RG_SCH_SPL_SF_DATA))
30244 if(lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA)
30246 *numRb = (lrgTbInfo->initTxNumRbs);
30250 *numRb = (othrTbInfo->initTxNumRbs);
/* Moving from a special subframe to a normal one: scale RBs by 3/4. */
30253 if(subFrm->sfType != RG_SCH_SPL_SF_DATA)
30255 *numRb = (*numRb)*3/4;
30260 RLOG1(L_ERROR," Number of RBs [%d] are less than or equal to 3",*numRb);
/* Fail the allocation if the RETX does not fit the remaining bandwidth. */
30265 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30269 /* Update the subframe Allocated BW field */
30270 subFrm->bwAssigned += *numRb;
/* RETX keeps original MCS for both CWs; iTbs 0 is a don't-care for RETX. */
30271 RG_SCH_CMN_GET_MCS_FOR_RETX(lrgTbInfo, imcs1);
30272 RG_SCH_CMN_GET_MCS_FOR_RETX(othrTbInfo, imcs2);
30273 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], lrgTbInfo->tbSz, \
30274 0, imcs1, lrgTbInfo, lrgTbInfo->numLyrs);
30275 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], othrTbInfo->tbSz, \
30276 0, imcs2, othrTbInfo, othrTbInfo->numLyrs);
/* Effective BO served = sum of both TB sizes (bytes). */
30277 *effBo = lrgTbInfo->tbSz + othrTbInfo->tbSz;
30286 * @brief This function determines the RBs and Bytes required for BO
30287 * Retransmission on 1 CW.
30291 * Function: rgSCHCmnDlAlloc1CwRetxRb
30292 * Purpose: This function determines the RBs and Bytes required
30293 * for BO Retransmission on 1 CW, the first CW.
30294 * Returns RFAILED if BO not satisfied at all.
30296 * Invoked by: Common Scheduler
30298 * @param[in] RgSchCellCb *cell
30299 * @param[in] RgSchDlSf *subFrm
30300 * @param[in] RgSchUeCb *ue
30301 * @param[in] RgSchDlHqTbCb *tbInfo
30302 * @param[in] U8 noLyr
30303 * @param[out] U8 *numRb
30304 * @param[out] U32 *effBo
30309 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb
30314 RgSchDlHqTbCb *tbInfo,
30320 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, tbInfo, noLyr,\
30325 RgSchDlHqTbCb *tbInfo;
30331 RgSchDlRbAlloc *allocInfo;
30334 TRC2(rgSCHCmnDlAlloc1CwRetxRb);
/* Single-codeword RETX allocation: reuse the original grant's RB count,
 * MCS and DCI format verbatim (ccpu00123919 — no recomputation). */
30336 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30339 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30340 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
30342 *numRb = tbInfo->dlGrnt.numRb;
/* Fail if the RETX does not fit the subframe's remaining bandwidth. */
30343 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30347 /* Update the subframe Allocated BW field */
30348 subFrm->bwAssigned += *numRb;
30349 imcs = tbInfo->dlGrnt.iMcs;
30350 allocInfo->dciFormat = tbInfo->dlGrnt.dciFormat;
30351 /* Fix: For a RETX TB the iTbs is irrelevant, hence setting 0 */
30352 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbInfo->tbSz, \
30353 0, imcs, tbInfo, tbInfo->numLyrs);
/* Effective BO served = the retransmitted TB size (bytes). */
30354 *effBo = tbInfo->tbSz;
30362 * @brief This function is called to handle Release PDCCH feedback for SPS UE
30366 * Function: rgSCHCmnDlRelPdcchFbk
30367 * Purpose: Invokes SPS module to handle release PDCCH feedback
30371 * @param[in] RgSchCellCb *cell
30372 * @param[in] RgSchUeCb *ue
30373 * @param[in] Bool isAck
30378 PUBLIC Void rgSCHCmnDlRelPdcchFbk
30385 PUBLIC Void rgSCHCmnDlRelPdcchFbk(cell, ue, isAck)
30392 TRC2(rgSCHCmnDlRelPdcchFbk);
/* Thin pass-through: delegate release-PDCCH HARQ feedback handling for
 * this SPS UE to the DL SPS module. */
30393 rgSCHCmnSpsDlRelPdcchFbk(cell, ue, isAck);
30400 * @brief This function is invoked to handle Ack processing for a HARQ proc.
30404 * Function: rgSCHCmnDlProcAck
30405 * Purpose: DTX processing for HARQ proc
30409 * @param[in] RgSchCellCb *cell
30410 * @param[in] RgSchDlHqProcCb *hqP
30415 PUBLIC Void rgSCHCmnDlProcAck
30418 RgSchDlHqProcCb *hqP
30421 PUBLIC Void rgSCHCmnDlProcAck(cell, hqP)
30423 RgSchDlHqProcCb *hqP;
30427 TRC2(rgSCHCmnDlProcAck);
/* Only HARQ processes owned by DL SPS need extra ACK processing here;
 * non-SPS processes are handled elsewhere (not visible in this extract). */
30429 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
30431 /* Invoke SPS module if SPS service was scheduled for this HARQ proc */
30432 rgSCHCmnSpsDlProcAck(cell, hqP);
30436 #ifdef RGSCH_SPS_STATS
30437 extern U32 rgSchStatCrntiCeRcvCnt;
30440 * @brief This function is invoked to handle CRNTI CE reception for an UE
30444 * Function: rgSCHCmnHdlCrntiCE
30445 * Purpose: Handle CRNTI CE reception
30449 * @param[in] RgSchCellCb *cell
30450 * @param[in] RgSchDlHqProcCb *hqP
30455 PUBLIC Void rgSCHCmnHdlCrntiCE
30461 PUBLIC Void rgSCHCmnHdlCrntiCE(cell, ue)
30467 TRC2(rgSCHCmnHdlCrntiCE);
30468 #ifdef RGSCH_SPS_STATS
/* Stats-only counter: number of C-RNTI CEs received (RGSCH_SPS_STATS builds). */
30469 rgSchStatCrntiCeRcvCnt++;
30472 /* When UL sync lost happened due to TA timer expiry UE is being moved to
30473 PDCCH order inactivity list.But when CRNTI CE received in msg3 from UE
30474 we are not moving UE into active state due to that RRC Reconfiguration is
30476 So here we are moving UE to active list whenever we receive the CRNTI CE and
30478 /* CR ccpu00144525 */
30479 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
30481 /* Activate this UE if it was inactive */
30482 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30483 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30486 /* Handling is same as reception of UE RESET for both DL and UL */
/* Reset SPS state in each direction only where SPS is actually configured. */
30487 if (ue->dl.dlSpsCfg.isDlSpsEnabled)
30489 rgSCHCmnSpsDlUeReset(cell, ue);
30491 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30493 rgSCHCmnSpsUlUeReset(cell, ue);
30501 * @brief This function is called to handle relInd from MAC for a UE
30505 * Function: rgSCHCmnUlSpsRelInd
30506 * Purpose: Invokes SPS module to handle UL SPS release for a UE
30508 * Invoked by: SCH_UTL
30510 * @param[in] RgSchCellCb *cell
30511 * @param[in] RgSchUeCb *ue
30512 * @param[in] Bool isExplRel
30517 PUBLIC Void rgSCHCmnUlSpsRelInd
30524 PUBLIC Void rgSCHCmnUlSpsRelInd(cell, ue, isExplRel)
30531 TRC2(rgSCHCmnUlSpsRelInd);
/* Thin pass-through: forward the UL SPS release indication (explicit or
 * implicit, per isExplRel) to the UL SPS module. */
30532 rgSCHCmnSpsUlProcRelInd(cell, ue, isExplRel);
30535 } /* end of rgSCHCmnUlSpsRelInd */
30538 * @brief This function is called to handle SPS Activate Ind from MAC for a UE
30542 * Function: rgSCHCmnUlSpsActInd
30543 * Purpose: Invokes SPS module to handle UL SPS activate for a UE
30545 * Invoked by: SCH_UTL
30547 * @param[in] RgSchCellCb *cell
30548 * @param[in] RgSchUeCb *ue
30553 PUBLIC Void rgSCHCmnUlSpsActInd
30560 PUBLIC Void rgSCHCmnUlSpsActInd(cell, ue,spsSduSize)
30567 TRC2(rgSCHCmnUlSpsActInd);
/* Forward the UL SPS activation (with the triggering SDU size) to the
 * SPS module, but only for UEs that actually have UL SPS configured. */
30569 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30571 rgSCHCmnSpsUlProcActInd(cell, ue,spsSduSize);
30575 } /* end of rgSCHCmnUlSpsActInd */
30578 * @brief This function is called to handle CRC in UL for UEs
30579 * undergoing SPS release
30583 * Function: rgSCHCmnUlCrcInd
30584 * Purpose: Invokes SPS module to handle CRC in UL for SPS UE
30586 * Invoked by: SCH_UTL
30588 * @param[in] RgSchCellCb *cell
30589 * @param[in] RgSchUeCb *ue
30590 * @param[in] CmLteTimingInfo crcTime
30595 PUBLIC Void rgSCHCmnUlCrcInd
30599 CmLteTimingInfo crcTime
30602 PUBLIC Void rgSCHCmnUlCrcInd(cell, ue, crcTime)
30605 CmLteTimingInfo crcTime;
30609 TRC2(rgSCHCmnUlCrcInd);
/* CRC pass in UL: notify the UL SPS module (relevant while the UE is
 * undergoing SPS release) only if UL SPS is configured for this UE. */
30610 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30612 rgSCHCmnSpsUlProcCrcInd(cell, ue, crcTime);
/* NOTE(review): trailer comment says rgSCHCmnUlCrcFailInd but this is
 * rgSCHCmnUlCrcInd — stale copy/paste in the original. */
30616 } /* end of rgSCHCmnUlCrcFailInd */
30619 * @brief This function is called to handle CRC failure in UL
30623 * Function: rgSCHCmnUlCrcFailInd
30624 * Purpose: Invokes SPS module to handle CRC failure in UL for SPS UE
30626 * Invoked by: SCH_UTL
30628 * @param[in] RgSchCellCb *cell
30629 * @param[in] RgSchUeCb *ue
30630 * @param[in] CmLteTimingInfo crcTime
30635 PUBLIC Void rgSCHCmnUlCrcFailInd
30639 CmLteTimingInfo crcTime
30642 PUBLIC Void rgSCHCmnUlCrcFailInd(cell, ue, crcTime)
30645 CmLteTimingInfo crcTime;
30649 TRC2(rgSCHCmnUlCrcFailInd);
/* CRC failure in UL is treated as DTX toward the UL SPS module; invoked
 * only when UL SPS is configured for this UE. */
30650 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30652 rgSCHCmnSpsUlProcDtxInd(cell, ue, crcTime);
30656 } /* end of rgSCHCmnUlCrcFailInd */
30658 #endif /* LTEMAC_SPS */
30661 * @brief BCH, BCCH, PCCH Downlink Scheduling Handler.
30665 * Function: rgSCHCmnDlBcchPcchAlloc
30666 * Purpose: This function calls common scheduler APIs to
30667 * schedule for BCCH/PCCH.
30668 * It then invokes Allocator for actual RB
30669 * allocations. It processes on the actual resources allocated
30670 * against requested to the allocator module.
30672 * Invoked by: Common Scheduler
30674 * @param[in] RgSchCellCb *cell
30678 PRIVATE Void rgSCHCmnDlBcchPcchAlloc
30683 PRIVATE Void rgSCHCmnDlBcchPcchAlloc(cell)
/* Index of the subframe being scheduled ahead of time; the HDFDD build
 * looks RG_SCH_CMN_HARQ_INTERVAL subframes further ahead. */
30688 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
30690 #ifdef LTEMAC_HDFDD
30691 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
30693 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
30696 RgInfSfAlloc *nextsfAlloc = &(cell->sfAllocArr[nextSfIdx]);
30697 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30698 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
30700 TRC2(rgSCHCmnDlBcchPcchAlloc);
30703 /*Reset the bitmask for BCCH/PCCH*/
30704 rgSCHUtlResetSfAlloc(nextsfAlloc,TRUE,FALSE);
30705 #ifndef DISABLE_MIB_SIB /* Not sending MIB and SIB to CL */
/* Refresh SI configuration (modification-period boundary) and pick the
 * SI to transmit in the current SI window before scheduling. */
30707 rgSCHChkNUpdSiCfg(cell);
30708 rgSCHSelectSi(cell);
30711 /*Perform the scheduling for BCCH,PCCH*/
30712 rgSCHCmnDlBcchPcch(cell, allocInfo, nextsfAlloc);
30714 /* Call common allocator for RB Allocation */
30715 rgSCHBcchPcchDlRbAlloc(cell, allocInfo);
30717 /* Finalize the allocations: reconcile requested against allocated */
30718 rgSCHCmnDlBcchPcchFnlz(cell, allocInfo);
30719 #endif /* DISABLE_MIB_SIB */
30724 * @brief Handles RB allocation for BCCH/PCCH for downlink.
30728 * Function : rgSCHBcchPcchDlRbAlloc
30730 * Invoking Module Processing:
30731 * - This function is invoked for DL RB allocation of BCCH/PCCH
30733 * Processing Steps:
30734 * - If cell is frequency selecive,
30735 * - Call rgSCHDlfsBcchPcchAllocRb().
30737 * - Do the processing
30739 * @param[in] RgSchCellCb *cell
30740 * @param[in] RgSchDlRbAllocInfo *allocInfo
30745 PRIVATE Void rgSCHBcchPcchDlRbAlloc
30748 RgSchCmnDlRbAllocInfo *allocInfo
30751 PRIVATE Void rgSCHBcchPcchDlRbAlloc(cell, allocInfo)
30753 RgSchCmnDlRbAllocInfo *allocInfo;
30756 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30758 TRC2(rgSCHBcchPcchDlRbAlloc);
/* Dispatch to the frequency-selective allocator (via the DLFS API table)
 * when DLFS is enabled for the cell; otherwise use the non-DLFS path. */
30761 if (cellSch->dl.isDlFreqSel)
30763 cellSch->apisDlfs->rgSCHDlfsBcchPcchAllocRb(cell, allocInfo);
30767 rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo);
30774 * @brief Handles RB allocation for BCCH,PCCH for frequency
30775 * non-selective cell.
30779 * Function : rgSCHCmnNonDlfsBcchPcchRbAlloc
30781 * Invoking Module Processing:
30782 * - SCH shall invoke this if downlink frequency selective is disabled for
30783 * the cell for RB allocation.
30784 * - MAX C/I/PFS/RR shall provide the requiredBytes, required RBs
30785 * estimate and subframe for each allocation to be made to SCH.
30787 * Processing Steps:
30788 * - Allocate sequentially for BCCH,PCCH common channels.
30790 * @param[in] RgSchCellCb *cell
30791 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
30796 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc
30799 RgSchCmnDlRbAllocInfo *allocInfo
30802 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo)
30804 RgSchCmnDlRbAllocInfo *allocInfo;
30807 RgSchDlRbAlloc *reqAllocInfo;
30809 TRC2(rgSCHCmnNonDlfsBcchPcchRbAlloc);
/* Sequentially serve the two common-channel requests; a zero rbsReq
 * means nothing was scheduled on that channel this subframe. */
30812 /* Allocate for PCCH */
30813 reqAllocInfo = &(allocInfo->pcchAlloc);
30814 if (reqAllocInfo->rbsReq)
30816 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30818 /* Allocate for BCCH on DLSCH */
30819 reqAllocInfo = &(allocInfo->bcchAlloc);
30820 if (reqAllocInfo->rbsReq)
30822 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30830 * @brief This function implements the handling to check and
30831 * update the SI cfg at the start of the modificiation period.
30835 * Function: rgSCHChkNUpdSiCfg
30836 * Purpose: This function implements handling for update of SI Cfg
30837 * at the start of modification period.
30839 * Invoked by: Scheduler
30841 * @param[in] RgSchCellCb* cell
30847 PRIVATE Void rgSCHChkNUpdSiCfg
30852 PRIVATE Void rgSCHChkNUpdSiCfg(cell)
30856 CmLteTimingInfo pdSchTmInfo;
30858 TRC2(rgSCHChkNUpdSiCfg);
/* Swaps the "new" SI/MIB/SIB1 buffers and config provisioned by the
 * application into the "current" set at the modification-period boundary
 * (PWS SIB1 is swapped immediately, without waiting). */
30861 pdSchTmInfo = cell->crntTime;
30862 #ifdef LTEMAC_HDFDD
30863 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
30864 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
30865 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
30867 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA);
30871 /* Updating the SIB1 for Warning SI message immediately after it is received
30872 * from application. No need to wait for next modification period.
30874 if((pdSchTmInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
30875 && (RGSCH_SIB1_TX_SF_NUM == (pdSchTmInfo.slot % RGSCH_NUM_SUB_FRAMES)))
30877 /*Check whether SIB1 with PWS has been updated*/
30878 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_PWS_UPD)
30880 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30881 cell->siCb.newSiInfo.sib1Info.sib1);
30882 cell->siCb.crntSiInfo.sib1Info.mcs =
30883 cell->siCb.newSiInfo.sib1Info.mcs;
30884 cell->siCb.crntSiInfo.sib1Info.nPrb =
30885 cell->siCb.newSiInfo.sib1Info.nPrb;
30886 cell->siCb.crntSiInfo.sib1Info.msgLen =
30887 cell->siCb.newSiInfo.sib1Info.msgLen;
30888 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_PWS_UPD;
30892 /*Check if this SFN and SF No marks the start of next modification
30893 period. If current SFN,SF No doesn't marks the start of next
30894 modification period, then return. */
30895 if(!((pdSchTmInfo.sfn % cell->siCfg.modPrd == 0)
30896 && (0 == pdSchTmInfo.slot)))
30897 /*if(!((((pdSchTmInfo.hSfn * 1024) + pdSchTmInfo.sfn) % cell->siCfg.modPrd == 0)
30898 && (0 == pdSchTmInfo.slot)))*/
30903 /*Check whether MIB has been updated*/
30904 if(cell->siCb.siBitMask & RGSCH_SI_MIB_UPD)
30906 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.mib,
30907 cell->siCb.newSiInfo.mib);
30908 cell->siCb.siBitMask &= ~RGSCH_SI_MIB_UPD;
30911 /*Check whether SIB1 has been updated*/
30912 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_UPD)
30914 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30915 cell->siCb.newSiInfo.sib1Info.sib1);
30916 cell->siCb.crntSiInfo.sib1Info.mcs = cell->siCb.newSiInfo.sib1Info.mcs;
30917 cell->siCb.crntSiInfo.sib1Info.nPrb = cell->siCb.newSiInfo.sib1Info.nPrb;
30918 cell->siCb.crntSiInfo.sib1Info.msgLen =
30919 cell->siCb.newSiInfo.sib1Info.msgLen;
30920 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_UPD;
30923 /*Check whether SIs have been updated*/
30924 if(cell->siCb.siBitMask & RGSCH_SI_SI_UPD)
30928 /*Check if SI cfg have been modified And Check if numSi have
30929 been changed, if yes then we would need to update the
30930 pointers for all the SIs */
30931 if((cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD) &&
30932 (cell->siCfg.numSi != cell->siCb.newSiCfg.numSi))
30934 for(idx = 0;idx < cell->siCb.newSiCfg.numSi;idx++)
30936 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
30937 cell->siCb.newSiInfo.siInfo[idx].si);
30938 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
30939 cell->siCb.siArray[idx].isWarningSi = FALSE;
30941 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
30942 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
30943 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
30946 /*If numSi have been reduced then we need to free the
30947 pointers at the indexes in crntSiInfo which haven't
30948 been exercised. If numSi has increased then nothing
30949 additional is requires as above handling has taken
30951 if(cell->siCfg.numSi > cell->siCb.newSiCfg.numSi)
30953 for(idx = cell->siCb.newSiCfg.numSi;
30954 idx < cell->siCfg.numSi;idx++)
30956 RGSCH_FREE_MSG(cell->siCb.crntSiInfo.siInfo[idx].si);
30957 cell->siCb.siArray[idx].si = NULLP;
30963 /*numSi has not been updated, we just need to update the
30964 pointers for the SIs which are set to NON NULLP */
30965 /*ccpu00118260 - Correct Update of SIB2 */
30966 for(idx = 0;idx < cell->siCfg.numSi;idx++)
30968 if(NULLP != cell->siCb.newSiInfo.siInfo[idx].si)
30970 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
30971 cell->siCb.newSiInfo.siInfo[idx].si);
30973 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
30974 cell->siCb.siArray[idx].isWarningSi = FALSE;
30975 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
30976 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
30977 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
30981 cell->siCb.siBitMask &= ~RGSCH_SI_SI_UPD;
30984 /*Check whether SI cfg have been updated*/
30985 if(cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD)
/* Adopt the new SI configuration wholesale and clear the pending flag. */
30987 cell->siCfg = cell->siCb.newSiCfg;
30988 cell->siCb.siBitMask &= ~RGSCH_SI_SICFG_UPD;
30996 * @brief This function implements the selection of the SI
30997 * that is to be scheduled.
31001 * Function: rgSCHSelectSi
31002 * Purpose: This function implements the selection of SI
31003 * that is to be scheduled.
31005 * Invoked by: Scheduler
31007 * @param[in] RgSchCellCb* cell
31013 PRIVATE Void rgSCHSelectSi
31018 PRIVATE Void rgSCHSelectSi(cell)
31022 CmLteTimingInfo crntTmInfo;
31027 TRC2(rgSCHSelectSi);
/* Selects the SI message to transmit for the upcoming SI window and
 * records it in cell->siCb.siCtx (siId, retx count, TX window bounds).
 * Selection happens only once, at the start of each window. */
31030 crntTmInfo = cell->crntTime;
31031 #ifdef LTEMAC_HDFDD
31032 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31033 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31034 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31036 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA);
31039 siWinSize = cell->siCfg.siWinSize;
31041 /* Select SI only once at the starting of the new window */
31042 if(cell->siCb.inWindow)
31044 if ((crntTmInfo.sfn % cell->siCfg.minPeriodicity) == 0 &&
31045 crntTmInfo.slot == 0)
31047 /* Reinit inWindow at the beginning of every SI window */
31048 cell->siCb.inWindow = siWinSize - 1;
/* Still inside the current window: just count down and return early. */
31052 cell->siCb.inWindow--;
31056 else /* New window. Re-init the winSize counter with the window length */
/* An unsent warning-SI PDU from the previous context must be freed
 * before a new selection overwrites the context. */
31058 if((cell->siCb.siArray[cell->siCb.siCtx.siId - 1].isWarningSi == TRUE)&&
31059 (cell->siCb.siCtx.retxCntRem != 0))
31061 rgSCHUtlFreeWarningSiPdu(cell);
31062 cell->siCb.siCtx.warningSiFlag = FALSE;
31065 cell->siCb.inWindow = siWinSize - 1;
/* x = index of the SI set (multiple of minPeriodicity) containing now. */
31068 x = rgSCHCmnGetSiSetId(crntTmInfo.sfn, crntTmInfo.slot,
31069 cell->siCfg.minPeriodicity);
31071 /* Window Id within a SI set. This window Id directly maps to a
31073 windowId = (((crntTmInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) +
31074 crntTmInfo.slot) - (x * (cell->siCfg.minPeriodicity * 10)))
31077 if(windowId >= RGR_MAX_NUM_SI)
31080 /* Update the siCtx if there is a valid SI and its periodicity
31082 if (NULLP != cell->siCb.siArray[windowId].si)
31084 /* Warning SI Periodicity is same as SIB2 Periodicity */
31085 if(((cell->siCb.siArray[windowId].isWarningSi == FALSE) &&
31086 (x % (cell->siCfg.siPeriodicity[windowId]
31087 /cell->siCfg.minPeriodicity) == 0)) ||
31088 ((cell->siCb.siArray[windowId].isWarningSi == TRUE) &&
31089 (x % (cell->siCfg.siPeriodicity[0]
31090 /cell->siCfg.minPeriodicity) == 0)))
/* SI due in this window: arm the context — siId is 1-based, retx budget
 * reset, and TX window = [now, now + siWinSize - 1]. */
31092 cell->siCb.siCtx.siId = windowId+1;
31093 cell->siCb.siCtx.retxCntRem = cell->siCfg.retxCnt;
31094 cell->siCb.siCtx.warningSiFlag = cell->siCb.siArray[windowId].
31096 cell->siCb.siCtx.timeToTx.sfn = crntTmInfo.sfn;
31097 cell->siCb.siCtx.timeToTx.slot = crntTmInfo.slot;
31099 RG_SCH_ADD_TO_CRNT_TIME(cell->siCb.siCtx.timeToTx,
31100 cell->siCb.siCtx.maxTimeToTx, (siWinSize - 1))
31104 {/* Update the siCtx with invalid si Id */
31105 cell->siCb.siCtx.siId = 0;
31113 * @brief This function implements scheduler DL allocation for
31118 * Function: rgSCHDlSiSched
31119 * Purpose: This function implements scheduler for DL allocation
31122 * Invoked by: Scheduler
31124 * @param[in] RgSchCellCb* cell
31130 PRIVATE Void rgSCHDlSiSched
31133 RgSchCmnDlRbAllocInfo *allocInfo,
31134 RgInfSfAlloc *subfrmAlloc
31137 PRIVATE Void rgSCHDlSiSched(cell, allocInfo, subfrmAlloc)
31139 RgSchCmnDlRbAllocInfo *allocInfo;
31140 RgInfSfAlloc *subfrmAlloc;
31143 CmLteTimingInfo crntTimInfo;
31149 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31150 /* DwPTS Scheduling Changes Start */
31153 U8 cfi = cellDl->currCfi;
31155 /* DwPTS Scheduling Changes End */
31157 TRC2(rgSCHDlSiSched);
/* DL scheduling for broadcast system information: MIB on BCH, then
 * SIB1 / SI messages on DLSCH via allocInfo->bcchAlloc. */
31160 crntTimInfo = cell->crntTime;
31161 #ifdef LTEMAC_HDFDD
31162 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31163 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31164 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31166 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA);
31169 /* Compute the subframe for which allocation is being made.
31170 Essentially, we need pointer to the dl frame for this subframe */
31171 sf = rgSCHUtlSubFrmGet(cell, crntTimInfo);
31173 /*Check if scheduling of MIB is required */
31175 /* since we are adding the MIB repetition logic for EMTC UEs, checking if
31176 * emtcEnabled or not. If enabled, MIB would be repeated as part of the EMTC
31177 * feature, otherwise scheduling at (n,0) */
31178 if(0 == cell->emtcEnable)
31181 if((crntTimInfo.sfn % RGSCH_MIB_PERIODICITY == 0)
31182 && (RGSCH_MIB_TX_SF_NUM == crntTimInfo.slot))
31185 U8 sfnOctet, mibOct2 = 0;
31187 /*If MIB has not been yet setup by Application, return*/
31188 if(NULLP == cell->siCb.crntSiInfo.mib)
31191 SFndLenMsg(cell->siCb.crntSiInfo.mib, &mibLen);
31192 sf->bch.tbSize = mibLen;
31193 /*Fill the interface information */
31194 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, NULLD, NULLD);
31196 /*Set the bits of MIB to reflect SFN */
31197 /*First get the most significant 8 bits of SFN */
31198 sfnOctet = (U8)(crntTimInfo.sfn >> 2);
31199 /*Get the first two octets of MIB, and then update them
31200 using the SFN octet value obtained above.*/
31201 if(ROK != SExamMsg((Data *)(&mibOct1),
31202 cell->siCb.crntSiInfo.mib, 0))
31205 if(ROK != SExamMsg((Data *)(&mibOct2),
31206 cell->siCb.crntSiInfo.mib, 1))
31209 /* ccpu00114572- Fix for improper way of MIB Octet setting for SFN */
31210 mibOct1 = (mibOct1 & 0xFC) | (sfnOctet >> 6);
31211 mibOct2 = (mibOct2 & 0x03) | (sfnOctet << 2);
31212 /* ccpu00114572- Fix ends*/
31214 /*Now, replace the two octets in MIB */
31215 if(ROK != SRepMsg((Data)(mibOct1),
31216 cell->siCb.crntSiInfo.mib, 0))
31219 if(ROK != SRepMsg((Data)(mibOct2),
31220 cell->siCb.crntSiInfo.mib, 1))
31223 /*Copy the MIB msg buff into interface buffer */
31224 SCpyMsgMsg(cell->siCb.crntSiInfo.mib,
31225 rgSchCb[cell->instIdx].rgSchInit.region,
31226 rgSchCb[cell->instIdx].rgSchInit.pool,
31227 &subfrmAlloc->cmnLcInfo.bchInfo.pdu);
31228 /* Added Dl TB count for MIB message transmission
31229 * This counter is incremented 4 times to consider
31230 * the retransmission at the PHY level on PBCH channel*/
31232 cell->dlUlTbCnt.tbTransDlTotalCnt += RG_SCH_MIB_CNT;
31239 allocInfo->bcchAlloc.schdFirst = FALSE;
31240 /*Check if scheduling of SIB1 is required.
31241 Check of (crntTimInfo.sfn % RGSCH_SIB1_PERIODICITY == 0)
31242 is not required here since the below check takes care
31243 of SFNs applicable for this one too.*/
31244 if((crntTimInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
31245 && (RGSCH_SIB1_TX_SF_NUM == crntTimInfo.slot))
31247 /*If SIB1 has not been yet setup by Application, return*/
31248 if(NULLP == (cell->siCb.crntSiInfo.sib1Info.sib1))
/* SIB1 occasion: it gets scheduled ahead of any SI message this TTI. */
31253 allocInfo->bcchAlloc.schdFirst = TRUE;
31254 mcs = cell->siCb.crntSiInfo.sib1Info.mcs;
31255 nPrb = cell->siCb.crntSiInfo.sib1Info.nPrb;
31256 msgLen = cell->siCb.crntSiInfo.sib1Info.msgLen;
31260 /*Check if scheduling of SI can be performed.*/
31261 Bool invalid = FALSE;
31263 if(cell->siCb.siCtx.siId == 0)
31266 /*Check if the Si-Window for the current Si-Context is completed*/
31267 invalid = rgSCHCmnChkPastWin(crntTimInfo, cell->siCb.siCtx.maxTimeToTx);
31270 /* LTE_ADV_FLAG_REMOVED_START */
/* Window expired with retransmissions still pending — log it. */
31271 if(cell->siCb.siCtx.retxCntRem)
31273 RGSCHLOGERROR(cell->instIdx,ERRCLS_INT_PAR,ERG011,(ErrVal)cell->siCb.siCtx.siId,
31274 "rgSCHDlSiSched(): SI not scheduled and window expired");
31276 /* LTE_ADV_FLAG_REMOVED_END */
31277 if(cell->siCb.siCtx.warningSiFlag == TRUE)
31279 rgSCHUtlFreeWarningSiPdu(cell);
31280 cell->siCb.siCtx.warningSiFlag = FALSE;
31285 /*Check the timinginfo of the current SI-Context to see if its
31286 transmission can be scheduled. */
31287 if(FALSE == (rgSCHCmnChkInWin(crntTimInfo,
31288 cell->siCb.siCtx.timeToTx,
31289 cell->siCb.siCtx.maxTimeToTx)))
31294 /*Check if retransmission count has become 0*/
31295 if(0 == cell->siCb.siCtx.retxCntRem)
31300 /* LTE_ADV_FLAG_REMOVED_START */
31301 /* Check if ABS is enabled/configured */
31302 if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
31304 /* The pattern type is RGR_ABS_MUTE, then eNB need to blank the subframe */
31305 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
31307 /* Determine next scheduling subframe is ABS or not */
31308 if(RG_SCH_ABS_ENABLED_ABS_SF == (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern
31309 [((crntTimInfo.sfn*RGSCH_NUM_SUB_FRAMES) + crntTimInfo.slot) % RGR_ABS_PATTERN_LEN]))
31311 /* Skip the SI scheduling to next tti */
31316 /* LTE_ADV_FLAG_REMOVED_END */
31318 /*Schedule the transmission of the current SI-Context */
31319 /*Find out the message length for the SI message */
31320 /* warningSiFlag is to differentiate between Warning SI
31322 if((rgSCHUtlGetMcsAndNPrb(cell, &nPrb, &mcs, &msgLen)) != ROK)
/* 'i' = subframe offset of this TX within the SI window (used for the
 * SI-RNTI transmission counter). */
31327 cell->siCb.siCtx.i = RGSCH_CALC_SF_DIFF(crntTimInfo,
31328 cell->siCb.siCtx.timeToTx);
31332 /*Get the number of rb required */
31333 /*rgSCHCmnClcRbAllocForFxdTb(cell, msgLen, cellDl->ccchCqi, &rb);*/
/* No cached bits-per-RB estimate: walk the TBS table (1 layer, iTbs 0)
 * until the TB is large enough for msgLen bytes. */
31334 if(cellDl->bitsPerRb==0)
31336 while ((rgTbSzTbl[0][0][rb]) < (U32) (msgLen*8))
31344 rb = RGSCH_CEIL((msgLen*8), cellDl->bitsPerRb);
31346 /* DwPTS Scheduling Changes Start */
31348 if (sf->sfType == RG_SCH_SPL_SF_DATA)
31350 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
31352 /* Calculate the less RE's because of DwPTS */
31353 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
31355 /* Increase number of RBs in Spl SF to compensate for lost REs */
31356 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
31359 /* DwPTS Scheduling Changes End */
31360 /*ccpu00115595- end*/
31361 /* Additional check to see if required RBs
31362 * exceeds the available */
31363 if (rb > sf->bw - sf->bwAssigned)
31365 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHDlSiSched(): "
31366 "BW allocation failed CRNTI:%d",RGSCH_SI_RNTI)
31370 /* Update the subframe Allocated BW field */
31371 sf->bwAssigned = sf->bwAssigned + rb;
31373 /*Fill the parameters in allocInfo */
31374 allocInfo->bcchAlloc.rnti = RGSCH_SI_RNTI;
31375 allocInfo->bcchAlloc.dlSf = sf;
31376 allocInfo->bcchAlloc.rbsReq = rb;
31377 /*ccpu00116710- MCS is not getting assigned */
31378 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
31380 /* ccpu00117510 - ADD - Assignment of nPrb and other information */
31381 allocInfo->bcchAlloc.nPrb = nPrb;
31382 allocInfo->bcchAlloc.tbInfo[0].bytesReq = msgLen;
31383 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
31386 #endif /*RGR_SI_SCH*/
31389 /* ccpu00117452 - MOD - Changed macro name from
31390 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
31391 #ifdef RGR_CQI_REPT
31393 * @brief This function Updates the DL CQI for the UE.
31397 * Function: rgSCHCmnUeDlPwrCtColltCqiRept
31398 * Purpose: Manages PUSH N CQI reporting
31399 * Step 1: Store the CQI in collation array
31400 * Step 2: Increment the tracking count
31401 * Step 3: Check if it is time to send the report
31402 * Step 4: if yes, Send StaInd to RRM
31403 * Step 4.1: Fill StaInd for sending collated N CQI reports
31404 * Step 4.2: Call utility function (rgSCHUtlRgrStaInd) to send rpts to RRM
31405 * Step 4.2.1: If sending was not successful, return RFAILED
31406 * Step 4.2.2: If sending was successful, return ROK
31407 * Step 5: If no, return
31408 * Invoked by: rgSCHCmnDlCqiInd
31410 * @param[in] RgSchCellCb *cell
31411 * @param[in] RgSchUeCb *ue
31412 * @param[in] RgrUeCqiRept *ueCqiRpt
31417 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept
31421 RgrUeCqiRept *ueCqiRpt
31424 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, ueCqiRpt)
31427 RgrUeCqiRept *ueCqiRpt;
31430 U8 *cqiCount = NULLP;
31432 RgrStaIndInfo *staInfo = NULLP;
31434 TRC2(rgSCHCmnUeDlPwrCtColltCqiRept)
/* Collates DL CQI reports per UE and, once N reports are gathered,
 * sends them to RRM in a single StaInd ("PUSH N" reporting). */
31436 /* Step 1: Store the CQI in collation array */
31437 /* Step 2: Increment the tracking count */
31438 cqiCount = &(ue->schCqiInfo.cqiCount);
31439 ue->schCqiInfo.cqiRept[(*cqiCount)++] =
31443 /* Step 3: Check if it is time to send the report */
31444 if(RG_SCH_CQIR_IS_TIMTOSEND_CQIREPT(ue))
31446 /* Step 4: if yes, Send StaInd to RRM */
/* staInfo ownership transfers on successful send — TODO confirm against
 * rgSCHUtlFillSndStaInd; on alloc failure we only log and bail out. */
31447 retVal = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&staInfo,
31448 sizeof(RgrStaIndInfo));
31451 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
31452 "allocate memory for sending StaInd CRNTI:%d",ue->ueId);
31456 /* Step 4.1: Fill StaInd for sending collated N CQI reports */
31459 extern U32 gCqiReptToAppCount;
31460 gCqiReptToAppCount++;
31465 retVal = rgSCHUtlFillSndStaInd(cell, ue, staInfo,
31466 ue->cqiReptCfgInfo.numColltdCqiRept);
31472 } /* End of rgSCHCmnUeDlPwrCtColltCqiRept */
31474 #endif /* End of RGR_CQI_REPT */
31477 * @brief This function checks for the retransmission
31478 * for a DTX scenario.
31485 * @param[in] RgSchCellCb *cell
31486 * @param[in] RgSchUeCb *ue
/* Decides whether retransmission is allowed for a DL HARQ process when
 * DTX feedback was received: if TB0's ACK/NACK feedback is DTX, the
 * output flag *reTxAllwd is forced FALSE.
 * NOTE(review): handling for non-DTX feedback is not visible in this view. */
31492 PUBLIC Void rgSCHCmnChkRetxAllowDtx
31496 RgSchDlHqProcCb *proc,
31500 PUBLIC Void rgSCHCmnChkRetxAllowDtx(cell, ueCb, proc, reTxAllwd)
31503 RgSchDlHqProcCb *proc;
31507 TRC3(rgSCHCmnChkRetxAllowDtx)
/* DTX on TB0 => do not schedule an adaptive retransmission. */
31512 if ((proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX))
31514 *reTxAllwd = FALSE;
31521 * @brief API for calculating the SI Set Id
31525 * Function: rgSCHCmnGetSiSetId
31527 * This API is used for calculating the SI Set Id, as shown below
31529 * siSetId = 0 siSetId = 1
31530 * |******************|******************|---------------->
31531 * (0,0) (8,0) (16,0) (SFN, SF)
31534 * @param[in] U16 sfn
31536 * @return U16 siSetId
/* Computes the SI set id for a given (sfn, sf): the absolute subframe
 * number divided by the minimum SI periodicity.
 * NOTE(review): the *10 suggests minPeriodicity is in frames and is
 * converted to subframes here — TODO confirm units against callers. */
31539 PUBLIC U16 rgSCHCmnGetSiSetId
31546 PUBLIC U16 rgSCHCmnGetSiSetId(sfn, sf, minPeriodicity)
31549 U16 minPeriodicity;
31552 /* 80 is the minimum SI periodicity in sf. Also
31553 * all other SI periodicities are multiples of 80 */
31554 RETVALUE (((sfn * RGSCH_NUM_SUB_FRAMES_5G) + sf) / (minPeriodicity * 10));
31558 * @brief API for calculating the DwPts Rb, Itbs and tbSz
31562 * Function: rgSCHCmnCalcDwPtsTbSz
31564 * @param[in] RgSchCellCb *cell
31565 * @param[in] U32 bo
31566 * @param[in/out] U8 *rb
31567 * @param[in/out] U8 *iTbs
31568 * @param[in] U8 lyr
31569 * @param[in] U8 cfi
/* Computes the TB size usable in the DwPTS (TDD special subframe) region:
 * converts the normal-subframe RB count (*rb) into an equivalent DwPTS RB
 * count using the per-CFI RE densities, caps it at the UE's max DL
 * bandwidth, derates *iTbs via a static delta table, then grows the RB
 * count while the resulting TB size is below the BO (bo in bytes, table
 * entries in bits — hence bo*8). Returns the selected TB size.
 * NOTE(review): the numDwPtsRb*3/4 scaling when indexing rgTbSzTbl
 * presumably models the shortened DwPTS duration — TODO confirm. */
31573 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz
31583 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz(cell, bo, rb, iTbs, lyr, cfi)
31593 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31594 U32 numRE = *rb * cellDl->noResPerRb[cfi];
31595 U32 numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);
31597 TRC2(rgSCHCmnCalcDwPtsTbSz);
31599 /* DwPts Rb cannot exceed the cell Bw */
31600 numDwPtsRb = RGSCH_MIN(numDwPtsRb, cellDl->maxDlBwPerUe);
31602 /* Adjust the iTbs for optimum usage of the DwPts region.
31603 * Using the same iTbs adjustment will not work for all
31604 * special subframe configurations and iTbs levels. Hence use the
31605 * static iTbs Delta table for adjusting the iTbs */
31606 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs);
/* Expand allocation while TB size (bits) is still below bo*8 and
 * bandwidth headroom remains. */
31610 while(rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1] < bo*8 &&
31611 numDwPtsRb < cellDl->maxDlBwPerUe)
31616 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
31620 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
31628 * @brief API for calculating the DwPts Rb, Itbs and tbSz
31632 * Function: rgSCHCmnCalcDwPtsTbSz2Cw
31634 * @param[in] RgSchCellCb *cell
31635 * @param[in] U32 bo
31636 * @param[in/out] U8 *rb
31637 * @param[in] U8 maxRb
31638 * @param[in/out] U8 *iTbs1
31639 * @param[in/out] U8 *iTbs2
31640 * @param[in] U8 lyr1
31641 * @param[in] U8 lyr2
31642 * @return[in/out] U32 *tb1Sz
31643 * @return[in/out] U32 *tb2Sz
31644 * @param[in] U8 cfi
/* Two-codeword variant of rgSCHCmnCalcDwPtsTbSz: sizes both TBs for the
 * DwPTS region. Derives the DwPTS RB count from the per-CFI RE densities,
 * caps it at maxRb, derates both iTbs values via the static delta table,
 * then grows the RB count while the combined TB size (bits) of both
 * codewords is below bo*8. Results are written to *tb1Sz/*tb2Sz in bytes
 * (table values are bits, hence /8). */
31647 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw
31662 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, rb, maxRb, iTbs1, iTbs2,
31663 lyr1, lyr2, tb1Sz, tb2Sz, cfi)
31677 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31678 U32 numRE = *rb * cellDl->noResPerRb[cfi];
31679 U32 numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);
31681 TRC2(rgSCHCmnCalcDwPtsTbSz2Cw);
31683 /* DwPts Rb cannot exceed the cell Bw */
31684 numDwPtsRb = RGSCH_MIN(numDwPtsRb, maxRb);
31686 /* Adjust the iTbs for optimum usage of the DwPts region.
31687 * Using the same iTbs adjustment will not work for all
31688 * special subframe configurations and iTbs levels. Hence use the
31689 * static iTbs Delta table for adjusting the iTbs */
31690 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs1);
31691 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs2);
/* Expand while the two codewords together cannot carry the BO. */
31693 while((rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1] +
31694 rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1])< bo*8 &&
31695 numDwPtsRb < maxRb)
31700 *tb1Sz = rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
31701 *tb2Sz = rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
31711 * @brief Updates the GBR LCGs when datInd is received from MAC
31715 * Function: rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
31716 * Purpose: This function updates the GBR LCGs
31717 * when datInd is received from MAC.
31721 * @param[in] RgSchCellCb *cell
31722 * @param[in] RgSchUeCb *ue
31723 * @param[in] RgInfUeDatInd *datInd
/* Updates per-LCG buffer-status bookkeeping when a data indication
 * arrives from MAC: for each LCG with received bytes, debits the
 * effective GBR/MBR budget (GBR bearers) or the effective AMBR
 * (non-GBR, non-LCG0 bearers), reduces the reported BS accordingly and
 * recomputes the schedulable BS. Finally invokes the UL-scheduler hook
 * (EMTC or normal) for LCG update. */
31727 PUBLIC Void rgSCHCmnUpdUeDataIndLcg
31731 RgInfUeDatInd *datInd
31734 PUBLIC Void rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
31737 RgInfUeDatInd *datInd;
31741 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
31743 Inst inst = cell->instIdx;
31746 TRC2(rgSCHCmnUpdUeDataIndLcg);
/* NOTE(review): the bound skips the last lcgInfo entry
 * (RGINF_MAX_LCG_PER_UE - 1) — possible off-by-one, or the last slot may
 * be reserved; TODO confirm against RgInfUeDatInd's contract. */
31748 for (idx = 0; (idx < RGINF_MAX_LCG_PER_UE - 1); idx++)
31750 if (datInd->lcgInfo[idx].bytesRcvd != 0)
31752 U8 lcgId = datInd->lcgInfo[idx].lcgId;
31753 U32 bytesRcvd = datInd->lcgInfo[idx].bytesRcvd;
31755 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
31757 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
31758 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
/* Received data exceeds remaining GBR: exhaust GBR, spill into MBR. */
31760 if(bytesRcvd > cmnLcg->effGbr)
31762 bytesRcvd -= cmnLcg->effGbr;
31763 cmnLcg->effDeltaMbr = (cmnLcg->effDeltaMbr > bytesRcvd) ? \
31764 (cmnLcg->effDeltaMbr - bytesRcvd) : (0);
31765 cmnLcg->effGbr = 0;
31769 cmnLcg->effGbr -= bytesRcvd;
31771 /* To keep BS updated with the amount of data received for the GBR */
31772 cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31773 (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
/* Schedulable BS is bounded by the remaining GBR+MBR budget. */
31774 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr+cmnLcg->effDeltaMbr);
/* Non-GBR bearer (LCG0 excluded): debit the UE-wide AMBR budget. */
31776 else if(lcgId != 0)
31778 ue->ul.effAmbr = (ue->ul.effAmbr > datInd->lcgInfo[idx].bytesRcvd) ? \
31779 (ue->ul.effAmbr - datInd->lcgInfo[idx].bytesRcvd) : (0);
31780 cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31781 (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31782 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
31783 ue->ul.nonGbrLcgBs = (ue->ul.nonGbrLcgBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31784 (ue->ul.nonGbrLcgBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31786 ue->ul.nonLcg0Bs = (ue->ul.nonLcg0Bs > datInd->lcgInfo[idx].bytesRcvd) ? \
31787 (ue->ul.nonLcg0Bs - datInd->lcgInfo[idx].bytesRcvd) : (0);
/* Dispatch the LCG update to the EMTC- or normal-UL scheduler API. */
31796 if(TRUE == ue->isEmtcUe)
31798 if (cellSch->apisEmtcUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
31800 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));
31807 if (cellSch->apisUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
31809 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));
31815 /** @brief This function initializes DL allocation lists and prepares
31820 * Function: rgSCHCmnInitRbAlloc
31822 * @param [in] RgSchCellCb *cell
/* Prepares per-TTI DL allocation state for the cell: resets the DL RB
 * allocation info, fetches the DL subframe for the current scheduling
 * time, seeds 5GTF per-beam/per-group limits, records the subframe in
 * the dedicated/msg4/ccchSdu allocation contexts, derives the ABS
 * (almost blank subframe) state from the configured pattern, and updates
 * subframe-wide info with SPS allocations. */
31828 PRIVATE Void rgSCHCmnInitRbAlloc
31833 PRIVATE Void rgSCHCmnInitRbAlloc (cell)
31837 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
31838 CmLteTimingInfo frm;
31842 TRC2(rgSCHCmnInitRbAlloc);
31844 /* Initializing RgSchCmnUlRbAllocInfo structure.*/
31845 rgSCHCmnInitDlRbAllocInfo(&cellSch->allocInfo);
31847 frm = cellSch->dl.time;
31849 dlSf = rgSCHUtlSubFrmGet(cell, frm);
/* 5GTF: per-TTI UE-group limits and per-beam VRBG accounting reset. */
31851 dlSf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
31852 dlSf->numUePerGrp = cell->cell5gtfCb.uePerGrpPerTti;
31853 for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
31855 dlSf->sfBeamInfo[idx].totVrbgAllocated = 0;
31856 dlSf->sfBeamInfo[idx].totVrbgRequired = 0;
31857 dlSf->sfBeamInfo[idx].vrbgStart = 0;
31860 dlSf->remUeCnt = cellSch->dl.maxUePerDlSf;
31861 /* Updating the Subframe information in RBAllocInfo */
31862 cellSch->allocInfo.dedAlloc.dedDlSf = dlSf;
31863 cellSch->allocInfo.msg4Alloc.msg4DlSf = dlSf;
31865 /* LTE_ADV_FLAG_REMOVED_START */
31866 /* Determine next scheduling subframe is ABS or not */
31867 if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
/* Index into the ABS pattern from the absolute subframe number. */
31869 cell->lteAdvCb.absPatternDlIdx =
31870 ((frm.sfn*RGSCH_NUM_SUB_FRAMES_5G) + frm.slot) % RGR_ABS_PATTERN_LEN;
31871 cell->lteAdvCb.absDlSfInfo = (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern[
31872 cell->lteAdvCb.absPatternDlIdx]);
31877 cell->lteAdvCb.absDlSfInfo = RG_SCH_ABS_DISABLED;
31879 /* LTE_ADV_FLAG_REMOVED_END */
31882 cellSch->allocInfo.ccchSduAlloc.ccchSduDlSf = dlSf;
31885 /* Update subframe-wide allocation information with SPS allocation */
31886 rgSCHCmnSpsDlUpdDlSfAllocWithSps(cell, frm, dlSf);
31895 * @brief Checks & updates the TM mode change threshold based on cqiiTbs and
31900 * Function: rgSCHCmnSendTxModeInd(cell, ueUl, newTxMode)
31901 * Purpose: This function sends the TX mode Change
31902 * indication to RRM
31907 * @param[in] RgSchCellCb *cell
31908 * @param[in] RgSchUeCb *ue
31909 * @param[in] U8 newTxMode
/* Sends a transmission-mode change indication (newTxMode) to RRM over
 * the RGM SAP, unless a TX-mode reconfiguration is already forcing
 * transmit diversity; then resets the step-up/step-down counters and the
 * link-adaptation iTbs delta so threshold tracking restarts cleanly. */
31913 PRIVATE Void rgSCHCmnSendTxModeInd
31920 PRIVATE Void rgSCHCmnSendTxModeInd(cell, ue, newTxMode)
31926 RgmTransModeInd *txModeChgInd;
31927 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
31929 TRC2(rgSCHCmnSendTxModeInd);
/* Skip if a TX-mode reconfig is not... — guard semantics hinge on lines
 * not visible here; TODO confirm the forceTD check's polarity. */
31931 if(!(ueDl->mimoInfo.forceTD & RG_SCH_CMN_TD_TXMODE_RECFG))
/* Allocate the indication message from the SAP's pst region/pool;
 * silently abort on allocation failure. */
31934 if(SGetSBuf(cell->rgmSap->sapCfg.sapPst.region,
31935 cell->rgmSap->sapCfg.sapPst.pool, (Data**)&txModeChgInd,
31936 sizeof(RgmTransModeInd)) != ROK)
31940 RG_SCH_FILL_RGM_TRANSMODE_IND(ue->ueId, cell->cellId, newTxMode, txModeChgInd);
31941 RgUiRgmChangeTransModeInd(&(cell->rgmSap->sapCfg.sapPst),
31942 cell->rgmSap->sapCfg.suId, txModeChgInd);
/* Restart threshold tracking after the indication is issued. */
31945 ue->mimoInfo.txModUpChgFactor = 0;
31946 ue->mimoInfo.txModDownChgFactor = 0;
31947 ueDl->laCb[0].deltaiTbs = 0;
31953 * @brief Checks & updates the TM mode change threshold based on cqiiTbs and
31958 * Function: rgSchCheckAndTriggerModeChange(cell, ueUl, iTbsNew)
31959 * Purpose: This function updates and checks the threshold for TM mode
31964 * @param[in] RgSchCellCb *cell
31965 * @param[in] RgSchUeCb *ue
31966 * @param[in] U8 iTbs
/* Tracks hysteresis counters comparing the reported iTbs with the
 * previous iTbs and triggers a TM mode change indication to RRM:
 * TM4 -> TM3 (step down) when reported quality stays sufficiently above
 * the in-use iTbs, TM3 -> TM4 (step up) when reported quality lags or is
 * already at max. Counters are floored at the negated threshold so one
 * good/bad sample cannot mask a long trend. */
31970 PUBLIC Void rgSchCheckAndTriggerModeChange
31979 PUBLIC Void rgSchCheckAndTriggerModeChange(cell, ue, reportediTbs, previTbs, maxiTbs)
31987 RgrTxMode txMode; /*!< UE's Transmission Mode */
31988 RgrTxMode modTxMode; /*!< UE's Transmission Mode */
31990 TRC2(rgSchCheckAndTriggerModeChange);
31992 txMode = ue->mimoInfo.txMode;
31994 /* Check for Step down */
31995 /* Step down only when TM4 is configured. */
31996 if(RGR_UE_TM_4 == txMode)
31998 if((previTbs <= reportediTbs) && ((reportediTbs - previTbs) >= RG_SCH_MODE_CHNG_STEPDOWN_CHECK_FACTOR))
32000 ue->mimoInfo.txModDownChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
32004 ue->mimoInfo.txModDownChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
/* Floor the counter so it cannot drift unboundedly negative. */
32007 ue->mimoInfo.txModDownChgFactor =
32008 RGSCH_MAX(ue->mimoInfo.txModDownChgFactor, -(RG_SCH_MODE_CHNG_STEPDOWN_THRSHD));
32010 if(ue->mimoInfo.txModDownChgFactor >= RG_SCH_MODE_CHNG_STEPDOWN_THRSHD)
32012 /* Trigger Mode step down */
32013 modTxMode = RGR_UE_TM_3;
32014 rgSCHCmnSendTxModeInd(cell, ue, modTxMode);
32018 /* Check for Step up */
32019 /* Step Up only when TM3 is configured, Max possible Mode is TM4*/
32020 if(RGR_UE_TM_3 == txMode)
32022 if((previTbs > reportediTbs) || (maxiTbs == previTbs))
32024 ue->mimoInfo.txModUpChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
32028 ue->mimoInfo.txModUpChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
/* Floor the step-up counter symmetrically. */
32031 ue->mimoInfo.txModUpChgFactor =
32032 RGSCH_MAX(ue->mimoInfo.txModUpChgFactor, -(RG_SCH_MODE_CHNG_STEPUP_THRSHD));
32034 /* Check if TM step up need to be triggered */
32035 if(ue->mimoInfo.txModUpChgFactor >= RG_SCH_MODE_CHNG_STEPUP_THRSHD)
32037 /* Trigger mode change */
32038 modTxMode = RGR_UE_TM_4;
32039 rgSCHCmnSendTxModeInd(cell, ue, modTxMode);
32048 * @brief Updates the GBR LCGs when datInd is received from MAC
32052 * Function: rgSCHCmnIsDlCsgPrio (cell)
32053 * Purpose: This function returns if csg UEs are
32054 * having priority at current time
32056 * Invoked by: Scheduler
32058 * @param[in] RgSchCellCb *cell
32059 * @param[in] RgSchUeCb *ue
32060 * @param[in] RgInfUeDatInd *datInd
/* Returns whether CSG (closed subscriber group) UEs get DL priority at
 * the current time in a hybrid-access cell, based on the percentage of
 * PRBs already granted to non-CSG UEs versus the configured minimum.
 * NOTE(review): the return branches fall on lines not visible here —
 * TODO confirm which comparison result maps to TRUE. */
32064 PUBLIC Bool rgSCHCmnIsDlCsgPrio
32069 PUBLIC Bool rgSCHCmnIsDlCsgPrio(cell)
32074 RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);
32076 TRC2(rgSCHCmnIsDlCsgPrio)
32077 /* Calculating the percentage resource allocated */
/* CSG prioritisation only applies to hybrid-access cells. */
32078 if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
32084 if(((cmnDlCell->ncsgPrbCnt * 100) / cmnDlCell->totPrbCnt) < cell->minDlResNonCsg)
32096 * @brief Updates the GBR LCGs when datInd is received from MAC
32100 * Function: rgSCHCmnIsUlCsgPrio (cell)
32101 * Purpose: This function returns if csg UEs are
32102 * having priority at current time
32104 * Invoked by: Scheduler
32106 * @param[in] RgSchCellCb *cell
32107 * @param[in] RgSchUeCb *ue
32108 * @param[in] RgInfUeDatInd *datInd
/* UL counterpart of rgSCHCmnIsDlCsgPrio: returns whether CSG UEs get UL
 * priority at the current time in a hybrid-access cell, based on the
 * percentage of UL PRBs already granted to non-CSG UEs versus the
 * configured minimum. NOTE(review): return branches not visible here —
 * TODO confirm which comparison result maps to TRUE. */
32112 PUBLIC Bool rgSCHCmnIsUlCsgPrio
32117 PUBLIC Bool rgSCHCmnIsUlCsgPrio(cell)
32121 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
32123 TRC2(rgSCHCmnIsUlCsgPrio)
32125 /* Calculating the percentage resource allocated */
/* CSG prioritisation only applies to hybrid-access cells. */
32126 if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
32132 if (((cmnUlCell->ncsgPrbCnt * 100) /cmnUlCell->totPrbCnt) < cell->minUlResNonCsg)
32143 /** @brief DL scheduler for SPS, and all other downlink data
32147 * Function: rgSchCmnPreDlSch
32149 * @param [in] Inst schInst;
/* Pre-DL-scheduling hook: runs the scheduler-specific pre-scheduling for
 * the primary cell (when DL data is allowed and scheduling not stopped),
 * then fills cellLst[] with the nCell cells ordered by the remaining UE
 * count of their dedicated DL subframe (insertion sort, ascending
 * remUeCnt) so later processing starts with the most-loaded subframes.
 * Cells beyond CM_LTE_MAX_CELLS are rejected (early return, per the
 * guard below). */
32154 PUBLIC Void rgSchCmnPreDlSch
32156 RgSchCellCb **cell,
32158 RgSchCellCb **cellLst
32161 PUBLIC Void rgSchCmnPreDlSch(cell, nCell, cellLst)
32162 RgSchCellCb **cell;
32164 RgSchCellCb **cellLst;
32167 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell[0]);
32171 TRC2(rgSchCmnPreDlSch);
/* Sanity guard on the cell count. */
32173 if(nCell > CM_LTE_MAX_CELLS)
32178 if (cell[0]->isDlDataAllwd && (cell[0]->stopDlSch == FALSE))
32180 /* Specific DL scheduler to perform UE scheduling */
32181 cellSch->apisDl->rgSCHDlPreSched(cell[0]);
32183 /* Rearranging the cell entries based on their remueCnt in SF.
32184 * cells will be processed in the order of number of ue scheduled
32186 for (idx = 0; idx < nCell; idx++)
32189 cellSch = RG_SCH_CMN_GET_CELL(cell[idx]);
32190 sf = cellSch->allocInfo.dedAlloc.dedDlSf;
32194 cellLst[idx] = cell[idx];
/* Insertion sort: find the slot whose subframe has more remaining UEs,
 * shift the tail right, and insert the current cell. */
32198 for(j = 0; j < idx; j++)
32200 RgSchCmnCell *cmnCell = RG_SCH_CMN_GET_CELL(cellLst[j]);
32201 RgSchDlSf *subfrm = cmnCell->allocInfo.dedAlloc.dedDlSf;
32203 if(sf->remUeCnt < subfrm->remUeCnt)
32206 for(k = idx; k > j; k--)
32208 cellLst[k] = cellLst[k-1];
32213 cellLst[j] = cell[idx];
/* Fallback path: copy the cells in their given order. */
32218 for (idx = 0; idx < nCell; idx++)
32220 cellLst[idx] = cell[idx];
32226 /** @brief DL scheduler for SPS, and all other downlink data
32229 * Function: rgSchCmnPstDlSch
32231 * @param [in] Inst schInst;
/* Post-DL-scheduling hook: invokes the scheduler-specific post-schedule
 * callback for the cell, but only when DL data scheduling is allowed and
 * has not been stopped for this cell. */
32236 PUBLIC Void rgSchCmnPstDlSch
32241 PUBLIC Void rgSchCmnPstDlSch(cell)
32245 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
32247 TRC2(rgSchCmnPstDlSch);
32249 if (cell->isDlDataAllwd && (cell->stopDlSch == FALSE))
32251 cellSch->apisDl->rgSCHDlPstSched(cell->instIdx);
/* Computes the periodic CQI (PUCCH) payload size in bits for a UE, based
 * on the configured periodic reporting mode, the UE's transmission mode
 * (RI is only meaningful for TM3/TM4), the number of TX antennas, and —
 * for the sub-band modes 2-0/2-1 — the bandwidth-part label.
 * NOTE(review): many switch branches fall on lines not visible in this
 * view; exact sizes per (mode, numTxAnt, ri) should be cross-checked
 * against TS 36.213 periodic reporting tables. */
32256 PUBLIC U8 rgSCHCmnCalcPcqiBitSz
32262 PUBLIC U8 rgSCHCmnCalcPcqiBitSz(ueCb, numTxAnt)
32270 RgSchUePCqiCb *cqiCb = ueCb->nPCqiCb;
32272 TRC3(rgSCHCmnCalcPcqiBitSz);
32274 confRepMode = cqiCb->cqiCfg.cqiSetup.prdModeEnum;
/* RI reporting applies only to TM3/TM4. */
32275 if((ueCb->mimoInfo.txMode != RGR_UE_TM_3) &&
32276 (ueCb->mimoInfo.txMode != RGR_UE_TM_4))
32282 ri = cqiCb->perRiVal;
32284 switch(confRepMode)
32286 case RGR_PRD_CQI_MOD10:
32292 case RGR_PRD_CQI_MOD11:
32305 else if(numTxAnt == 4)
32318 /* This is number of antenna case 1.
32319 * This is not applicable for Mode 1-1.
32320 * So setting it to invalid value */
32326 case RGR_PRD_CQI_MOD20:
/* Sub-band report: 4-bit CQI plus bandwidth-part label bits. */
32334 pcqiSz = 4 + cqiCb->label;
32339 case RGR_PRD_CQI_MOD21:
32354 else if(numTxAnt == 4)
32367 /* This might be number of antenna case 1.
32368 * For mode 2-1 wideband case only antenna port 2 or 4 is supported.
32369 * So setting invalid value.*/
32377 pcqiSz = 4 + cqiCb->label;
32381 pcqiSz = 7 + cqiCb->label;
32394 /** @brief DL scheduler for SPS, and all other downlink data
32398 * Function: rgSCHCmnDlSch
32400 * @param [in] RgSchCellCb *cell
/* Per-TTI DL scheduler entry for a cell: skips the TTI when dynamic TDD
 * has already marked the control subframe UL, otherwise runs the
 * scheduler-specific new-UE scheduling, common RB allocation,
 * finalisation of requested-vs-allocated, PDCCH-order allocation (lowest
 * preference), and PUCCH group power control. */
32406 PUBLIC Void rgSCHCmnDlSch
32411 PUBLIC Void rgSCHCmnDlSch (cell)
32416 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
32418 RgSchDynTddCb *rgSchDynTddInfo = &(rgSchCb[cell->instIdx].rgSchDynTdd);
32422 TRC2(rgSCHCmnDlSch);
32424 dlSf = rgSCHUtlSubFrmGet(cell, cellSch->dl.time);
/* Dynamic TDD: if the DL control subframe (at DL delta) was already
 * flipped to UL, count/log the event and skip DL scheduling. */
32426 if (rgSchDynTddInfo->isDynTddEnbld)
32428 RG_SCH_DYN_TDD_GET_SFIDX(dlCntrlSfIdx, rgSchDynTddInfo->crntDTddSfIdx,
32429 RG_SCH_CMN_DL_DELTA);
32430 if(RG_SCH_DYNTDD_DLC_ULD == rgSchDynTddInfo->sfInfo[dlCntrlSfIdx].sfType)
32432 if(1 == cell->cellId)
32434 ul5gtfsidDlAlreadyMarkUl++;
/* Debug trace only for cell 1 — presumably a bring-up aid. */
32436 printf("ul5gtfsidDlAlreadyMarkUl: %d, [sfn:sf] [%04d:%02d]\n",
32437 ul5gtfsidDlAlreadyMarkUl, cellSch->dl.time.sfn,
32438 cellSch->dl.time.slot);
32446 /* Specific DL scheduler to perform UE scheduling */
32447 cellSch->apisDl->rgSCHDlNewSched(cell, &cellSch->allocInfo);
32448 /* LTE_ADV_FLAG_REMOVED_END */
32450 /* call common allocator for RB Allocation */
32451 rgSCHCmnDlRbAlloc(cell, &cellSch->allocInfo);
32453 /* Finalize the Allocations for reqested Against alloced */
32454 rgSCHCmnDlAllocFnlz(cell);
32456 /* Perform Pdcch allocations for PDCCH Order Q.
32457 * As of now, giving this the least preference.
32458 * This func call could be moved above other allocations
32460 rgSCHCmnGenPdcchOrder(cell, dlSf);
32462 /* Do group power control for PUCCH */
32463 rgSCHCmnGrpPwrCntrlPucch(cell, dlSf);
32468 /**********************************************************************
32471 **********************************************************************/