1 /*******************************************************************************
2 ################################################################################
3 # Copyright (c) [2017-2019] [Radisys] #
5 # Licensed under the Apache License, Version 2.0 (the "License"); #
6 # you may not use this file except in compliance with the License. #
7 # You may obtain a copy of the License at #
9 # http://www.apache.org/licenses/LICENSE-2.0 #
11 # Unless required by applicable law or agreed to in writing, software #
12 # distributed under the License is distributed on an "AS IS" BASIS, #
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
14 # See the License for the specific language governing permissions and #
15 # limitations under the License. #
16 ################################################################################
17 *******************************************************************************/
19 /************************************************************************
25 Desc: C source code for entry point functions
29 **********************************************************************/
31 /** @file rg_sch_cmn.c
32 @brief This file implements the scheduler's main access to MAC layer code.
35 static const char* RLOG_MODULE_NAME="MAC";
36 static int RLOG_FILE_ID=187;
37 static int RLOG_MODULE_ID=4096;
39 /* header include files -- defines (.h) */
40 #include "common_def.h"
46 #include "rg_sch_err.h"
47 #include "rg_sch_inf.h"
49 #include "rg_sch_cmn.h"
50 #include "rl_interface.h"
51 #include "rl_common.h"
53 /* header/extern include files (.x) */
54 #include "tfu.x" /* TFU types */
55 #include "lrg.x" /* layer management typedefs for MAC */
56 #include "rgr.x" /* layer management typedefs for MAC */
57 #include "rgm.x" /* layer management typedefs for MAC */
58 #include "rg_sch_inf.x" /* typedefs for Scheduler */
59 #include "rg_sch.x" /* typedefs for Scheduler */
60 #include "rg_sch_cmn.x" /* typedefs for Scheduler */
62 #include "lrg.x" /* Stats Structures */
63 #endif /* MAC_SCH_STATS */
66 #endif /* __cplusplus */
69 EXTERN U32 emtcStatsUlTomSrInd;
70 EXTERN U32 emtcStatsUlBsrTmrTxp;
73 #define RG_ITBS_DIFF(_x, _y) ((_x) > (_y) ? (_x) - (_y) : (_y) - (_x))
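/* Illustrative usage (not part of the original code): RG_ITBS_DIFF(10, 7) and
 * RG_ITBS_DIFF(7, 10) both evaluate to 3, i.e. the macro yields the absolute
 * difference between two iTBS values. */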
74 EXTERN Void rgSCHSc1UlInit ARGS((RgUlSchdApis *apis));
75 #ifdef RG_PHASE2_SCHED
76 EXTERN Void rgSCHRrUlInit ARGS((RgUlSchdApis *apis));
78 EXTERN Void rgSCHEmtcHqInfoFree ARGS((RgSchCellCb *cell, RgSchDlHqProcCb *hqP));
79 EXTERN Void rgSCHEmtcRrUlInit ARGS((RgUlSchdApis *apis));
80 EXTERN Void rgSCHEmtcCmnDlInit ARGS((Void));
81 EXTERN Void rgSCHEmtcCmnUlInit ARGS((Void));
82 EXTERN Void rgSCHEmtcCmnUeNbReset ARGS((RgSchUeCb *ueCb));
83 EXTERN RgSchCmnCqiToTbs *rgSchEmtcCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
85 EXTERN Void rgSCHMaxciUlInit ARGS((RgUlSchdApis *apis));
86 EXTERN Void rgSCHPfsUlInit ARGS((RgUlSchdApis *apis));
88 EXTERN Void rgSCHSc1DlInit ARGS((RgDlSchdApis *apis));
89 #ifdef RG_PHASE2_SCHED
90 EXTERN Void rgSCHRrDlInit ARGS((RgDlSchdApis *apis));
92 EXTERN Void rgSCHEmtcRrDlInit ARGS((RgDlEmtcSchdApis *apis));
94 EXTERN Void rgSCHMaxciDlInit ARGS((RgDlSchdApis *apis));
95 EXTERN Void rgSCHPfsDlInit ARGS((RgDlSchdApis *apis));
97 EXTERN Void rgSCHDlfsInit ARGS((RgDlfsSchdApis *apis));
101 EXTERN Void rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl ARGS((RgSchCellCb *cell));
102 EXTERN Void rgSCHCmnGetEmtcDciFrmtSizes ARGS((RgSchCellCb *cell));
103 EXTERN Void rgSCHEmtcRrUlProcRmvFrmRetx ARGS((RgSchCellCb *cell, RgSchUlHqProcCb *proc));
104 EXTERN S16 rgSCHCmnPrecompEmtcMsg3Vars
106 RgSchCmnUlCell *cellUl,
112 PUBLIC Void rgSCHEmtcCmnUeCcchSduDel
117 EXTERN Void rgSCHEmtcRmvFrmTaLst
119 RgSchCmnDlCell *cellDl,
122 EXTERN Void rgSCHEmtcInitTaLst
124 RgSchCmnDlCell *cellDl
126 EXTERN Void rgSCHEmtcAddToTaLst
128 RgSchCmnDlCell *cellDl,
135 PRIVATE Void rgSCHDlSiSched ARGS((RgSchCellCb *cell,
136 RgSchCmnDlRbAllocInfo *allocInfo,
137 RgInfSfAlloc *subfrmAlloc));
138 PRIVATE Void rgSCHChkNUpdSiCfg ARGS((RgSchCellCb *cell));
139 PRIVATE Void rgSCHSelectSi ARGS((RgSchCellCb *cell));
140 #endif /*RGR_SI_SCH*/
141 /* LTE_ADV_FLAG_REMOVED_START */
144 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
152 PRIVATE S16 rgSCHCmnBuildRntpInfo (
160 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
164 RgSchDlRbAlloc *allocInfo,
167 PRIVATE U8 rgSchCmnUlRvIdxToIMcsTbl[4] = {32, 30, 31, 29};
168 PRIVATE Void rgSCHCmnUlNonadapRetx ARGS((
169 RgSchCmnUlCell *cellUl,
173 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs ARGS((
179 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi ARGS((
190 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1 ARGS((
192 RgSchDlRbAlloc *rbAllocInfo,
193 RgSchDlHqProcCb *hqP,
197 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A ARGS((
199 RgSchDlRbAlloc *rbAllocInfo,
200 RgSchDlHqProcCb *hqP,
204 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B ARGS((
206 RgSchDlRbAlloc *rbAllocInfo,
207 RgSchDlHqProcCb *hqP,
211 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2 ARGS((
213 RgSchDlRbAlloc *rbAllocInfo,
214 RgSchDlHqProcCb *hqP,
218 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A ARGS((
220 RgSchDlRbAlloc *rbAllocInfo,
221 RgSchDlHqProcCb *hqP,
228 PUBLIC Void rgSCHCmnDlSpsSch
232 /* LTE_ADV_FLAG_REMOVED_END */
234 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc ARGS((
236 RgSchCmnDlRbAllocInfo *allocInfo
238 PRIVATE Void rgSCHBcchPcchDlRbAlloc ARGS((
240 RgSchCmnDlRbAllocInfo *allocInfo
242 PRIVATE Void rgSCHCmnDlBcchPcchAlloc ARGS((
246 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
249 TfuDlCqiPucch *pucchCqi,
250 RgrUeCqiRept *ueCqiRept,
252 Bool *is2ndCwCqiAvail
254 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
257 TfuDlCqiPusch *puschCqi,
258 RgrUeCqiRept *ueCqiRept,
260 Bool *is2ndCwCqiAvail
263 PRIVATE Void rgSCHCmnDlCqiOnPucchInd ARGS ((
266 TfuDlCqiPucch *pucchCqi
268 PRIVATE Void rgSCHCmnDlCqiOnPuschInd ARGS ((
271 TfuDlCqiPusch *puschCqi
274 /* ccpu00117452 - MOD - Changed macro name from
275 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
277 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept ARGS((
280 RgrUeCqiRept *ueCqiRept));
281 #endif /* End of RGR_CQI_REPT */
282 /* Fix: syed align multiple UEs to refresh at same time */
283 PRIVATE Void rgSCHCmnGetRefreshPer ARGS((
287 PRIVATE S16 rgSCHCmnApplyUeRefresh ARGS((
291 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa ARGS
296 PRIVATE Void rgSCHCheckAndSetTxScheme ARGS
304 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz ARGS
314 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw ARGS
330 PRIVATE Void rgSCHCmnInitRbAlloc ARGS
336 #endif /* __cplusplus */
340 PUBLIC RgSchdApis rgSchCmnApis;
341 PRIVATE RgUlSchdApis rgSchUlSchdTbl[RGSCH_NUM_SCHEDULERS];
342 PRIVATE RgDlSchdApis rgSchDlSchdTbl[RGSCH_NUM_SCHEDULERS];
344 PRIVATE RgUlSchdApis rgSchEmtcUlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
345 PRIVATE RgDlEmtcSchdApis rgSchEmtcDlSchdTbl[RGSCH_NUM_EMTC_SCHEDULERS];
347 #ifdef RG_PHASE2_SCHED
348 PRIVATE RgDlfsSchdApis rgSchDlfsSchdTbl[RGSCH_NUM_DLFS_SCHEDULERS];
350 PRIVATE RgUlSchdInits rgSchUlSchdInits = RGSCH_ULSCHED_INITS;
351 PRIVATE RgDlSchdInits rgSchDlSchdInits = RGSCH_DLSCHED_INITS;
353 PRIVATE RgEmtcUlSchdInits rgSchEmtcUlSchdInits = RGSCH_EMTC_ULSCHED_INITS;
354 PRIVATE RgEmtcDlSchdInits rgSchEmtcDlSchdInits = RGSCH_EMTC_DLSCHED_INITS;
356 #if (defined (RG_PHASE2_SCHED) && defined (TFU_UPGRADE))
357 PRIVATE RgDlfsSchdInits rgSchDlfsSchdInits = RGSCH_DLFSSCHED_INITS;
360 typedef Void (*RgSchCmnDlAllocRbFunc) ARGS((RgSchCellCb *cell, RgSchDlSf *subFrm,
361 RgSchUeCb *ue, U32 bo, U32 *effBo, RgSchDlHqProcCb *proc,
362 RgSchCmnDlRbAllocInfo *cellWdAllocInfo));
363 typedef U8 (*RgSchCmnDlGetPrecInfFunc) ARGS((RgSchCellCb *cell, RgSchUeCb *ue,
364 U8 numLyrs, Bool bothCwEnbld));
365 PRIVATE Void rgSCHCmnDlAllocTxRbTM1 ARGS((
371 RgSchDlHqProcCb *proc,
372 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
374 PRIVATE Void rgSCHCmnDlAllocTxRbTM2 ARGS((
380 RgSchDlHqProcCb *proc,
381 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
383 PRIVATE Void rgSCHCmnDlAllocTxRbTM3 ARGS((
389 RgSchDlHqProcCb *proc,
390 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
392 PRIVATE Void rgSCHCmnDlAllocTxRbTM4 ARGS((
398 RgSchDlHqProcCb *proc,
399 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
402 PRIVATE Void rgSCHCmnDlAllocTxRbTM5 ARGS((
408 RgSchDlHqProcCb *proc,
409 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
412 PRIVATE Void rgSCHCmnDlAllocTxRbTM6 ARGS((
418 RgSchDlHqProcCb *proc,
419 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
421 PRIVATE Void rgSCHCmnDlAllocTxRbTM7 ARGS((
427 RgSchDlHqProcCb *proc,
428 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
430 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1 ARGS((
436 RgSchDlHqProcCb *proc,
437 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
439 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2 ARGS((
445 RgSchDlHqProcCb *proc,
446 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
448 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3 ARGS((
454 RgSchDlHqProcCb *proc,
455 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
457 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4 ARGS((
463 RgSchDlHqProcCb *proc,
464 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
467 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5 ARGS((
473 RgSchDlHqProcCb *proc,
474 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
477 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6 ARGS((
483 RgSchDlHqProcCb *proc,
484 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
486 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7 ARGS((
492 RgSchDlHqProcCb *proc,
493 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
497 PRIVATE U8 rgSchGetN1ResCount ARGS ((
501 PUBLIC Bool rgSchCmnChkDataOnlyOnPcell
507 PUBLIC U8 rgSCHCmnCalcPcqiBitSz
514 /* Functions specific to each transmission mode for DL Tx RB Allocation*/
515 RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[7] = {rgSCHCmnDlAllocTxRbTM1,
516 rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
517 NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7};
519 /* Functions specific to each transmission mode for DL Retx RB Allocation*/
520 RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[7] = {rgSCHCmnDlAllocRetxRbTM1,
521 rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
522 NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7};
524 /* Functions specific to each transmission mode for DL Tx RB Allocation*/
525 RgSchCmnDlAllocRbFunc dlAllocTxRbFunc[9] = {rgSCHCmnDlAllocTxRbTM1,
526 rgSCHCmnDlAllocTxRbTM2, rgSCHCmnDlAllocTxRbTM3, rgSCHCmnDlAllocTxRbTM4,
527 NULLP, rgSCHCmnDlAllocTxRbTM6, rgSCHCmnDlAllocTxRbTM7, NULLP, NULLP};
529 /* Functions specific to each transmission mode for DL Retx RB Allocation*/
530 RgSchCmnDlAllocRbFunc dlAllocRetxRbFunc[9] = {rgSCHCmnDlAllocRetxRbTM1,
531 rgSCHCmnDlAllocRetxRbTM2, rgSCHCmnDlAllocRetxRbTM3, rgSCHCmnDlAllocRetxRbTM4,
532 NULLP, rgSCHCmnDlAllocRetxRbTM6, rgSCHCmnDlAllocRetxRbTM7, NULLP, NULLP};
537 PRIVATE U8 rgSCHCmnDlTM3PrecInf2 ARGS((
543 PRIVATE U8 rgSCHCmnDlTM3PrecInf4 ARGS((
549 PRIVATE U8 rgSCHCmnDlTM4PrecInf2 ARGS((
555 PRIVATE U8 rgSCHCmnDlTM4PrecInf4 ARGS((
561 /* Functions specific to each transmission mode for DL RB Allocation*/
562 RgSchCmnDlGetPrecInfFunc getPrecInfoFunc[2][2] = {
563 {rgSCHCmnDlTM3PrecInf2, rgSCHCmnDlTM3PrecInf4},
564 {rgSCHCmnDlTM4PrecInf2, rgSCHCmnDlTM4PrecInf4}
567 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb ARGS((
571 RgSchDlHqTbCb *tbInfo,
576 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb ARGS((
580 RgSchDlHqProcCb *proc,
585 PRIVATE Void rgSCHCmnDlTM3TxTx ARGS((
591 RgSchDlHqProcCb *proc,
592 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
594 PRIVATE Void rgSCHCmnDlTM3TxRetx ARGS((
600 RgSchDlHqProcCb *proc,
601 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
603 PRIVATE Void rgSCHCmnDlTM3RetxRetx ARGS((
609 RgSchDlHqProcCb *proc,
610 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
613 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc ARGS((
619 /* LTE_ADV_FLAG_REMOVED_START */
621 PRIVATE Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc ARGS((
628 /* LTE_ADV_FLAG_REMOVED_END */
629 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx ARGS((
631 RgSchCmnDlRbAllocInfo *allocInfo,
633 RgSchDlHqProcCb *proc
635 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx ARGS((
637 RgSchCmnDlRbAllocInfo *allocInfo,
641 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst ARGS((
642 RgSchCmnDlRbAllocInfo *allocInfo,
644 RgSchDlHqProcCb *proc
646 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb ARGS((
650 RgSchDlHqTbCb *reTxTb,
655 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb ARGS((
659 RgSchDlHqProcCb *proc,
664 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb ARGS((
668 RgSchDlHqTbCb *tbInfo,
674 PRIVATE Void rgSCHCmnFillHqPTb ARGS((
676 RgSchDlRbAlloc *rbAllocInfo,
682 PRIVATE Void rgSCHCmnDlGetBestFitHole ARGS((
691 #ifdef RGSCH_SPS_UNUSED
692 PRIVATE U32 rgSCHCmnGetRaType1Mask ARGS((
698 PRIVATE U32 rgSCHCmnGetRaType0Mask ARGS((
702 PRIVATE U32 rgSCHCmnGetRaType2Mask ARGS((
708 PUBLIC Bool rgSCHCmnRetxAllocAvoid ARGS((
711 RgSchDlHqProcCb *proc
714 PUBLIC U16 rgSCHCmnGetSiSetId ARGS((
722 //TODO_SID: Currently the table is only for 100 PRBs. Needs to be modified w.r.t. VRBG table 8.1.5.2.1-1 of V5G_213
723 U32 rgSch5gtfTbSzTbl[MAX_5GTF_MCS] =
724 {1864, 5256, 8776, 13176, 17576, 21976, 26376, 31656, 35176, 39576, 43976, 47496, 52776, 59376, 66392};
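/* Illustrative read, assuming the table is indexed by 5GTF MCS for the full
 * 100-PRB allocation noted in the TODO above: rgSch5gtfTbSzTbl[0] = 1864 and
 * rgSch5gtfTbSzTbl[3] = 13176 would be the TB sizes (in bits) for MCS 0 and
 * MCS 3 respectively. */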
726 U32 gUl5gtfSrRecv = 0;
727 U32 gUl5gtfBsrRecv = 0;
728 U32 gUl5gtfUeSchPick = 0;
729 U32 gUl5gtfPdcchSchd = 0;
730 U32 gUl5gtfAllocAllocated = 0;
731 U32 gUl5gtfUeRbAllocDone = 0;
732 U32 gUl5gtfUeRmvFnlzZeroBo = 0;
733 U32 gUl5gtfUeFnlzReAdd = 0;
734 U32 gUl5gtfPdcchSend = 0;
735 U32 gUl5gtfRbAllocFail = 0;
736 U32 ul5gtfsidUlMarkUl = 0;
737 U32 ul5gtfsidDlSchdPass = 0;
738 U32 ul5gtfsidDlAlreadyMarkUl = 0;
739 U32 ul5gtfTotSchdCnt = 0;
742 /* CQI Offset Index to Beta CQI Offset value mapping,
743 * stored as parts per 1000. Reserved is set to 0.
744 * Refer 36.213 sec 8.6.3 Tbl 8.6.3-3 */
745 PUBLIC U32 rgSchCmnBetaCqiOffstTbl[16] = {0, 0, 1125,
746 1250, 1375, 1625, 1750, 2000, 2250, 2500, 2875,
747 3125, 3500, 4000, 5000, 6250};
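/* Worked example of the parts-per-1000 encoding described above: CQI offset
 * index 2 stores 1125, i.e. beta_offset_CQI = 1.125 once divided by 1000; the
 * leading zero entries are the reserved indices. */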
748 PUBLIC U32 rgSchCmnBetaHqOffstTbl[16] = {2000, 2500, 3125,
749 4000, 5000, 6250, 8000,10000, 12625, 15875, 20000,
750 31000, 50000,80000,126000,0};
751 PUBLIC U32 rgSchCmnBetaRiOffstTbl[16] = {1250, 1625, 2000,
752 2500, 3125, 4000, 5000, 6250, 8000, 10000,12625,
754 PUBLIC S8 rgSchCmnDlCqiDiffOfst[8] = {0, 1, 2, 3, -4, -3, -2, -1};
756 /* Include CRS REs while calculating Efficiency */
757 CONSTANT PRIVATE U8 rgSchCmnAntIdx[5] = {0,0,1,0,2};
758 CONSTANT PRIVATE U8 rgSchCmnNumResForCrs[5] = {0,6,12,0,16};
765 PUBLIC S8 rgSchCmnApUeSelDiffCqi[4] = {1, 2, 3, 4};
766 PUBLIC S8 rgSchCmnApEnbConfDiffCqi[4] = {0, 1, 2, -1};
769 typedef struct rgSchCmnDlUeDciFrmtOptns
771 TfuDciFormat spfcDciFrmt; /* TM(Transmission Mode) specific DCI format.
772 * Search space : UE Specific by C-RNTI only. */
773 U8 spfcDciRAType; /* Resource Alloctn(RA) type for spfcDciFrmt */
774 TfuDciFormat prfrdDciFrmt; /* Preferred DCI format among the available
775 * options for TD (Transmit Diversity) */
776 U8 prfrdDciRAType; /* Resource Alloctn(RA) type for prfrdDciFrmt */
777 }RgSchCmnDlUeDciFrmtOptns;
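/* Illustrative interpretation, based on the tables below: for a UE in TM3 the
 * TM-specific option (spfcDciFrmt) is DCI format 2A with RA type 0, while DCI
 * format 1A with RA type 2 is the preferred transmit-diversity fallback. */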
780 /* DCI Format options for each Transmission Mode */
781 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[7] = {
782 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
783 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
784 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
785 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
786 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
787 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
788 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
792 /* DCI Format options for each Transmission Mode */
793 RgSchCmnDlUeDciFrmtOptns rgSchCmnDciFrmtOptns[9] = {
794 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
795 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
796 {TFU_DCI_FORMAT_2A,RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
797 {TFU_DCI_FORMAT_2, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
798 {TFU_DCI_FORMAT_1D,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
799 {TFU_DCI_FORMAT_1B,RG_SCH_CMN_RA_TYPE2, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2},
800 {TFU_DCI_FORMAT_1, RG_SCH_CMN_RA_TYPE0, TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2}
805 typedef struct rgSchCmnDlImcsTbl
807 U8 modOdr; /* Modulation Order */
809 }RgSchCmnDlImcsTbl[29];
811 CONSTANT struct rgSchCmnMult235Info
813 U8 match; /* Closest number satisfying 2^a.3^b.5^c, with a bias
814 * towards the smaller number */
815 U8 prvMatch; /* Closest number not greater than array index
816 * satisfying 2^a.3^b.5^c */
817 } rgSchCmnMult235Tbl[110+1] = {
819 {1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {6, 6}, {6, 6}, {8, 8},
820 {9, 9}, {10, 10}, {10, 10}, {12, 12}, {12, 12}, {15, 12}, {15, 15},
821 {16, 16}, {16, 16}, {18, 18}, {18, 18}, {20, 20}, {20, 20}, {20, 20},
822 {24, 20}, {24, 24}, {25, 25}, {25, 25}, {27, 27}, {27, 27}, {30, 27},
823 {30, 30}, {30, 30}, {32, 32}, {32, 32}, {32, 32}, {36, 32}, {36, 36},
824 {36, 36}, {36, 36}, {40, 36}, {40, 40}, {40, 40}, {40, 40}, {45, 40},
825 {45, 40}, {45, 45}, {45, 45}, {48, 45}, {48, 48}, {48, 48}, {50, 50},
826 {50, 50}, {50, 50}, {54, 50}, {54, 54}, {54, 54}, {54, 54}, {54, 54},
827 {60, 54}, {60, 54}, {60, 60}, {60, 60}, {60, 60}, {64, 60}, {64, 64},
828 {64, 64}, {64, 64}, {64, 64}, {64, 64}, {72, 64}, {72, 64}, {72, 64},
829 {72, 72}, {72, 72}, {75, 72}, {75, 75}, {75, 75}, {75, 75}, {80, 75},
830 {80, 75}, {80, 80}, {81, 81}, {81, 81}, {81, 81}, {81, 81}, {81, 81},
831 {90, 81}, {90, 81}, {90, 81}, {90, 81}, {90, 90}, {90, 90}, {90, 90},
832 {90, 90}, {96, 90}, {96, 90}, {96, 96}, {96, 96}, {96, 96}, {100, 96},
833 {100, 100}, {100, 100}, {100, 100}, {100, 100}, {100, 100}, {108, 100},
834 {108, 100}, {108, 100}, {108, 108}, {108, 108}, {108, 108}
838 /* BI table from 36.321 Table 7.2.1 */
839 CONSTANT PRIVATE S16 rgSchCmnBiTbl[RG_SCH_CMN_NUM_BI_VAL] = {
840 0, 10, 20, 30,40,60,80,120,160,240,320,480,960};
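/* Illustrative read: BI index 5 maps to a 60 ms backoff and index 12 to 960 ms
 * (backoff parameter values in milliseconds, per the referenced 36.321 table). */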
841 PUBLIC RgSchCmnUlCqiInfo rgSchCmnUlCqiTbl[RG_SCH_CMN_UL_NUM_CQI] = {
843 {RGSCH_CMN_QM_CQI_1,RGSCH_CMN_UL_EFF_CQI_1 },
844 {RGSCH_CMN_QM_CQI_2,RGSCH_CMN_UL_EFF_CQI_2 },
845 {RGSCH_CMN_QM_CQI_3,RGSCH_CMN_UL_EFF_CQI_3 },
846 {RGSCH_CMN_QM_CQI_4,RGSCH_CMN_UL_EFF_CQI_4 },
847 {RGSCH_CMN_QM_CQI_5,RGSCH_CMN_UL_EFF_CQI_5 },
848 {RGSCH_CMN_QM_CQI_6,RGSCH_CMN_UL_EFF_CQI_6 },
849 {RGSCH_CMN_QM_CQI_7,RGSCH_CMN_UL_EFF_CQI_7 },
850 {RGSCH_CMN_QM_CQI_8,RGSCH_CMN_UL_EFF_CQI_8 },
851 {RGSCH_CMN_QM_CQI_9,RGSCH_CMN_UL_EFF_CQI_9 },
852 {RGSCH_CMN_QM_CQI_10,RGSCH_CMN_UL_EFF_CQI_10 },
853 {RGSCH_CMN_QM_CQI_11,RGSCH_CMN_UL_EFF_CQI_11 },
854 {RGSCH_CMN_QM_CQI_12,RGSCH_CMN_UL_EFF_CQI_12 },
855 {RGSCH_CMN_QM_CQI_13,RGSCH_CMN_UL_EFF_CQI_13 },
856 {RGSCH_CMN_QM_CQI_14,RGSCH_CMN_UL_EFF_CQI_14 },
857 {RGSCH_CMN_QM_CQI_15,RGSCH_CMN_UL_EFF_CQI_15 },
861 /* This table maps a (delta_offset * 2 + 2) to a (beta * 8)
862 * where beta is 10^-(delta_offset/10) rounded off to nearest 1/8
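 * For example, a delta_offset of 0 maps to array index 2, whose stored value
 * of 8 corresponds to beta = 1.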
864 PRIVATE U16 rgSchCmnUlBeta8Tbl[29] = {
865 6, RG_SCH_CMN_UL_INVALID_BETA8, 8, 9, 10, 11, 13, 14, 16, 18, 20, 23,
866 25, 28, 32, RG_SCH_CMN_UL_INVALID_BETA8, 40, RG_SCH_CMN_UL_INVALID_BETA8,
867 50, RG_SCH_CMN_UL_INVALID_BETA8, 64, RG_SCH_CMN_UL_INVALID_BETA8, 80,
868 RG_SCH_CMN_UL_INVALID_BETA8, 101, RG_SCH_CMN_UL_INVALID_BETA8, 127,
869 RG_SCH_CMN_UL_INVALID_BETA8, 160
873 /* QCI to SVC priority mapping. Index specifies the Qci*/
874 PRIVATE U8 rgSchCmnDlQciPrio[RG_SCH_CMN_MAX_QCI] = RG_SCH_CMN_QCI_TO_PRIO;
876 /* The configuration is efficiency measured per 1024 REs. */
877 /* The first element stands for when CQI is not known */
878 /* This table is used to translate CQI to its corresponding */
879 /* allocation parameters. These are currently from 36.213 */
880 /* Just this table needs to be edited for modifying the */
881 /* resource allocation behaviour */
883 /* ADD CQI to MCS mapping correction
884 * single-dimensional array is replaced by 2 dimensions for different CFIs */
885 PRIVATE U16 rgSchCmnCqiPdschEff[4][16] = {RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI1,
886 RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI2,RG_SCH_CMN_CQI_TO_PDSCH_EFF_CFI3};
888 PRIVATE U16 rgSchCmn2LyrCqiPdschEff[4][16] = {RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI0 ,RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI1,
889 RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI2, RG_SCH_CMN_2LYR_CQI_TO_PDSCH_EFF_CFI3};
891 /* This configuration determines the translation of a UE's CQI to its */
892 /* PDCCH coding efficiency. This may be edited based on the installation */
893 PRIVATE U8 rgSchCmnDlRvTbl[4] = {0, 2, 3, 1}; /* RVIdx sequence is corrected*/
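/* Illustrative use (an assumption about how the table is consumed): indexing
 * by the transmission count modulo 4 yields the RV cycling order 0, 2, 3, 1. */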
895 /* Indexed by [DciFrmt].
896 * Considering the following definition in determining the dciFrmt index.
911 PRIVATE U16 rgSchCmnDciFrmtSizes[10];
914 PRIVATE U16 rgSchCmnCqiPdcchEff[16] = RG_SCH_CMN_CQI_TO_PDCCH_EFF;
918 PUBLIC RgSchTddUlDlSubfrmTbl rgSchTddUlDlSubfrmTbl = {
919 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME},
920 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
921 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
922 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
923 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
924 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME},
925 {RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME, RG_SCH_TDD_SPL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_UL_SUBFRAME, RG_SCH_TDD_DL_SUBFRAME}
930 PUBLIC U8 rgSchTddSpsDlMaxRetxTbl[RGSCH_MAX_TDD_UL_DL_CFG] = {
942 /* Special Subframes in OFDM symbols */
943 /* ccpu00134197-MOD-Correct the number of symbols */
944 PUBLIC RgSchTddSplSubfrmInfoTbl rgSchTddSplSubfrmInfoTbl = {
948 {11, 1, 1, 10, 1, 1},
956 /* PHICH 'm' value Table */
957 PUBLIC RgSchTddPhichMValTbl rgSchTddPhichMValTbl = {
958 {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
959 {0, 1, 0, 0, 1, 0, 1, 0, 0, 1},
960 {0, 0, 0, 1, 0, 0, 0, 0, 1, 0},
961 {1, 0, 0, 0, 0, 0, 0, 0, 1, 1},
962 {0, 0, 0, 0, 0, 0, 0, 0, 1, 1},
963 {0, 0, 0, 0, 0, 0, 0, 0, 1, 0},
964 {1, 1, 0, 0, 0, 1, 1, 0, 0, 1}
967 /* PHICH 'K' value Table */
968 PUBLIC RgSchTddKPhichTbl rgSchTddKPhichTbl = {
969 {0, 0, 4, 7, 6, 0, 0, 4, 7, 6},
970 {0, 0, 4, 6, 0, 0, 0, 4, 6, 0},
971 {0, 0, 6, 0, 0, 0, 0, 6, 0, 0},
972 {0, 0, 6, 6, 6, 0, 0, 0, 0, 0},
973 {0, 0, 6, 6, 0, 0, 0, 0, 0, 0},
974 {0, 0, 6, 0, 0, 0, 0, 0, 0, 0},
975 {0, 0, 4, 6, 6, 0, 0, 4, 7, 0}
978 /* Uplink association index 'K' value Table */
979 PUBLIC RgSchTddUlAscIdxKDashTbl rgSchTddUlAscIdxKDashTbl = {
980 {0, 0, 6, 4, 0, 0, 0, 6, 4, 0},
981 {0, 0, 4, 0, 0, 0, 0, 4, 0, 0},
982 {0, 0, 4, 4, 4, 0, 0, 0, 0, 0},
983 {0, 0, 4, 4, 0, 0, 0, 0, 0, 0},
984 {0, 0, 4, 0, 0, 0, 0, 0, 0, 0},
985 {0, 0, 7, 7, 5, 0, 0, 7, 7, 0}
989 /* PUSCH 'K' value Table */
990 PUBLIC RgSchTddPuschTxKTbl rgSchTddPuschTxKTbl = {
991 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
992 {0, 6, 0, 0, 4, 0, 6, 0, 0, 4},
993 {0, 0, 0, 4, 0, 0, 0, 0, 4, 0},
994 {4, 0, 0, 0, 0, 0, 0, 0, 4, 4},
995 {0, 0, 0, 0, 0, 0, 0, 0, 4, 4},
996 {0, 0, 0, 0, 0, 0, 0, 0, 4, 0},
997 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1000 /* PDSCH to PUCCH Table for DL HARQ feedback. Based on the
1001 Downlink association set index 'K' table */
1002 PUBLIC U8 rgSchTddPucchTxTbl[7][10] = {
1003 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1004 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1005 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1006 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1007 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1008 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1009 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1012 /* Table to fetch the next DL sf idx for applying the
1013 new CFI. The next Dl sf Idx at which the new CFI
1014 is applied is always the starting Sf of the next ACK/NACK
1017 Ex: In Cfg-2, sf4 and sf9 are the only subframes at which
1018 a new ACK/NACK bundle of DL subframes can start
1020 D S U D D D S U D D D S U D D D S U D D
1023 dlSf Array for Cfg-2:
1024 sfNum: 0 1 3 4 5 6 8 9 0 1 3 4 5 6 8 9
1025 sfIdx: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
1027 If CFI changes at sf0, nearest DL SF bundle >= 4 TTI is sf4
1028 So at sf4 the new CFI can be applied. To arrive at sf4 from
1029 sf0, the sfIdx has to be increased by 3 */
1031 PUBLIC U8 rgSchTddPdcchSfIncTbl[7][10] = {
1032 /* A/N Bundl: 0,1,5,6*/ {2, 1, 0, 0, 0, 2, 1, 0, 0, 0},
1033 /* A/N Bundl: 0,4,5,9*/ {2, 2, 0, 0, 3, 2, 2, 0, 0, 3},
1034 /* A/N Bundl: 4,9*/ {3, 6, 0, 5, 4, 3, 6, 0, 5, 4},
1035 /* A/N Bundl: 1,7,9*/ {4, 3, 0, 0, 0, 4, 5, 4, 6, 5},
1036 /* A/N Bundl: 0,6*/ {4, 3, 0, 0, 6, 5, 4, 7, 6, 5},
1037 /* A/N Bundl: 9*/ {8, 7, 0, 6, 5, 4, 12, 11, 10, 9},
1038 /* A/N Bundl: 0,1,5,6,9*/ {2, 1, 0, 0, 0, 2, 2, 0, 0, 3}
1042 /* combine compilation fixes */
1044 /* subframe offset values to be used when twoIntervalsConfig is enabled in UL
1046 PUBLIC RgSchTddSfOffTbl rgSchTddSfOffTbl = {
1047 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1048 {0, 0, 1, -1, 0, 0, 0, 1, -1, 0},
1049 {0, 0, 5, 0, 0, 0, 0, -5, 0, 0},
1050 {0, 0, 1, 1, -2, 0, 0, 0, 0, 0},
1051 {0, 0, 1, -1, 0, 0, 0, 0, 0, 0},
1052 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
1053 {0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
1057 /* Table to determine when uplink SPS configured grants should
1058 * explicitly be reserved in a subframe. When the entries are the same
1059 * as those of Msg3SubfrmTbl, it indicates competition with msg3.
1060 * As of now, this is the same as Msg3SubfrmTbl (leaving out uldlcfg 2),
1061 * except that all 255s are now zeros. */
1062 PUBLIC RgSchTddSpsUlRsrvTbl rgSchTddSpsUlRsrvTbl = {
1063 {0, 0, 0, 6, 8, 0, 0, 0, 6, 8},
1064 {0, 0, 6, 9, 0, 0, 0, 6, 9, 0},
1065 {0, 0, 10, 0, 0, 0, 0, 10, 0, 0},
1066 {0, 0, 0, 0, 8, 0, 7, 7, 14, 0},
1067 {0, 0, 0, 9, 0, 0, 7, 15, 0, 0},
1068 {0, 0, 10, 0, 0, 0, 16, 0, 0, 0},
1069 {0, 0, 0, 0, 8, 0, 0, 0, 9, 0}
1072 /* Inverse DL Assoc Set index Table */
1073 PUBLIC RgSchTddInvDlAscSetIdxTbl rgSchTddInvDlAscSetIdxTbl = {
1074 {4, 6, 0, 0, 0, 4, 6, 0, 0, 0},
1075 {7, 6, 0, 0, 4, 7, 6, 0, 0, 4},
1076 {7, 6, 0, 4, 8, 7, 6, 0, 4, 8},
1077 {4, 11, 0, 0, 0, 7, 6, 6, 5, 5},
1078 {12, 11, 0, 0, 8, 7, 7, 6, 5, 4},
1079 {12, 11, 0, 9, 8, 7, 6, 5, 4, 13},
1080 {7, 7, 0, 0, 0, 7, 7, 0, 0, 5}
1083 #endif /* (LTEMAC_SPS ) */
1085 /* Number of Uplink subframes Table */
1086 PRIVATE U8 rgSchTddNumUlSf[] = {6, 4, 2, 3, 2, 1, 5};
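/* Illustrative read (indexed by TDD UL/DL configuration): configuration 0 has
 * 6 UL subframes per radio frame, while configuration 5 has only 1. */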
1088 /* Uplink HARQ processes Table */
1089 PUBLIC RgSchTddUlNumHarqProcTbl rgSchTddUlNumHarqProcTbl = { 7, 4, 2, 3, 2, 1, 6};
1091 /* Downlink HARQ processes Table */
1092 PUBLIC RgSchTddDlNumHarqProcTbl rgSchTddDlNumHarqProcTbl = { 4, 7, 10, 9, 12, 15, 6};
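/* Illustrative read (indexed by TDD UL/DL configuration): configuration 1
 * allows 4 UL and 7 DL HARQ processes, configuration 5 allows 1 UL and 15 DL. */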
1094 /* Downlink association index set 'K' value Table */
1095 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlAscSetIdxKTbl = {
1096 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1098 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1100 { {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 4, 6}}, {0, {0}}, {0, {0}} },
1102 { {0, {0}}, {0, {0}}, {3, {7, 6, 11}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1104 { {0, {0}}, {0, {0}}, {4, {12, 8, 7, 11}}, {4, {6, 5, 4, 7}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1106 { {0, {0}}, {0, {0}}, {9, {13, 12, 9, 8, 7, 5, 4, 11, 6}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1108 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
1111 /* ccpu132282-ADD-the table rgSchTddDlAscSetIdxKTbl is rearranged in
1112 * decreasing order of Km; this is used to calculate the NCE used for
1113 * calculating the N1Pucch resource for HARQ */
1114 PUBLIC RgSchTddDlAscSetIdxKTbl rgSchTddDlHqPucchResCalTbl = {
1115 { {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}}, {0, {0}}, {0, {0}}, {1, {6}}, {0, {0}}, {1, {4}} },
1117 { {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}}, {0, {0}}, {0, {0}}, {2, {7, 6}}, {1, {4}}, {0, {0}} },
1119 { {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {4, {8, 7, 6, 4}}, {0, {0}}, {0, {0}} },
1121 { {0, {0}}, {0, {0}}, {3, {11, 7, 6}}, {2, {6, 5}}, {2, {5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1123 { {0, {0}}, {0, {0}}, {4, {12, 11, 8, 7}}, {4, {7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1125 { {0, {0}}, {0, {0}}, {9, {13, 12, 11, 9, 8, 7, 6, 5, 4}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}}, {0, {0}} },
1127 { {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {1, {5}}, {0, {0}}, {0, {0}}, {1, {7}}, {1, {7}}, {0, {0}} }
1130 /* Minimum amount of Ack/Nack feedback information to be
1131 stored for each UL-DL configuration */
1132 PUBLIC RgSchTddANFdbkMapTbl rgSchTddANFdbkMapTbl = {4, 4, 2, 3, 2, 1, 5};
1134 /* Uplink switch points and number of UL subframes Table */
1135 PUBLIC RgSchTddMaxUlSubfrmTbl rgSchTddMaxUlSubfrmTbl = {
1136 {2,3,3}, {2,2,2}, {2,1,1}, {1,3,0}, {1,2,0}, {1,1,0}, {2,3,2}
1139 /* Uplink switch points and number of DL subframes Table */
1140 PUBLIC RgSchTddMaxDlSubfrmTbl rgSchTddMaxDlSubfrmTbl = {
1141 {2,2,2}, {2,3,3}, {2,4,4}, {1,7,0}, {1,8,0}, {1,9,0}, {2,2,3}
1144 /* Number of UL subframes present before a particular subframe */
1145 PUBLIC RgSchTddNumUlSubfrmTbl rgSchTddNumUlSubfrmTbl = {
1146 {0, 0, 1, 2, 3, 3, 3, 4, 5, 6},
1147 {0, 0, 1, 2, 2, 2, 2, 3, 4, 4},
1148 {0, 0, 1, 1, 1, 1, 1, 2, 2, 2},
1149 {0, 0, 1, 2, 3, 3, 3, 3, 3, 3},
1150 {0, 0, 1, 2, 2, 2, 2, 2, 2, 2},
1151 {0, 0, 1, 1, 1, 1, 1, 1, 1, 1},
1152 {0, 0, 1, 2, 3, 3, 3, 4, 5, 5}
1155 /* Number of DL subframes present till a particular subframe */
1156 PUBLIC RgSchTddNumDlSubfrmTbl rgSchTddNumDlSubfrmTbl = {
1157 {1, 2, 2, 2, 2, 3, 4, 4, 4, 4},
1158 {1, 2, 2, 2, 3, 4, 5, 5, 5, 6},
1159 {1, 2, 2, 3, 4, 5, 6, 6, 7, 8},
1160 {1, 2, 2, 2, 2, 3, 4, 5, 6, 7},
1161 {1, 2, 2, 2, 3, 4, 5, 6, 7, 8},
1162 {1, 2, 2, 3, 4, 5, 6, 7, 8, 9},
1163 {1, 2, 2, 2, 2, 3, 4, 4, 4, 5}
1167 /* Nearest possible DL subframe Index from UL subframe
1168 * DL Index < UL Index */
1169 PUBLIC RgSchTddLowDlSubfrmIdxTbl rgSchTddLowDlSubfrmIdxTbl = {
1170 {0, 1, 1, 1, 1, 5, 6, 6, 6, 6},
1171 {0, 1, 1, 1, 4, 5, 6, 6, 6, 9},
1172 {0, 1, 1, 3, 4, 5, 6, 6, 8, 9},
1173 {0, 1, 1, 1, 1, 5, 6, 7, 8, 9},
1174 {0, 1, 1, 1, 4, 5, 6, 7, 8, 9},
1175 {0, 1, 1, 3, 4, 5, 6, 7, 8, 9},
1176 {0, 1, 1, 1, 1, 5, 6, 6, 6, 9}
1179 /* Nearest possible DL subframe Index from UL subframe
1180 * DL Index > UL Index
1181 * 10 represents Next SFN low DL Idx */
1182 PUBLIC RgSchTddHighDlSubfrmIdxTbl rgSchTddHighDlSubfrmIdxTbl = {
1183 {0, 1, 5, 5, 5, 5, 6, 10, 10, 10},
1184 {0, 1, 4, 4, 4, 5, 6, 9, 9, 9},
1185 {0, 1, 3, 3, 4, 5, 6, 8, 8, 9},
1186 {0, 1, 5, 5, 5, 5, 6, 7, 8, 9},
1187 {0, 1, 4, 4, 4, 5, 6, 7, 8, 9},
1188 {0, 1, 3, 3, 4, 5, 6, 7, 8, 9},
1189 {0, 1, 5, 5, 5, 5, 6, 9, 9, 9}
1192 /* RACH Message3 related information */
1193 PUBLIC RgSchTddMsg3SubfrmTbl rgSchTddMsg3SubfrmTbl = {
1194 {7, 6, 255, 255, 255, 7, 6, 255, 255, 255},
1195 {7, 6, 255, 255, 8, 7, 6, 255, 255, 8},
1196 {7, 6, 255, 9, 8, 7, 6, 255, 9, 8},
1197 {12, 11, 255, 255, 255, 7, 6, 6, 6, 13},
1198 {12, 11, 255, 255, 8, 7, 6, 6, 14, 13},
1199 {12, 11, 255, 9, 8, 7, 6, 15, 14, 13},
1200 {7, 6, 255, 255, 255, 7, 6, 255, 255, 8}
1203 /* ccpu00132341-DEL Removed rgSchTddRlsDlSubfrmTbl and used Kset table for
1204 * releasing DL HARQs */
1206 /* DwPTS Scheduling Changes Start */
1207 /* Provides the number of Cell Reference Signals in DwPTS
1209 PRIVATE U8 rgSchCmnDwptsCrs[2][3] = {/* [Spl Sf cfg][Ant Port] */
1210 {4, 8, 16}, /* Spl Sf cfg 1,2,3,6,7,8 */
1211 {6, 12, 20}, /* Spl Sf cfg 4 */
1214 PRIVATE S8 rgSchCmnSplSfDeltaItbs[9] = RG_SCH_DWPTS_ITBS_ADJ;
1215 /* DwPTS Scheduling Changes End */
1219 PRIVATE U32 rgSchCmnBsrTbl[64] = {
1220 0, 10, 12, 14, 17, 19, 22, 26,
1221 31, 36, 42, 49, 57, 67, 78, 91,
1222 107, 125, 146, 171, 200, 234, 274, 321,
1223 376, 440, 515, 603, 706, 826, 967, 1132,
1224 1326, 1552, 1817, 2127, 2490, 2915, 3413, 3995,
1225 4677, 5476, 6411, 7505, 8787, 10287, 12043, 14099,
1226 16507, 19325, 22624, 26487, 31009, 36304, 42502, 49759,
1227 58255, 68201, 79846, 93479, 109439, 128125, 150000, 220000
1230 PRIVATE U32 rgSchCmnExtBsrTbl[64] = {
1231 0, 10, 13, 16, 19, 23, 29, 35,
1232 43, 53, 65, 80, 98, 120, 147, 181,
1233 223, 274, 337, 414, 509, 625, 769, 945,
1234 1162, 1429, 1757, 2161, 2657, 3267, 4017, 4940,
1235 6074, 7469, 9185, 11294, 13888, 17077, 20999, 25822,
1236 31752, 39045, 48012, 59039, 72598, 89272, 109774, 134986,
1237 165989, 204111, 250990, 308634, 379519, 466683, 573866, 705666,
1238 867737, 1067031, 1312097, 1613447, 1984009, 2439678, 3000000, 3100000
1241 PUBLIC U8 rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_MAX_CP][RG_SCH_CMN_UL_NUM_CQI];
1243 PUBLIC RgSchTbSzTbl rgTbSzTbl = {
1245 {16, 32, 56, 88, 120, 152, 176, 208, 224, 256, 288, 328, 344, 376, 392, 424, 456, 488, 504, 536, 568, 600, 616, 648, 680, 712, 744, 776, 776, 808, 840, 872, 904, 936, 968, 1000, 1032, 1032, 1064, 1096, 1128, 1160, 1192, 1224, 1256, 1256, 1288, 1320, 1352, 1384, 1416, 1416, 1480, 1480, 1544, 1544, 1608, 1608, 1608, 1672, 1672, 1736, 1736, 1800, 1800, 1800, 1864, 1864, 1928, 1928, 1992, 1992, 2024, 2088, 2088, 2088, 2152, 2152, 2216, 2216, 2280, 2280, 2280, 2344, 2344, 2408, 2408, 2472, 2472, 2536, 2536, 2536, 2600, 2600, 2664, 2664, 2728, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 2984, 2984, 3112},
1246 {24, 56, 88, 144, 176, 208, 224, 256, 328, 344, 376, 424, 456, 488, 520, 568, 600, 632, 680, 712, 744, 776, 808, 872, 904, 936, 968, 1000, 1032, 1064, 1128, 1160, 1192, 1224, 1256, 1288, 1352, 1384, 1416, 1416, 1480, 1544, 1544, 1608, 1608, 1672, 1736, 1736, 1800, 1800, 1864, 1864, 1928, 1992, 1992, 2024, 2088, 2088, 2152, 2152, 2216, 2280, 2280, 2344, 2344, 2408, 2472, 2472, 2536, 2536, 2600, 2600, 2664, 2728, 2728, 2792, 2792, 2856, 2856, 2856, 2984, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008},
1247 {32, 72, 144, 176, 208, 256, 296, 328, 376, 424, 472, 520, 568, 616, 648, 696, 744, 776, 840, 872, 936, 968, 1000, 1064, 1096, 1160, 1192, 1256, 1288, 1320, 1384, 1416, 1480, 1544, 1544, 1608, 1672, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2024, 2088, 2088, 2152, 2216, 2216, 2280, 2344, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2664, 2728, 2792, 2856, 2856, 2856, 2984, 2984, 3112, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3368, 3496, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968},
1248 {40, 104, 176, 208, 256, 328, 392, 440, 504, 568, 616, 680, 744, 808, 872, 904, 968, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1864, 1928, 1992, 2024, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6200, 6456, 6456},
1249 {56, 120, 208, 256, 328, 408, 488, 552, 632, 696, 776, 840, 904, 1000, 1064, 1128, 1192, 1288, 1352, 1416, 1480, 1544, 1608, 1736, 1800, 1864, 1928, 1992, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2600, 2664, 2728, 2792, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 4968, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992},
1250 {72, 144, 224, 328, 424, 504, 600, 680, 776, 872, 968, 1032, 1128, 1224, 1320, 1384, 1480, 1544, 1672, 1736, 1864, 1928, 2024, 2088, 2216, 2280, 2344, 2472, 2536, 2664, 2728, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3752, 3880, 4008, 4008, 4136, 4264, 4392, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9528},
1251 {328, 176, 256, 392, 504, 600, 712, 808, 936, 1032, 1128, 1224, 1352, 1480, 1544, 1672, 1736, 1864, 1992, 2088, 2216, 2280, 2408, 2472, 2600, 2728, 2792, 2984, 2984, 3112, 3240, 3368, 3496, 3496, 3624, 3752, 3880, 4008, 4136, 4136, 4264, 4392, 4584, 4584, 4776, 4776, 4968, 4968, 5160, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448},
1252 {104, 224, 328, 472, 584, 712, 840, 968, 1096, 1224, 1320, 1480, 1608, 1672, 1800, 1928, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536},
1253 {120, 256, 392, 536, 680, 808, 968, 1096, 1256, 1384, 1544, 1672, 1800, 1928, 2088, 2216, 2344, 2536, 2664, 2792, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4264, 4392, 4584, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12216, 12576, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15264},
1254 {136, 296, 456, 616, 776, 936, 1096, 1256, 1416, 1544, 1736, 1864, 2024, 2216, 2344, 2536, 2664, 2856, 2984, 3112, 3368, 3496, 3624, 3752, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 5160, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11832, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568},
1255 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8504, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080},
1256 {176, 376, 584, 776, 1000, 1192, 1384, 1608, 1800, 2024, 2216, 2408, 2600, 2792, 2984, 3240, 3496, 3624, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152},
1257 {208, 440, 680, 904, 1128, 1352, 1608, 1800, 2024, 2280, 2472, 2728, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4776, 4968, 5352, 5544, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 8760, 9144, 9528, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11064, 11448, 11832, 11832, 12216, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 23688, 24496, 24496, 24496, 24496, 25456},
1258 {224, 488, 744, 1000, 1256, 1544, 1800, 2024, 2280, 2536, 2856, 3112, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 26416, 27376, 27376, 27376, 27376, 28336, 28336},
1259 {256, 552, 840, 1128, 1416, 1736, 1992, 2280, 2600, 2856, 3112, 3496, 3752, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 5992, 6200, 6456, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704},
1260 {280, 600, 904, 1224, 1544, 1800, 2152, 2472, 2728, 3112, 3368, 3624, 4008, 4264, 4584, 4968, 5160, 5544, 5736, 6200, 6456, 6712, 6968, 7224, 7736, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008},
1261 {328, 632, 968, 1288, 1608, 1928, 2280, 2600, 2984, 3240, 3624, 3880, 4264, 4584, 4968, 5160, 5544, 5992, 6200, 6456, 6712, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 35160},
1262 {336, 696, 1064, 1416, 1800, 2152, 2536, 2856, 3240, 3624, 4008, 4392, 4776, 5160, 5352, 5736, 6200, 6456, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 39232},
1263 {376, 776, 1160, 1544, 1992, 2344, 2792, 3112, 3624, 4008, 4392, 4776, 5160, 5544, 5992, 6200, 6712, 7224, 7480, 7992, 8248, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14112, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816},
1264 {408, 840, 1288, 1736, 2152, 2600, 2984, 3496, 3880, 4264, 4776, 5160, 5544, 5992, 6456, 6968, 7224, 7736, 8248, 8504, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19080, 19848, 20616, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888},
1265 {440, 904, 1384, 1864, 2344, 2792, 3240, 3752, 4136, 4584, 5160, 5544, 5992, 6456, 6968, 7480, 7992, 8248, 8760, 9144, 9912, 10296, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 20616, 21384, 22152, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 48936, 51024, 51024, 51024},
1266 {488, 1000, 1480, 1992, 2472, 2984, 3496, 4008, 4584, 4968, 5544, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 9912, 10680, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056},
1267 {520, 1064, 1608, 2152, 2664, 3240, 3752, 4264, 4776, 5352, 5992, 6456, 6968, 7480, 7992, 8504, 9144, 9528, 10296, 10680, 11448, 11832, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 16992, 17568, 18336, 19080, 19080, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256},
1268 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776},
1269 {584, 1192, 1800, 2408, 2984, 3624, 4264, 4968, 5544, 5992, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 22920, 23688, 24496, 25456, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592},
1270 {616, 1256, 1864, 2536, 3112, 3752, 4392, 5160, 5736, 6200, 6968, 7480, 8248, 8760, 9528, 10296, 10680, 11448, 12216, 12576, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 29296, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 66592, 68808, 68808, 68808, 71112},
1271 {712, 1480, 2216, 2984, 3752, 4392, 5160, 5992, 6712, 7480, 8248, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 14688, 15264, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376, 75376}
1274 {32, 88, 152, 208, 256, 328, 376, 424, 488, 536, 600, 648, 712, 776, 808, 872, 936, 1000, 1032, 1096, 1160, 1224, 1256, 1320, 1384, 1416, 1480, 1544, 1608, 1672, 1736, 1800, 1800, 1864, 1928, 1992, 2088, 2088, 2152, 2216, 2280, 2344, 2408, 2472, 2536, 2536, 2600, 2664, 2728, 2792, 2856, 2856, 2984, 2984, 3112, 3112, 3240, 3240, 3240, 3368, 3368, 3496, 3496, 3624, 3624, 3624, 3752, 3752, 3880, 3880, 4008, 4008, 4008, 4136, 4136, 4136, 4264, 4264, 4392, 4392, 4584, 4584, 4584, 4776, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 5992, 5992, 6200},
1275 {56, 144, 208, 256, 344, 424, 488, 568, 632, 712, 776, 872, 936, 1000, 1064, 1160, 1224, 1288, 1384, 1416, 1544, 1608, 1672, 1736, 1800, 1864, 1992, 2024, 2088, 2152, 2280, 2344, 2408, 2472, 2536, 2600, 2728, 2792, 2856, 2856, 2984, 3112, 3112, 3240, 3240, 3368, 3496, 3496, 3624, 3624, 3752, 3752, 3880, 4008, 4008, 4008, 4136, 4136, 4264, 4264, 4392, 4584, 4584, 4776, 4776, 4776, 4968, 4968, 5160, 5160, 5160, 5160, 5352, 5544, 5544, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992},
1276 {72, 176, 256, 328, 424, 520, 616, 696, 776, 872, 968, 1064, 1160, 1256, 1320, 1416, 1544, 1608, 1672, 1800, 1864, 1992, 2088, 2152, 2216, 2344, 2408, 2536, 2600, 2664, 2792, 2856, 2984, 3112, 3112, 3240, 3368, 3368, 3496, 3624, 3624, 3752, 3880, 4008, 4008, 4136, 4264, 4264, 4392, 4584, 4584, 4584, 4776, 4776, 4968, 5160, 5160, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5736, 5992, 5992, 6200, 6200, 6200, 6456, 6456, 6456, 6712, 6712, 6712, 6968, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7736, 7992, 7992, 7992, 8248, 8248, 8248, 8504, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912},
1277 {104, 208, 328, 440, 568, 680, 808, 904, 1032, 1160, 1256, 1384, 1480, 1608, 1736, 1864, 1992, 2088, 2216, 2344, 2472, 2536, 2664, 2792, 2856, 2984, 3112, 3240, 3368, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4392, 4584, 4776, 4776, 4968, 4968, 5160, 5352, 5352, 5544, 5544, 5736, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6712, 6968, 6968, 7224, 7224, 7224, 7480, 7480, 7736, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12576, 12960, 12960},
1278 {120, 256, 408, 552, 696, 840, 1000, 1128, 1288, 1416, 1544, 1736, 1864, 1992, 2152, 2280, 2408, 2600, 2728, 2856, 2984, 3112, 3240, 3496, 3624, 3752, 3880, 4008, 4136, 4264, 4392, 4584, 4776, 4968, 4968, 5160, 5352, 5544, 5544, 5736, 5992, 5992, 6200, 6200, 6456, 6456, 6712, 6968, 6968, 7224, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8248, 8504, 8504, 8760, 8760, 9144, 9144, 9144, 9528, 9528, 9912, 9912, 9912, 10296, 10296, 10296, 10680, 10680, 11064, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840},
1279 {144, 328, 504, 680, 872, 1032, 1224, 1384, 1544, 1736, 1928, 2088, 2280, 2472, 2664, 2792, 2984, 3112, 3368, 3496, 3752, 3880, 4008, 4264, 4392, 4584, 4776, 4968, 5160, 5352, 5544, 5736, 5736, 5992, 6200, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7480, 7736, 7992, 7992, 8248, 8504, 8760, 8760, 9144, 9144, 9528, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11064, 11448, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12576, 12960, 12960, 13536, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19080},
1280 {176, 392, 600, 808, 1032, 1224, 1480, 1672, 1864, 2088, 2280, 2472, 2728, 2984, 3112, 3368, 3496, 3752, 4008, 4136, 4392, 4584, 4776, 4968, 5160, 5352, 5736, 5992, 5992, 6200, 6456, 6712, 6968, 6968, 7224, 7480, 7736, 7992, 8248, 8248, 8504, 8760, 9144, 9144, 9528, 9528, 9912, 9912, 10296, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 11832, 12576, 12576, 12960, 12960, 12960, 13536, 13536, 14112, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 18336, 19080, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 20616, 21384, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920},
1281 {224, 472, 712, 968, 1224, 1480, 1672, 1928, 2216, 2472, 2664, 2984, 3240, 3368, 3624, 3880, 4136, 4392, 4584, 4968, 5160, 5352, 5736, 5992, 6200, 6456, 6712, 6712, 6968, 7224, 7480, 7736, 7992, 8248, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 10680, 11064, 11448, 11448, 11832, 11832, 12216, 12576, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 14688, 14688, 15264, 15264, 15840, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376},
1282 {256, 536, 808, 1096, 1384, 1672, 1928, 2216, 2536, 2792, 3112, 3368, 3624, 3880, 4264, 4584, 4776, 4968, 5352, 5544, 5992, 6200, 6456, 6712, 6968, 7224, 7480, 7736, 7992, 8504, 8760, 9144, 9144, 9528, 9912, 9912, 10296, 10680, 11064, 11064, 11448, 11832, 12216, 12216, 12576, 12960, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15264, 15840, 15840, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19080, 19848, 19848, 19848, 20616, 20616, 21384, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 30576},
1283 {296, 616, 936, 1256, 1544, 1864, 2216, 2536, 2856, 3112, 3496, 3752, 4136, 4392, 4776, 5160, 5352, 5736, 5992, 6200, 6712, 6968, 7224, 7480, 7992, 8248, 8504, 8760, 9144, 9528, 9912, 10296, 10296, 10680, 11064, 11448, 11832, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 20616, 21384, 21384, 24264, 24264, 24264, 22920, 22920, 23688, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 27376, 28336, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160},
1284 {328, 680, 1032, 1384, 1736, 2088, 2472, 2792, 3112, 3496, 3880, 4264, 4584, 4968, 5352, 5736, 5992, 6200, 6712, 6968, 7480, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10296, 10680, 11064, 11448, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15264, 15840, 16416, 16416, 16992, 16992, 17568, 18336, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 20616, 21384, 21384, 24264, 24264, 22920, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888},
1285 {376, 776, 1192, 1608, 2024, 2408, 2792, 3240, 3624, 4008, 4392, 4776, 5352, 5736, 5992, 6456, 6968, 7224, 7736, 7992, 8504, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16416, 16992, 17568, 17568, 18336, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816},
1286 {440, 904, 1352, 1800, 2280, 2728, 3240, 3624, 4136, 4584, 4968, 5544, 5992, 6456, 6712, 7224, 7736, 8248, 8760, 9144, 9528, 9912, 10680, 11064, 11448, 11832, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15264, 15840, 16416, 16992, 17568, 17568, 18336, 19080, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22152, 22920, 23688, 23688, 24496, 24496, 25456, 25456, 25456, 25456, 27376, 27376, 28336, 28336, 28336, 29296, 29296, 30576, 30576, 30576, 31704, 31704, 32856, 32856, 32856, 34008, 34008, 35160, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 46888, 48936, 48936, 48936, 48936, 51024},
1287 {488, 1000, 1544, 2024, 2536, 3112, 3624, 4136, 4584, 5160, 5736, 6200, 6712, 7224, 7736, 8248, 8760, 9144, 9912, 10296, 10680, 11448, 11832, 12216, 12960, 13536, 14112, 14688, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 18336, 19080, 19848, 19848, 20616, 21384, 21384, 22152, 22920, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 26416, 27376, 27376, 28336, 29296, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 52752, 55056, 55056, 55056, 55056, 57336, 57336},
1288 {552, 1128, 1736, 2280, 2856, 3496, 4008, 4584, 5160, 5736, 6200, 6968, 7480, 7992, 8504, 9144, 9912, 10296, 11064, 11448, 12216, 12576, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22152, 22920, 23688, 24496, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776},
1289 {600, 1224, 1800, 2472, 3112, 3624, 4264, 4968, 5544, 6200, 6712, 7224, 7992, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15264, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 23688, 24496, 25456, 25456, 26416, 27376, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808},
1290 {632, 1288, 1928, 2600, 3240, 3880, 4584, 5160, 5992, 6456, 7224, 7736, 8504, 9144, 9912, 10296, 11064, 11832, 12216, 12960, 13536, 14112, 14688, 15840, 16416, 16992, 17568, 18336, 19080, 19848, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 24496, 25456, 26416, 26416, 27376, 28336, 28336, 29296, 30576, 30576, 31704, 31704, 32856, 32856, 34008, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 68808, 71112, 71112, 71112, 71112},
1291 {696, 1416, 2152, 2856, 3624, 4392, 5160, 5736, 6456, 7224, 7992, 8760, 9528, 10296, 10680, 11448, 12216, 12960, 13536, 14688, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 26416, 27376, 28336, 29296, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 71112, 73712, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 78704},
1292 {776, 1544, 2344, 3112, 4008, 4776, 5544, 6200, 7224, 7992, 8760, 9528, 10296, 11064, 11832, 12576, 13536, 14112, 15264, 15840, 16416, 17568, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 23688, 24496, 25456, 26416, 27376, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 32856, 34008, 35160, 35160, 36696, 37888, 37888, 39232, 39232, 40576, 40576, 42368, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936},
1293 {840, 1736, 2600, 3496, 4264, 5160, 5992, 6968, 7736, 8504, 9528, 10296, 11064, 12216, 12960, 13536, 14688, 15264, 16416, 16992, 18336, 19080, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 25456, 26416, 27376, 28336, 29296, 30576, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 61664, 63776, 63776, 66592, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800},
1294 {904, 1864, 2792, 3752, 4584, 5544, 6456, 7480, 8248, 9144, 10296, 11064, 12216, 12960, 14112, 14688, 15840, 16992, 17568, 18336, 19848, 20616, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 42368, 43816, 45352, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 97896, 101840, 101840, 101840},
1295 {1000, 1992, 2984, 4008, 4968, 5992, 6968, 7992, 9144, 9912, 11064, 12216, 12960, 14112, 15264, 15840, 16992, 18336, 19080, 19848, 21384, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 31704, 32856, 34008, 35160, 36696, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136},
1296 {1064, 2152, 3240, 4264, 5352, 6456, 7480, 8504, 9528, 10680, 11832, 12960, 14112, 15264, 16416, 16992, 18336, 19080, 20616, 21384, 22920, 23688, 24496, 25456, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 43816, 45352, 46888, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 97896, 101840, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816},
1297 {1128, 2280, 3496, 4584, 5736, 6968, 7992, 9144, 10296, 11448, 12576, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 27376, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 40576, 42368, 43816, 45352, 45352, 46888, 48936, 48936, 51024, 51024, 52752, 55056, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840,101840,101840,101840,105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496},
1298 {1192, 2408, 3624, 4968, 5992, 7224, 8504, 9912, 11064, 12216, 13536, 14688, 15840, 16992, 18336, 19848, 20616, 22152, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 42368, 43816, 45352, 46888, 46888, 48936, 51024, 51024, 52752, 52752, 55056, 57336, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 84760, 84760, 84760, 87936, 87936, 90816, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 105528, 110136, 110136, 110136, 115040, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208},
1299 {1256, 2536, 3752, 5160, 6200, 7480, 8760, 10296, 11448, 12576, 14112, 15264, 16416, 17568, 19080, 20616, 21384, 22920, 24496, 25456, 26416, 28336, 29296, 30576, 31704, 32856, 34008, 35160, 36696, 37888, 39232, 40576, 42368, 43816, 43816, 45352, 46888, 48936, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 61664, 63776, 63776, 66592, 66592, 68808, 71112, 71112, 73712, 73712, 76208, 76208, 78704, 78704, 81176, 81176, 81176, 84760, 84760, 87936, 87936, 87936, 90816, 90816, 93800, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040,115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 124464, 128496, 128496, 128496, 128496, 133208, 133208, 133208, 133208, 137792, 137792, 137792, 142248},
1300 {1480, 2984, 4392, 5992, 7480, 8760, 10296, 11832, 13536, 14688, 16416, 17568, 19080, 20616, 22152, 23688, 25456, 26416, 28336, 29296, 30576, 32856, 34008, 35160, 36696, 37888, 40576, 40576, 42368, 43816, 45352, 46888, 48936, 51024, 52752, 52752, 55056, 55056, 57336, 59256, 59256, 61664, 63776, 63776, 66592, 68808, 68808, 71112, 73712, 75376, 75376, 75376, 75376, 75376, 75376, 81176, 84760, 84760, 87936, 87936, 90816, 90816, 93800, 93800, 97896, 97896, 97896, 101840, 101840, 105528, 105528, 105528, 110136, 110136, 110136, 110136, 115040, 115040, 115040, 119816, 119816, 119816, 124464, 124464, 124464, 128496, 128496, 128496, 133208, 133208, 133208, 137792, 137792, 137792, 142248, 142248, 142248, 146856, 146856,149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776, 149776}
1303 RgSchUlIMcsTbl rgUlIMcsTbl = {
1304 {2, 0}, {2, 1}, {2, 2}, {2, 3}, {2, 4}, {2, 5},
1305 {2, 6}, {2, 7}, {2, 8}, {2, 9}, {2, 10},
1306 {4, 10}, {4, 11}, {4, 12}, {4, 13}, {4, 14},
1307 {4, 15}, {4, 16}, {4, 17}, {4, 18}, {4, 19},
1308 {6, 19}, {6, 20}, {6, 21}, {6, 22}, {6, 23},
1309 {6, 24}, {6, 25}, {6, 26}
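/* Illustrative reading (added note, not from the original source): each entry
 * above appears to pair the UL modulation order with the TBS index, so
 * rgUlIMcsTbl[11] = {4, 10} would mean UL MCS 11 uses 16QAM (Qm = 4) with
 * I_TBS 10, matching the {2,..}/{4,..}/{6,..} grouping of the rows. */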
1311 RgSchUeCatTbl rgUeCatTbl = {
1312 /*Column1: Maximum number of bits of a UL-SCH
1313 transport block transmitted within a TTI
1315 Column2: Maximum number of bits of a DL-SCH
1316 transport block received within a TTI
1318 Column3: Total number of soft channel bits
1320 Column4: Support for 64QAM in UL
1322 Column5: Maximum number of DL-SCH transport
1323 block bits received within a TTI
1325 Column6: Maximum number of supported layers for
1326 spatial multiplexing in DL
1328 {5160, {10296,0}, 250368, FALSE, 10296, 1},
1329 {25456, {51024,0}, 1237248, FALSE, 51024, 2},
1330 {51024, {75376,0}, 1237248, FALSE, 102048, 2},
1331 {51024, {75376,0}, 1827072, FALSE, 150752, 2},
1332 {75376, {149776,0}, 3667200, TRUE, 299552, 4},
1333 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1334 {51024, {75376,149776}, 3654144, FALSE, 301504, 4},
1335 {149776,{299856,0}, 35982720,TRUE, 2998560, 8}
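/* Illustrative reading of one row (added note): rgUeCatTbl[4], i.e. UE
 * category 5, allows up to 75376 UL-SCH bits per TTI, a single DL-SCH
 * transport block of up to 149776 bits, 3667200 total soft channel bits,
 * 64QAM in UL, 299552 DL-SCH bits per TTI in total, and up to 4 DL layers. */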
1338 /* [ccpu00138532]-ADD-The table below stores the minimum HARQ RTT time
1339 in Downlink for TDD and FDD. Indices 0 to 6 map to TDD UL-DL configs 0-6.
1340 Index 7 maps to FDD */
1341 U8 rgSchCmnHarqRtt[8] = {4,7,10,9,12,15,6,8};
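/* Example (added note): rgSchCmnHarqRtt[7] = 8, i.e. the minimum DL HARQ RTT
 * assumed for FDD is 8 subframes; rgSchCmnHarqRtt[1] = 7 applies to TDD
 * UL-DL configuration 1. */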
1342 /* Number of CFI switchover window entries equals 7 TDD indices + 1 FDD index */
1343 U8 rgSchCfiSwitchOvrWinLen[] = {7, 4, 2, 3, 2, 1, 6, 8};
1345 /* EffTbl is calculated for single layer and two layers.
1346 * CqiToTbs is calculated for single layer and two layers */
1347 RgSchCmnTbSzEff rgSchCmnNorCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1348 RgSchCmnTbSzEff rgSchCmnNorCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1349 /* New variable to store UL efficiency values for normal and extended CP */
1350 RgSchCmnTbSzEff rgSchCmnNorUlEff[1],rgSchCmnExtUlEff[1];
1351 RgSchCmnCqiToTbs rgSchCmnNorCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1352 RgSchCmnCqiToTbs rgSchCmnNorCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnNorCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1353 RgSchCmnCqiToTbs *rgSchCmnCqiToTbs[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_CFI];
1354 RgSchCmnTbSzEff rgSchCmnExtCfi1Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2Eff[RGSCH_MAX_NUM_LYR_PERCW];
1355 RgSchCmnTbSzEff rgSchCmnExtCfi3Eff[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4Eff[RGSCH_MAX_NUM_LYR_PERCW];
1356 RgSchCmnCqiToTbs rgSchCmnExtCfi1CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi2CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1357 RgSchCmnCqiToTbs rgSchCmnExtCfi3CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW], rgSchCmnExtCfi4CqiToTbs[RGSCH_MAX_NUM_LYR_PERCW];
1358 /* Include CRS REs while calculating Efficiency */
1359 RgSchCmnTbSzEff *rgSchCmnEffTbl[RGSCH_MAX_NUM_LYR_PERCW][RG_SCH_CMN_MAX_CP][RG_SCH_CMN_MAX_ANT_CONF][RG_SCH_CMN_MAX_CFI];
1360 RgSchCmnTbSzEff *rgSchCmnUlEffTbl[RG_SCH_CMN_MAX_CP];
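/* Indexing example (added note): rgSchCmnEffTbl is indexed as
 * [layer index][cyclic prefix][antenna configuration index][CFI index], so
 * rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][2] would point at the two-layer
 * efficiency table for normal CP, antenna configuration index 0 and CFI 3
 * (the exact meaning of the antenna configuration index is assumed here). */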
1362 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3, 1};
1364 /* Added matrix 'rgRaPrmblToRaFrmTbl' for computation of RA sub-frames from RA preamble */
1365 RgSchRaPrmblToRaFrmTbl rgRaPrmblToRaFrmTbl = {1, 2, 2, 3};
1368 EXTERN RgUlSchdInits rgSchUlSchdInits;
1369 EXTERN RgDlSchdInits rgSchDlSchdInits;
1370 EXTERN RgDlfsSchdInits rgSchDlfsSchdInits;
1372 EXTERN RgEmtcUlSchdInits rgSchEmtcUlSchdInits;
1373 EXTERN RgEmtcDlSchdInits rgSchEmtcDlSchdInits;
1377 PRIVATE S16 rgSCHCmnUeIdleExdThrsld ARGS((
1381 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe ARGS((
1385 PRIVATE Void rgSCHCmnDelDedPreamble ARGS((
1389 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe ARGS((
1392 CmLteTimingInfo timingInfo
1394 PRIVATE Void rgSCHCmnDelRachInfo ARGS((
1398 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe ARGS((
1404 PRIVATE Void rgSCHCmnHdlHoPo ARGS((
1406 CmLListCp *raRspLst,
1407 RgSchRaReqInfo *raReq
1409 PRIVATE Void rgSCHCmnAllocPoHoGrnt ARGS((
1411 CmLListCp *raRspLst,
1413 RgSchRaReqInfo *raReq
1415 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf ARGS((
1422 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ ARGS((
1426 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ ARGS((
1430 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx ARGS((
1433 PRIVATE Void rgSCHCmnUpdRachParam ARGS((
1436 PRIVATE S16 rgSCHCmnAllocPOParam ARGS((
1444 PRIVATE Void rgSCHCmnGenPdcchOrder ARGS((
1448 PRIVATE Void rgSCHCmnCfgRachDedPrm ARGS((
1453 PRIVATE Void rgSCHCmnHdlUlInactUes ARGS((
1456 PRIVATE Void rgSCHCmnHdlDlInactUes ARGS((
1459 PRIVATE Void rgSCHCmnUlInit ARGS((Void
1461 PRIVATE Void rgSCHCmnDlInit ARGS((Void
1463 PRIVATE Void rgSCHCmnInitDlRbAllocInfo ARGS((
1464 RgSchCmnDlRbAllocInfo *allocInfo
1466 PRIVATE Void rgSCHCmnUpdUlCompEffBsr ARGS((
1470 PRIVATE Void rgSCHCmnUlSetAllUnSched ARGS((
1471 RgSchCmnUlRbAllocInfo *allocInfo
1473 PRIVATE Void rgSCHCmnUlUpdSf ARGS((
1475 RgSchCmnUlRbAllocInfo *allocInfo,
1478 PRIVATE Void rgSCHCmnUlHndlAllocRetx ARGS((
1480 RgSchCmnUlRbAllocInfo *allocInfo,
1485 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch ARGS((
1489 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch ARGS((
1493 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ ARGS((
1497 PRIVATE S16 rgSCHCmnTmrExpiry ARGS((
1498 PTR cb, /* Pointer to timer control block */
1499 S16 tmrEvnt /* Timer Event */
1501 PRIVATE S16 rgSCHCmnTmrProc ARGS((
1504 PRIVATE Void rgSCHCmnAddUeToRefreshQ ARGS((
1509 PRIVATE Void rgSCHCmnDlCcchRetx ARGS((
1511 RgSchCmnDlRbAllocInfo *allocInfo
1513 PRIVATE Void rgSCHCmnUpdUeMimoInfo ARGS((
1517 RgSchCmnCell *cellSchd
1519 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo ARGS((
1523 RgSchCmnUe *ueSchCmn,
1524 RgSchCmnCell *cellSchd,
1528 PRIVATE Void rgSCHCmnDlCcchSduRetx ARGS((
1530 RgSchCmnDlRbAllocInfo *allocInfo
1532 PRIVATE Void rgSCHCmnDlCcchSduTx ARGS((
1534 RgSchCmnDlRbAllocInfo *allocInfo
1536 PRIVATE S16 rgSCHCmnCcchSduAlloc ARGS((
1539 RgSchCmnDlRbAllocInfo *allocInfo
1541 PRIVATE S16 rgSCHCmnCcchSduDedAlloc ARGS((
1545 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc ARGS((
1551 PRIVATE Void rgSCHCmnInitVars ARGS((
1555 /*ccpu00117180 - DEL - Moved rgSCHCmnUpdVars to .x as its access is now PUBLIC */
1556 PRIVATE Void rgSCHCmnUlRbAllocForLst ARGS((
1562 CmLListCp *nonSchdLst,
1565 PRIVATE S16 rgSCHCmnUlRbAllocForUe ARGS((
1572 PRIVATE Void rgSCHCmnMsg3GrntReq ARGS((
1576 RgSchUlHqProcCb *hqProc,
1577 RgSchUlAlloc **ulAllocRef,
1580 PRIVATE Void rgSCHCmnDlCcchRarAlloc ARGS((
1583 PRIVATE Void rgSCHCmnDlCcchTx ARGS((
1585 RgSchCmnDlRbAllocInfo *allocInfo
1587 PRIVATE Void rgSCHCmnDlBcchPcch ARGS((
1589 RgSchCmnDlRbAllocInfo *allocInfo,
1590 RgInfSfAlloc *subfrmAlloc
1592 PUBLIC Bool rgSCHCmnChkInWin ARGS((
1593 CmLteTimingInfo frm,
1594 CmLteTimingInfo start,
1597 PUBLIC Bool rgSCHCmnChkPastWin ARGS((
1598 CmLteTimingInfo frm,
1601 PRIVATE Void rgSCHCmnClcAlloc ARGS((
1604 RgSchClcDlLcCb *lch,
1606 RgSchCmnDlRbAllocInfo *allocInfo
1609 PRIVATE Void rgSCHCmnClcRbAlloc ARGS((
1620 PRIVATE S16 rgSCHCmnMsg4Alloc ARGS((
1623 RgSchCmnDlRbAllocInfo *allocInfo
1625 PRIVATE S16 rgSCHCmnMsg4DedAlloc ARGS((
1629 PRIVATE Void rgSCHCmnDlRaRsp ARGS((
1631 RgSchCmnDlRbAllocInfo *allocInfo
1633 PRIVATE S16 rgSCHCmnRaRspAlloc ARGS((
1639 RgSchCmnDlRbAllocInfo *allocInfo
1641 PRIVATE Void rgSCHCmnUlUeDelAllocs ARGS((
1645 PRIVATE Void rgSCHCmnDlSetUeAllocLmt ARGS((
1650 PRIVATE S16 rgSCHCmnDlRgrCellCfg ARGS((
1655 PRIVATE Void rgSCHCmnUlAdapRetx ARGS((
1656 RgSchUlAlloc *alloc,
1657 RgSchUlHqProcCb *proc
1659 PRIVATE Void rgSCHCmnUlUpdAllocRetx ARGS((
1663 PRIVATE Void rgSCHCmnUlSfReTxAllocs ARGS((
1667 /* Fix: syed Adaptive Msg3 Retx crash. */
1669 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1673 RgrUeRecfg *ueRecfg,
1677 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg ARGS
1687 * DL RB allocation specific functions
1690 PRIVATE Void rgSCHCmnDlRbAlloc ARGS((
1692 RgSchCmnDlRbAllocInfo *allocInfo
1694 PRIVATE Void rgSCHCmnNonDlfsRbAlloc ARGS((
1696 RgSchCmnDlRbAllocInfo *allocInfo
1698 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc ARGS((
1700 RgSchDlRbAlloc *cmnAllocInfo));
1703 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj ARGS((
1705 RgSchDlRbAlloc *cmnAllocInfo,
1709 /* Added function to adjust TBSize*/
1710 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj ARGS((
1711 RgSchDlRbAlloc *allocInfo,
1712 U8 numOvrlapgPbchRb,
1718 /* Added function to find num of overlapping PBCH rb*/
1719 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs ARGS((
1722 RgSchDlRbAlloc *allocInfo,
1723 U8 *numOvrlapgPbchRb
1726 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl ARGS((
1729 RgSchDlRbAlloc *allocInfo
1733 PRIVATE Void rgSCHCmnFindCodeRate ARGS((
1736 RgSchDlRbAlloc *allocInfo,
1742 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc ARGS((
1744 RgSchCmnMsg4RbAlloc *msg4AllocInfo,
1747 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc ARGS((
1753 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc ARGS((
1760 PRIVATE U32 rgSCHCmnCalcRiv ARGS(( U8 bw,
1766 PRIVATE Void rgSCHCmnUpdHqAndDai ARGS((
1767 RgSchDlHqProcCb *hqP,
1769 RgSchDlHqTbCb *tbCb,
1772 PRIVATE S16 rgSCHCmnUlCalcAvailBw ARGS((
1774 RgrCellCfg *cellCfg,
1779 PRIVATE S16 rgSCHCmnDlKdashUlAscInit ARGS((
1782 PRIVATE S16 rgSCHCmnDlANFdbkInit ARGS((
1785 PRIVATE S16 rgSCHCmnDlNpValInit ARGS((
1788 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst ARGS((
1791 PRIVATE S16 rgSCHCmnDlCpyRachInfo ARGS((
1793 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
1796 PRIVATE S16 rgSCHCmnDlRachInfoInit ARGS((
1799 PRIVATE S16 rgSCHCmnDlPhichOffsetInit ARGS((
1804 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt ARGS
1810 PRIVATE RgSchCmnRank rgSCHCmnComputeRank ARGS
1817 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3 ARGS
1822 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3 ARGS
1827 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4 ARGS
1832 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4 ARGS
1837 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr ARGS
1844 /* comcodsepa : start */
1847 * @brief This function computes efficiency and stores in a table.
1851 * Function: rgSCHCmnCompEff
1852 * Purpose: This function computes the efficiency as the number of
1853 * bits per 1024 resource elements. The CFI table is also filled
1854 * with the same information so that comparisons remain valid
1856 * Invoked by: Scheduler
1858 * @param[in] U8 noPdcchSym
1859 * @param[in] U8 cpType
1860 * @param[in] U8 txAntIdx
1861 * @param[in] RgSchCmnTbSzEff* effTbl
1866 PRIVATE Void rgSCHCmnCompEff
1871 RgSchCmnTbSzEff *effTbl
1874 PRIVATE Void rgSCHCmnCompEff(noPdcchSym, cpType, txAntIdx, effTbl)
1878 RgSchCmnTbSzEff *effTbl;
1883 U8 resOfCrs; /* Effective REs occupied by CRS */
1886 TRC2(rgSCHCmnCompEff);
1890 case RG_SCH_CMN_NOR_CP:
1893 case RG_SCH_CMN_EXT_CP:
1897 /* Generate a log error. This case should never be executed */
1901 /* Depending on the Tx Antenna Index, deduct the
1902 * Resource elements for the CRS */
1906 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
1909 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
1912 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
1915 /* Generate a log error. This case should never be executed */
1918 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
1919 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
1922 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
1924 /* This line computes the coding efficiency per 1024 REs */
1925 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
1927 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
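/* Worked example (added note, constants assumed rather than taken from this
 * file): with normal CP (14 symbols per subframe), noPdcchSym = 3 and a
 * single CRS port occupying roughly 8 REs per RB pair, noResPerRb would be
 * (14 - 3) * 12 - 8 = 124. For rgTbSzTbl[0][i][3] = 1384 bits over 4 RBs,
 * the j = 3 iteration adds 1384 * 1024 / (124 * 4) = 2857 to (*effTbl)[i];
 * the division above then averages this over all RB counts. */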
1932 * @brief This function computes efficiency and stores in a table.
1936 * Function: rgSCHCmnCompUlEff
1937 * Purpose: This function computes the uplink efficiency as the
1938 * number of bits per 1024 resource elements. The table is filled
1939 * such that comparison across entries remains valid
1941 * Invoked by: Scheduler
1943 * @param[in] U8 noUlRsSym
1944 * @param[in] U8 cpType
1946 * @param[in] RgSchCmnTbSzEff* effTbl
1951 PRIVATE Void rgSCHCmnCompUlEff
1955 RgSchCmnTbSzEff *effTbl
1958 PRIVATE Void rgSCHCmnCompUlEff(noUlRsSym, cpType, effTbl)
1961 RgSchCmnTbSzEff *effTbl;
1968 TRC2(rgSCHCmnCompUlEff);
1972 case RG_SCH_CMN_NOR_CP:
1975 case RG_SCH_CMN_EXT_CP:
1979 /* Generate a log error. This case should never be executed */
1983 noResPerRb = ((noSymPerRb - noUlRsSym) * RB_SCH_CMN_NUM_SCS_PER_RB);
1984 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
1987 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
1989 /* This line computes the coding efficiency per 1024 REs */
1990 (*effTbl)[i] += (rgTbSzTbl[0][i][j] * 1024) / (noResPerRb * (j+1));
1992 (*effTbl)[i] /= RG_SCH_CMN_NUM_RBS;
1998 * @brief This function computes efficiency for 2 layers and stores in a table.
2002 * Function: rgSCHCmn2LyrCompEff
2003 * Purpose: This function computes the efficiency as the number of
2004 * bits per 1024 resource elements. The CFI table is also filled
2005 * with the same information so that comparisons remain valid
2007 * Invoked by: Scheduler
2009 * @param[in] U8 noPdcchSym
2010 * @param[in] U8 cpType
2011 * @param[in] U8 txAntIdx
2012 * @param[in] RgSchCmnTbSzEff* effTbl2Lyr
2017 PRIVATE Void rgSCHCmn2LyrCompEff
2022 RgSchCmnTbSzEff *effTbl2Lyr
2025 PRIVATE Void rgSCHCmn2LyrCompEff(noPdcchSym, cpType, txAntIdx, effTbl2Lyr)
2029 RgSchCmnTbSzEff *effTbl2Lyr;
2034 U8 resOfCrs; /* Effective REs occupied by CRS */
2037 TRC2(rgSCHCmn2LyrCompEff);
2041 case RG_SCH_CMN_NOR_CP:
2044 case RG_SCH_CMN_EXT_CP:
2048 /* Generate a log error. This case should never be executed */
2052 /* Depending on the Tx Antenna Index, deduct the
2053 * Resource elements for the CRS */
2057 resOfCrs = RG_SCH_CMN_EFF_CRS_ONE_ANT_PORT;
2060 resOfCrs = RG_SCH_CMN_EFF_CRS_TWO_ANT_PORT;
2063 resOfCrs = RG_SCH_CMN_EFF_CRS_FOUR_ANT_PORT;
2066 /* Generate a log error. This case should never be executed */
2070 noResPerRb = ((noSymPerRb - noPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - resOfCrs;
2071 for (i = 0; i < RG_SCH_CMN_NUM_TBS; i++)
2073 (*effTbl2Lyr)[i] = 0;
2074 for (j = 0; j < RG_SCH_CMN_NUM_RBS; j++)
2076 /* This line computes the coding efficiency per 1024 REs */
2077 (*effTbl2Lyr)[i] += (rgTbSzTbl[1][i][j] * 1024) / (noResPerRb * (j+1));
2079 (*effTbl2Lyr)[i] /= RG_SCH_CMN_NUM_RBS;
2086 * @brief This function initializes the rgSchCmnDciFrmtSizes table.
2090 * Function: rgSCHCmnGetDciFrmtSizes
2091 * Purpose: This function determines the sizes of all
2092 * the available DCI Formats. The order of
2093 * bits addition for each format is in accordance
2095 * Invoked by: rgSCHCmnRgrCellCfg
2101 PRIVATE Void rgSCHCmnGetDciFrmtSizes
2106 PRIVATE Void rgSCHCmnGetDciFrmtSizes(cell)
2111 TRC2(rgSCHCmnGetDciFrmtSizes);
2113 /* DCI Format 0 size determination */
2114 rgSchCmnDciFrmtSizes[0] = 1 +
2116 rgSCHUtlLog32bitNbase2((cell->bwCfg.ulTotalBw * \
2117 (cell->bwCfg.ulTotalBw + 1))/2) +
2127 /* DCI Format 1 size determination */
2128 rgSchCmnDciFrmtSizes[1] = 1 +
2129 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2134 4 + 2 + /* HqProc Id and DAI */
2140 /* DCI Format 1A size determination */
2141 rgSchCmnDciFrmtSizes[2] = 1 + /* Flag for format0/format1a differentiation */
2142 1 + /* Localized/distributed VRB assignment flag */
2145 3 + /* Harq process Id */
2147 4 + /* Harq process Id */
2148 2 + /* UL Index or DAI */
2150 1 + /* New Data Indicator */
2153 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2154 (cell->bwCfg.dlTotalBw + 1))/2);
2155 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
2156 Since VRB is local */
2158 /* DCI Format 1B size determination */
2159 rgSchCmnDciFrmtSizes[3] = 1 +
2160 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2161 (cell->bwCfg.dlTotalBw + 1))/2) +
2171 ((cell->numTxAntPorts == 4)? 4:2) +
2174 /* DCI Format 1C size determination */
2175 /* Approximation: NDLVrbGap1 ~= Nprb for DL */
2176 rgSchCmnDciFrmtSizes[4] = (cell->bwCfg.dlTotalBw < 50)? 0:1 +
2177 (cell->bwCfg.dlTotalBw < 50)?
2178 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/2 * \
2179 (cell->bwCfg.dlTotalBw/2 + 1))/2)) :
2180 (rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw/4 * \
2181 (cell->bwCfg.dlTotalBw/4 + 1))/2)) +
2184 /* DCI Format 1D size determination */
2185 rgSchCmnDciFrmtSizes[5] = 1 +
2186 rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
2187 (cell->bwCfg.dlTotalBw + 1))/2) +
2196 ((cell->numTxAntPorts == 4)? 4:2) +
2199 /* DCI Format 2 size determination */
2200 rgSchCmnDciFrmtSizes[6] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2201 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2209 ((cell->numTxAntPorts == 4)? 6:3);
2211 /* DCI Format 2A size determination */
2212 rgSchCmnDciFrmtSizes[7] = ((cell->bwCfg.dlTotalBw < 10)?0:1) +
2213 RGSCH_CEIL(cell->bwCfg.dlTotalBw, cell->rbgSize) +
2221 ((cell->numTxAntPorts == 4)? 2:0);
2223 /* DCI Format 3 size determination */
2224 rgSchCmnDciFrmtSizes[8] = rgSchCmnDciFrmtSizes[0];
2226 /* DCI Format 3A size determination */
2227 rgSchCmnDciFrmtSizes[9] = rgSchCmnDciFrmtSizes[0];
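/* Worked example (added note): per 36.212 the Format 1A resource block
 * assignment field is ceil(log2(bw * (bw + 1) / 2)); for bw = 50 that is
 * ceil(log2(1275)) = 11 bits, which the rgSCHUtlLog32bitNbase2((bw * (bw + 1))/2)
 * terms above are presumably computing (assuming that utility returns the
 * ceiling of the base-2 logarithm). */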
2234 * @brief This function initializes the cmnCell->dciAggrLvl table.
2238 * Function: rgSCHCmnGetCqiDciFrmt2AggrLvl
2239 * Purpose: This function determines the Aggregation level
2240 * for each CQI level against each DCI format.
2241 * Invoked by: rgSCHCmnRgrCellCfg
2247 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl
2252 PRIVATE Void rgSCHCmnGetCqiDciFrmt2AggrLvl(cell)
2256 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2260 TRC2(rgSCHCmnGetCqiDciFrmt2AggrLvl);
2262 for (i = 0; i < RG_SCH_CMN_MAX_CQI; i++)
2264 for (j = 0; j < 10; j++)
2266 U32 pdcchBits; /* Actual number of phy bits needed for a given DCI Format
2267 * for a given CQI Level */
2268 pdcchBits = (rgSchCmnDciFrmtSizes[j] * 1024)/rgSchCmnCqiPdcchEff[i];
2270 if (pdcchBits < 192)
2272 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL2;
2275 if (pdcchBits < 384)
2277 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL4;
2280 if (pdcchBits < 768)
2282 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL8;
2285 cellSch->dciAggrLvl[i][j] = CM_LTE_AGGR_LVL16;
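/* Worked example (added note, numbers illustrative): if a DCI format is
 * 45 bits and rgSchCmnCqiPdcchEff[i] = 200, then
 * pdcchBits = 45 * 1024 / 200 = 230, which is >= 192 and < 384, so
 * CM_LTE_AGGR_LVL4 is chosen for that CQI/format pair. */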
2292 * @brief This function initializes all the data for the scheduler.
2296 * Function: rgSCHCmnDlInit
2297 * Purpose: This function initializes the following information:
2298 * 1. Efficiency table
2299 * 2. CQI to table index - It is one row for up to 3 RBs
2300 * and another row for greater than 3 RBs.
2301 * Currently extended prefix is compiled out.
2302 * Invoked by: MAC initialization code, possibly ActvInit
2308 PRIVATE Void rgSCHCmnDlInit
2312 PRIVATE Void rgSCHCmnDlInit()
2319 RgSchCmnTbSzEff *effTbl;
2320 RgSchCmnCqiToTbs *tbsTbl;
2322 TRC2(rgSCHCmnDlInit);
2324 /* 0 corresponds to the single-layer case, 1 corresponds to the 2-layer case */
2325 /* Init Efficiency table for normal cyclic prefix */
2326 /*Initialize Efficiency table for Layer Index 0 */
2327 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2328 /*Initialize Efficiency table for each of the CFI indices. The
2329 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2330 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[0];
2331 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[0];
2332 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[0];
2333 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[0];
2334 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2335 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[0];
2336 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[0];
2337 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[0];
2338 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[0];
2339 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2340 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[0];
2341 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[0];
2342 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[0];
2343 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[0];
2345 /*Initialize CQI to TBS table for Layer Index 0 for Normal CP */
2346 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[0];
2347 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[0];
2348 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[0];
2349 rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[0];
2351 /*Initialize Efficiency table for Layer Index 1 */
2352 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2353 /*Initialize Efficiency table for each of the CFI indices. The
2354 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2355 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][0] = &rgSchCmnNorCfi1Eff[1];
2356 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][1] = &rgSchCmnNorCfi2Eff[1];
2357 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][2] = &rgSchCmnNorCfi3Eff[1];
2358 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][0][3] = &rgSchCmnNorCfi4Eff[1];
2359 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2360 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][0] = &rgSchCmnNorCfi1Eff[1];
2361 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][1] = &rgSchCmnNorCfi2Eff[1];
2362 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][2] = &rgSchCmnNorCfi3Eff[1];
2363 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][1][3] = &rgSchCmnNorCfi4Eff[1];
2364 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2365 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][0] = &rgSchCmnNorCfi1Eff[1];
2366 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][1] = &rgSchCmnNorCfi2Eff[1];
2367 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][2] = &rgSchCmnNorCfi3Eff[1];
2368 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][2][3] = &rgSchCmnNorCfi4Eff[1];
2370 /*Initialize CQI to TBS table for Layer Index 1 for Normal CP */
2371 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][0] = &rgSchCmnNorCfi1CqiToTbs[1];
2372 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][1] = &rgSchCmnNorCfi2CqiToTbs[1];
2373 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][2] = &rgSchCmnNorCfi3CqiToTbs[1];
2374 rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][3] = &rgSchCmnNorCfi4CqiToTbs[1];
2376 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2378 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2380 /* Efficiency table calculation for 1 and 2 layers for normal CP */
2381 rgSCHCmnCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx,\
2382 rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i]);
2383 rgSCHCmn2LyrCompEff((U8)(i + 1), RG_SCH_CMN_NOR_CP, idx, \
2384 rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i]);
2388 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2390 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2392 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_NOR_CP][idx][i];
2393 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_NOR_CP][i];
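/* Added note: the loop below maps each CQI to the largest TBS index whose
 * computed efficiency does not exceed that CQI's PDSCH efficiency, walking
 * both the TBS indices and the CQIs from highest to lowest. */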
2394 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2395 (j >= 0) && (k > 0); --j)
2397 /* ADD CQI to MCS mapping correction:
2398 * the single-dimensional array is replaced by 2 dimensions for different CFIs */
2399 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2401 (*tbsTbl)[k--] = (U8)j;
2408 /* effTbl, tbsTbl calculation in case of 2 layers for normal CP */
2409 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_NOR_CP][idx][i];
2410 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_NOR_CP][i];
2411 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2412 (j >= 0) && (k > 0); --j)
2414 /* ADD CQI to MCS mapping correction:
2415 * the single-dimensional array is replaced by 2 dimensions for different CFIs */
2416 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2418 (*tbsTbl)[k--] = (U8)j;
2428 /* Efficiency Table for Extended CP */
2429 /*Initialize Efficiency table for Layer Index 0 */
2430 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2431 /*Initialize Efficiency table for each of the CFI indices. The
2432 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2433 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[0];
2434 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[0];
2435 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[0];
2436 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[0];
2437 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2438 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[0];
2439 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[0];
2440 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[0];
2441 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[0];
2442 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2443 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[0];
2444 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[0];
2445 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[0];
2446 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[0];
2448 /*Initialize CQI to TBS table for Layer Index 0 for Extended CP */
2449 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[0];
2450 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[0];
2451 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[0];
2452 rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[0];
2454 /*Initialize Efficiency table for Layer Index 1 */
2455 /*Initialize Efficiency table for each of the CFI indices. The
2456 * 4th Dimension of the rgSCHCmnEffTbl table refers to the CFI Index*/
2457 /*Initialize Efficiency table for Tx Antenna Port Index 0 */
2458 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][0] = &rgSchCmnExtCfi1Eff[1];
2459 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][1] = &rgSchCmnExtCfi2Eff[1];
2460 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][2] = &rgSchCmnExtCfi3Eff[1];
2461 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][0][3] = &rgSchCmnExtCfi4Eff[1];
2462 /*Initialize Efficiency table for Tx Antenna Port Index 1 */
2463 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][0] = &rgSchCmnExtCfi1Eff[1];
2464 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][1] = &rgSchCmnExtCfi2Eff[1];
2465 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][2] = &rgSchCmnExtCfi3Eff[1];
2466 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][1][3] = &rgSchCmnExtCfi4Eff[1];
2467 /*Initialize Efficiency table for Tx Antenna Port Index 2 */
2468 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][0] = &rgSchCmnExtCfi1Eff[1];
2469 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][1] = &rgSchCmnExtCfi2Eff[1];
2470 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][2] = &rgSchCmnExtCfi3Eff[1];
2471 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][2][3] = &rgSchCmnExtCfi4Eff[1];
2473 /*Initialize CQI to TBS table for Layer Index 1 for Extended CP */
2474 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][0] = &rgSchCmnExtCfi1CqiToTbs[1];
2475 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][1] = &rgSchCmnExtCfi2CqiToTbs[1];
2476 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][2] = &rgSchCmnExtCfi3CqiToTbs[1];
2477 rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][3] = &rgSchCmnExtCfi4CqiToTbs[1];
2478 /* Activate this code when extended cp is supported */
2479 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2481 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2483 /* Efficiency table calculation for 1 and 2 layers for extended CP */
2484 rgSCHCmnCompEff( (U8)(i + 1 ), (U8)RG_SCH_CMN_EXT_CP, idx,\
2485 rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i]);
2486 rgSCHCmn2LyrCompEff((U8)(i + 1), (U8) RG_SCH_CMN_EXT_CP,idx, \
2487 rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i]);
2491 for (idx = 0; idx < RG_SCH_CMN_MAX_ANT_CONF; idx++)
2493 for (i = 0; i < RG_SCH_CMN_MAX_CFI; i++)
2495 effTbl = rgSchCmnEffTbl[0][RG_SCH_CMN_EXT_CP][idx][i];
2496 tbsTbl = rgSchCmnCqiToTbs[0][RG_SCH_CMN_EXT_CP][i];
2497 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2498 (j >= 0) && (k > 0); --j)
2500 /* ADD CQI to MCS mapping correction:
2501 * the single-dimensional array is replaced by 2 dimensions for different CFIs */
2502 if ((*effTbl)[j] <= rgSchCmnCqiPdschEff[i][k])
2504 (*tbsTbl)[k--] = (U8)j;
2511 /* effTbl, tbsTbl calculation in case of 2 layers for extended CP */
2512 effTbl = rgSchCmnEffTbl[1][RG_SCH_CMN_EXT_CP][idx][i];
2513 tbsTbl = rgSchCmnCqiToTbs[1][RG_SCH_CMN_EXT_CP][i];
2514 for (j = RG_SCH_CMN_NUM_TBS - 1, k = RG_SCH_CMN_MAX_CQI - 1;
2515 (j >= 0) && (k > 0); --j)
2517 /* ADD CQI to MCS mapping correction:
2518 * the single-dimensional array is replaced by 2 dimensions for different CFIs */
2519 if ((*effTbl)[j] <= rgSchCmn2LyrCqiPdschEff[i][k])
2521 (*tbsTbl)[k--] = (U8)j;
2534 * @brief This function initializes all the data for the scheduler.
2538 * Function: rgSCHCmnUlInit
2539 * Purpose: This function initializes the following information:
2540 * 1. Efficiency table
2541 * 2. CQI to table index - It is one row for up to 3 RBs
2542 * and another row for greater than 3 RBs.
2543 * Currently extended prefix is compiled out.
2544 * Invoked by: MAC initialization code, possibly ActvInit
2550 PRIVATE Void rgSCHCmnUlInit
2554 PRIVATE Void rgSCHCmnUlInit()
2557 U8 *mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_NOR_CP][0];
2558 RgSchCmnTbSzEff *effTbl = &rgSchCmnNorUlEff[0];
2559 CONSTANT RgSchCmnUlCqiInfo *cqiTbl = &rgSchCmnUlCqiTbl[0];
2562 TRC2(rgSCHCmnUlInit);
2564 /* Initializing new variable added for UL efficiency */
2565 rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP] = &rgSchCmnNorUlEff[0];
2566 /* Three symbols are excluded from the efficiency table
2567 * computation: 2 symbols for DMRS (1 in each slot)
2568 * and 1 symbol for SRS */
2569 rgSCHCmnCompUlEff(RGSCH_UL_SYM_DMRS_SRS,RG_SCH_CMN_NOR_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_NOR_CP]);
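/* Illustrative arithmetic (added note, symbol count per subframe assumed):
 * with normal CP, 14 SC-FDMA symbols per subframe minus 2 DMRS and 1 SRS
 * symbols leaves 11 data symbols, i.e. 11 * 12 = 132 REs per RB pair feeding
 * the UL efficiency computation. */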
2571 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2572 i >= 0 && j > 0; --i)
2574 if ((*effTbl)[i] <= cqiTbl[j].eff)
2576 mapTbl[j--] = (U8)i;
2583 effTbl = &rgSchCmnExtUlEff[0];
2584 mapTbl = &rgSchCmnUlCqiToTbsTbl[RG_SCH_CMN_EXT_CP][0];
2586 /* Initializing new variable added for UL efficiency */
2587 rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP] = &rgSchCmnExtUlEff[0];
2588 /* Three symbols are excluded from the efficiency table
2589 * computation: 2 symbols for DMRS (1 in each slot)
2590 * and 1 symbol for SRS */
2591 rgSCHCmnCompUlEff(RGSCH_UL_SYM_DMRS_SRS,RG_SCH_CMN_EXT_CP,rgSchCmnUlEffTbl[RG_SCH_CMN_EXT_CP]);
2593 for (i = RGSCH_NUM_ITBS - 1, j = RG_SCH_CMN_UL_NUM_CQI - 1;
2594 i >= 0 && j > 0; --i)
2596 if ((*effTbl)[i] <= cqiTbl[j].eff)
2598 mapTbl[j--] = (U8)i;
2610 * @brief This function initializes all the data for the scheduler.
2614 * Function: rgSCHCmnInit
2615 * Purpose: This function initializes the following information:
2616 * 1. Efficiency table
2617 * 2. CQI to table index - It is one row for up to 3 RBs
2618 * and another row for greater than 3 RBs.
2619 * Currently extended prefix is compiled out.
2620 * Invoked by: MAC initialization code, possibly ActvInit
2626 PUBLIC Void rgSCHCmnInit
2630 PUBLIC Void rgSCHCmnInit()
2639 rgSCHEmtcCmnDlInit();
2640 rgSCHEmtcCmnUlInit();
2646 /* Init the function pointers */
2647 rgSchCmnApis.rgSCHRgrUeCfg = rgSCHCmnRgrUeCfg;
2648 rgSchCmnApis.rgSCHRgrUeRecfg = rgSCHCmnRgrUeRecfg;
2649 rgSchCmnApis.rgSCHFreeUe = rgSCHCmnUeDel;
2650 rgSchCmnApis.rgSCHRgrCellCfg = rgSCHCmnRgrCellCfg;
2651 rgSchCmnApis.rgSCHRgrCellRecfg = rgSCHCmnRgrCellRecfg;
2652 rgSchCmnApis.rgSCHFreeCell = rgSCHCmnCellDel;
2653 rgSchCmnApis.rgSCHRgrLchCfg = rgSCHCmnRgrLchCfg;
2654 rgSchCmnApis.rgSCHRgrLcgCfg = rgSCHCmnRgrLcgCfg;
2655 rgSchCmnApis.rgSCHRgrLchRecfg = rgSCHCmnRgrLchRecfg;
2656 rgSchCmnApis.rgSCHRgrLcgRecfg = rgSCHCmnRgrLcgRecfg;
2657 rgSchCmnApis.rgSCHFreeDlLc = rgSCHCmnFreeDlLc;
2658 rgSchCmnApis.rgSCHFreeLcg = rgSCHCmnLcgDel;
2659 rgSchCmnApis.rgSCHRgrLchDel = rgSCHCmnRgrLchDel;
2660 rgSchCmnApis.rgSCHActvtUlUe = rgSCHCmnActvtUlUe;
2661 rgSchCmnApis.rgSCHActvtDlUe = rgSCHCmnActvtDlUe;
2662 rgSchCmnApis.rgSCHHdlUlTransInd = rgSCHCmnHdlUlTransInd;
2663 rgSchCmnApis.rgSCHDlDedBoUpd = rgSCHCmnDlDedBoUpd;
2664 rgSchCmnApis.rgSCHUlRecMsg3Alloc = rgSCHCmnUlRecMsg3Alloc;
2665 rgSchCmnApis.rgSCHUlCqiInd = rgSCHCmnUlCqiInd;
2666 rgSchCmnApis.rgSCHPucchDeltaPwrInd = rgSCHPwrPucchDeltaInd;
2667 rgSchCmnApis.rgSCHUlHqProcForUe = rgSCHCmnUlHqProcForUe;
2669 rgSchCmnApis.rgSCHUpdUlHqProc = rgSCHCmnUpdUlHqProc;
2671 rgSchCmnApis.rgSCHUpdBsrShort = rgSCHCmnUpdBsrShort;
2672 rgSchCmnApis.rgSCHUpdBsrTrunc = rgSCHCmnUpdBsrTrunc;
2673 rgSchCmnApis.rgSCHUpdBsrLong = rgSCHCmnUpdBsrLong;
2674 rgSchCmnApis.rgSCHUpdPhr = rgSCHCmnUpdPhr;
2675 rgSchCmnApis.rgSCHUpdExtPhr = rgSCHCmnUpdExtPhr;
2676 rgSchCmnApis.rgSCHContResUlGrant = rgSCHCmnContResUlGrant;
2677 rgSchCmnApis.rgSCHSrRcvd = rgSCHCmnSrRcvd;
2678 rgSchCmnApis.rgSCHFirstRcptnReq = rgSCHCmnFirstRcptnReq;
2679 rgSchCmnApis.rgSCHNextRcptnReq = rgSCHCmnNextRcptnReq;
2680 rgSchCmnApis.rgSCHFirstHqFdbkAlloc = rgSCHCmnFirstHqFdbkAlloc;
2681 rgSchCmnApis.rgSCHNextHqFdbkAlloc = rgSCHCmnNextHqFdbkAlloc;
2682 rgSchCmnApis.rgSCHDlProcAddToRetx = rgSCHCmnDlProcAddToRetx;
2683 rgSchCmnApis.rgSCHDlCqiInd = rgSCHCmnDlCqiInd;
2685 rgSchCmnApis.rgSCHUlProcAddToRetx = rgSCHCmnEmtcUlProcAddToRetx;
2688 rgSchCmnApis.rgSCHSrsInd = rgSCHCmnSrsInd;
2690 rgSchCmnApis.rgSCHDlTARpt = rgSCHCmnDlTARpt;
2691 rgSchCmnApis.rgSCHDlRlsSubFrm = rgSCHCmnDlRlsSubFrm;
2692 rgSchCmnApis.rgSCHUeReset = rgSCHCmnUeReset;
2694 rgSchCmnApis.rgSCHHdlCrntiCE = rgSCHCmnHdlCrntiCE;
2695 rgSchCmnApis.rgSCHDlProcAck = rgSCHCmnDlProcAck;
2696 rgSchCmnApis.rgSCHDlRelPdcchFbk = rgSCHCmnDlRelPdcchFbk;
2697 rgSchCmnApis.rgSCHUlSpsRelInd = rgSCHCmnUlSpsRelInd;
2698 rgSchCmnApis.rgSCHUlSpsActInd = rgSCHCmnUlSpsActInd;
2699 rgSchCmnApis.rgSCHUlCrcFailInd = rgSCHCmnUlCrcFailInd;
2700 rgSchCmnApis.rgSCHUlCrcInd = rgSCHCmnUlCrcInd;
2702 rgSchCmnApis.rgSCHDrxStrtInActvTmrInUl = rgSCHCmnDrxStrtInActvTmrInUl;
2703 rgSchCmnApis.rgSCHUpdUeDataIndLcg = rgSCHCmnUpdUeDataIndLcg;
2705 for (idx = 0; idx < RGSCH_NUM_SCHEDULERS; ++idx)
2707 rgSchUlSchdInits[idx](&rgSchUlSchdTbl[idx]);
2708 rgSchDlSchdInits[idx](&rgSchDlSchdTbl[idx]);
2711 for (idx = 0; idx < RGSCH_NUM_EMTC_SCHEDULERS; ++idx)
2713 rgSchEmtcUlSchdInits[idx](&rgSchEmtcUlSchdTbl[idx]);
2714 rgSchEmtcDlSchdInits[idx](&rgSchEmtcDlSchdTbl[idx]);
2717 #if (defined (RG_PHASE2_SCHED) && defined(TFU_UPGRADE))
2718 for (idx = 0; idx < RGSCH_NUM_DLFS_SCHEDULERS; ++idx)
2720 rgSchDlfsSchdInits[idx](&rgSchDlfsSchdTbl[idx]);
2724 rgSchCmnApis.rgSCHRgrSCellUeCfg = rgSCHCmnRgrSCellUeCfg;
2725 rgSchCmnApis.rgSCHRgrSCellUeDel = rgSCHCmnRgrSCellUeDel;
2732 * @brief This function is a wrapper to call scheduler specific API.
2736 * Function: rgSCHCmnDlRlsSubFrm
2737 * Purpose: Releases scheduler Information from DL SubFrm.
2741 * @param[in] RgSchCellCb *cell
2742 * @param[out] CmLteTimingInfo frm
2747 PUBLIC Void rgSCHCmnDlRlsSubFrm
2753 PUBLIC Void rgSCHCmnDlRlsSubFrm(cell, frm)
2755 CmLteTimingInfo frm;
2758 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2761 TRC2(rgSCHCmnDlRlsSubFrm);
2763 /* Get the pointer to the subframe */
2764 sf = rgSCHUtlSubFrmGet(cell, frm);
2766 rgSCHUtlSubFrmPut(cell, sf);
2769 /* Re-initialize DLFS specific information for the sub-frame */
2770 cellSch->apisDlfs->rgSCHDlfsReinitSf(cell, sf);
2778 * @brief This function is the starting function for DL allocation.
2782 * Function: rgSCHCmnDlCcchRarAlloc
2783 * Purpose: Scheduling for downlink. It performs allocation in the order
2784 * of priority with BCCH/PCH first, CCCH, Random Access and TA.
2786 * Invoked by: Scheduler
2788 * @param[in] RgSchCellCb* cell
2789 * @param[out] RgSchCmnDlRbAllocInfo* allocInfo
2794 PRIVATE Void rgSCHCmnDlCcchRarAlloc
2799 PRIVATE Void rgSCHCmnDlCcchRarAlloc(cell)
2803 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2805 TRC2(rgSCHCmnDlCcchRarAlloc);
2807 rgSCHCmnDlCcchRetx(cell, &cellSch->allocInfo);
2808 /* LTE_ADV_FLAG_REMOVED_START */
2809 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2811 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2813 /* eNodeB needs to blank the subframe */
2817 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
2822 rgSCHCmnDlCcchTx(cell, &cellSch->allocInfo);
2824 /* LTE_ADV_FLAG_REMOVED_END */
2828 /* Added these function calls for processing a CCCH SDU arriving
2829 * after guard timer expiry. They differ from the two functions above
2830 * in using ueCb instead of raCb. */
2831 rgSCHCmnDlCcchSduRetx(cell, &cellSch->allocInfo);
2832 /* LTE_ADV_FLAG_REMOVED_START */
2833 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2835 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2837 /* eNodeB needs to blank the subframe */
2841 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2846 rgSCHCmnDlCcchSduTx(cell, &cellSch->allocInfo);
2848 /* LTE_ADV_FLAG_REMOVED_END */
2852 if(cellSch->ul.msg3SchdIdx != RGSCH_INVALID_INFO)
2854 /* Do not schedule msg3 if there is a CFI change ongoing */
2855 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2857 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2861 /* LTE_ADV_FLAG_REMOVED_START */
2862 if(RG_SCH_ABS_ENABLED_ABS_SF == cell->lteAdvCb.absDlSfInfo)
2864 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
2866 /* eNodeB needs to blank the subframe */
2870 /* Do not schedule msg3 if there is a CFI change ongoing */
2871 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2873 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2879 /* Do not schedule msg3 if there is a CFI change ongoing */
2880 if (cellSch->dl.currCfi == cellSch->dl.newCfi)
2882 rgSCHCmnDlRaRsp(cell, &cellSch->allocInfo);
2885 /* LTE_ADV_FLAG_REMOVED_END */
2893 * @brief Scheduling for CCCH SDU.
2897 * Function: rgSCHCmnCcchSduAlloc
2898 * Purpose: Scheduling for CCCH SDU
2900 * Invoked by: Scheduler
2902 * @param[in] RgSchCellCb* cell
2903 * @param[in] RgSchUeCb* ueCb
2904 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
2909 PRIVATE S16 rgSCHCmnCcchSduAlloc
2913 RgSchCmnDlRbAllocInfo *allocInfo
2916 PRIVATE S16 rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)
2919 RgSchCmnDlRbAllocInfo *allocInfo;
2922 RgSchDlRbAlloc *rbAllocInfo;
2923 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2924 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
2926 TRC2(rgSCHCmnCcchSduAlloc);
2928 /* Return if subframe BW exhausted */
2929 if (allocInfo->ccchSduAlloc.ccchSduDlSf->bw <=
2930 allocInfo->ccchSduAlloc.ccchSduDlSf->bwAssigned)
2932 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2933 "bw<=bwAssigned for UEID:%d",ueCb->ueId);
2937 if (rgSCHDhmGetCcchSduHqProc(ueCb, cellSch->dl.time, &(ueDl->proc)) != ROK)
2939 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2940 "rgSCHDhmGetCcchSduHqProc failed UEID:%d",ueCb->ueId);
2944 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
2945 rbAllocInfo->dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
2947 if (rgSCHCmnCcchSduDedAlloc(cell, ueCb) != ROK)
2949 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
2950 rgSCHDhmRlsHqpTb(ueDl->proc, 0, FALSE);
2951 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
2952 "rgSCHCmnCcchSduDedAlloc failed UEID:%d",ueCb->ueId);
2955 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduTxLst, &ueDl->proc->reqLnk);
2956 ueDl->proc->reqLnk.node = (PTR)ueDl->proc;
2957 allocInfo->ccchSduAlloc.ccchSduDlSf->schdCcchUe++;
2962 * @brief This function schedules downlink CCCH messages.
2966 * Function: rgSCHCmnDlCcchSduTx
2967 * Purpose: Scheduling of new transmissions for downlink CCCH SDUs
2969 * Invoked by: Scheduler
2971 * @param[in] RgSchCellCb *cell
2972 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
2977 PRIVATE Void rgSCHCmnDlCcchSduTx
2980 RgSchCmnDlRbAllocInfo *allocInfo
2983 PRIVATE Void rgSCHCmnDlCcchSduTx(cell, allocInfo)
2985 RgSchCmnDlRbAllocInfo *allocInfo;
2990 RgSchCmnDlUe *ueCmnDl;
2991 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
2993 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
2995 TRC2(rgSCHCmnDlCcchSduTx);
2997 node = cell->ccchSduUeLst.first;
3000 if(cellSch->dl.maxCcchPerDlSf &&
3001 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3007 ueCb = (RgSchUeCb *)(node->node);
3008 ueCmnDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3010 /* Fix : syed postpone scheduling for this UE
3011 * until msg4 is done */
3012 /* Fix : syed RLC can erroneously send CCCH SDU BO
3013 * twice. Hence an extra guard to avoid scheduling if it is
3014 * already scheduled for RETX */
3015 if ((!(ueCb->dl.dlInactvMask & RG_HQENT_INACTIVE)) &&
3018 if ((rgSCHCmnCcchSduAlloc(cell, ueCb, allocInfo)) != ROK)
3025 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"ERROR!! THIS SHOULD "
3026 "NEVER HAPPEN for UEID:%d", ueCb->ueId);
3036 * @brief This function schedules downlink CCCH (Msg4) transmissions.
3040 * Function: rgSCHCmnDlCcchTx
3041 * Purpose: Scheduling of Msg4 transmissions on downlink CCCH
3043 * Invoked by: Scheduler
3045 * @param[in] RgSchCellCb *cell
3046 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3051 PRIVATE Void rgSCHCmnDlCcchTx
3054 RgSchCmnDlRbAllocInfo *allocInfo
3057 PRIVATE Void rgSCHCmnDlCcchTx(cell, allocInfo)
3059 RgSchCmnDlRbAllocInfo *allocInfo;
3064 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3065 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3067 TRC2(rgSCHCmnDlCcchTx);
3069 node = cell->raInfo.toBeSchdLst.first;
3072 if(cellSch->dl.maxCcchPerDlSf &&
3073 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3080 raCb = (RgSchRaCb *)(node->node);
3082 /* Address allocation for this UE for MSG 4 */
3083 /* Allocation for Msg4 */
3084 if ((rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)) != ROK)
3095 * @brief This function schedules downlink CCCH SDU retransmissions.
3099 * Function: rgSCHCmnDlCcchSduRetx
3100 * Purpose: Scheduling of retransmissions for downlink CCCH SDUs
3102 * Invoked by: Scheduler
3104 * @param[in] RgSchCellCb *cell
3105 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3110 PRIVATE Void rgSCHCmnDlCcchSduRetx
3113 RgSchCmnDlRbAllocInfo *allocInfo
3116 PRIVATE Void rgSCHCmnDlCcchSduRetx(cell, allocInfo)
3118 RgSchCmnDlRbAllocInfo *allocInfo;
3121 RgSchDlRbAlloc *rbAllocInfo;
3123 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3125 RgSchDlHqProcCb *hqP;
3128 RgSchDlSf *dlSf = allocInfo->ccchSduAlloc.ccchSduDlSf;
3130 TRC2(rgSCHCmnDlCcchSduRetx);
3132 node = cellSch->dl.ccchSduRetxLst.first;
3135 if(cellSch->dl.maxCcchPerDlSf &&
3136 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3143 hqP = (RgSchDlHqProcCb *)(node->node);
3146 /* DwPts Scheduling Changes Start */
3148 if (rgSCHCmnRetxAvoidTdd(allocInfo->ccchSduAlloc.ccchSduDlSf,
3154 /* DwPts Scheduling Changes End */
3156 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3160 ueCb = (RgSchUeCb*)(hqP->hqE->ue);
3161 ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
3163 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
3164 /* Fill RB Alloc Info */
3165 rbAllocInfo->dlSf = dlSf;
3166 rbAllocInfo->tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3167 rbAllocInfo->rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3168 /* Fix : syed iMcs setting did not correspond to RETX */
3169 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3170 rbAllocInfo->tbInfo[0].imcs);
3171 rbAllocInfo->rnti = ueCb->ueId;
3172 rbAllocInfo->tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3173 /* Fix : syed Copying info in entirety without depending on stale TX information */
3174 rbAllocInfo->tbInfo[0].tbCb = &hqP->tbInfo[0];
3175 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
3176 /* Fix : syed Assigning proc to scratchpad */
3179 retxBw += rbAllocInfo->rbsReq;
3181 cmLListAdd2Tail(&allocInfo->ccchSduAlloc.ccchSduRetxLst, \
3183 hqP->reqLnk.node = (PTR)hqP;
3187 dlSf->bwAssigned += retxBw;
3193 * @brief This function schedules downlink CCCH (Msg4) retransmissions.
3197 * Function: rgSCHCmnDlCcchRetx
3198 * Purpose: Scheduling for downlink CCCH
3200 * Invoked by: Scheduler
3202 * @param[in] RgSchCellCb *cell
3203 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3208 PRIVATE Void rgSCHCmnDlCcchRetx
3211 RgSchCmnDlRbAllocInfo *allocInfo
3214 PRIVATE Void rgSCHCmnDlCcchRetx(cell, allocInfo)
3216 RgSchCmnDlRbAllocInfo *allocInfo;
3220 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3222 RgSchDlHqProcCb *hqP;
3224 RgSchDlSf *dlSf = allocInfo->msg4Alloc.msg4DlSf;
3226 TRC2(rgSCHCmnDlCcchRetx);
3228 node = cellSch->dl.msg4RetxLst.first;
3231 if(cellSch->dl.maxCcchPerDlSf &&
3232 dlSf->schdCcchUe == cellSch->dl.maxCcchPerDlSf)
3238 hqP = (RgSchDlHqProcCb *)(node->node);
3242 /* DwPts Scheduling Changes Start */
3244 if (rgSCHCmnRetxAvoidTdd(allocInfo->msg4Alloc.msg4DlSf,
3250 /* DwPts Scheduling Changes End */
3252 if (hqP->tbInfo[0].dlGrnt.numRb > (dlSf->bw - dlSf->bwAssigned))
3256 raCb = (RgSchRaCb*)(hqP->hqE->raCb);
3257 /* Fill RB Alloc Info */
3258 raCb->rbAllocInfo.dlSf = dlSf;
3259 raCb->rbAllocInfo.tbInfo[0].bytesReq = hqP->tbInfo[0].ccchSchdInfo.totBytes;
3260 raCb->rbAllocInfo.rbsReq = hqP->tbInfo[0].dlGrnt.numRb;
3261 /* Fix : syed iMcs setting did not correspond to RETX */
3262 RG_SCH_CMN_GET_MCS_FOR_RETX((&hqP->tbInfo[0]),
3263 raCb->rbAllocInfo.tbInfo[0].imcs);
3264 raCb->rbAllocInfo.rnti = raCb->tmpCrnti;
3265 raCb->rbAllocInfo.tbInfo[0].noLyr = hqP->tbInfo[0].numLyrs;
3266 /* Fix; syed Copying info in entirety without depending on stale TX information */
3267 raCb->rbAllocInfo.tbInfo[0].tbCb = &hqP->tbInfo[0];
3268 raCb->rbAllocInfo.tbInfo[0].schdlngForTb = TRUE;
3270 retxBw += raCb->rbAllocInfo.rbsReq;
3272 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4RetxLst, \
3274 hqP->reqLnk.node = (PTR)hqP;
3278 dlSf->bwAssigned += retxBw;
3284 * @brief This function implements scheduler DL allocation
3285 * for broadcast (on PDSCH) and paging.
3289 * Function: rgSCHCmnDlBcchPcch
3290 * Purpose: This function implements scheduler for DL allocation
3291 * for broadcast (on PDSCH) and paging.
3293 * Invoked by: Scheduler
3295 * @param[in] RgSchCellCb* cell
3301 PRIVATE Void rgSCHCmnDlBcchPcch
3304 RgSchCmnDlRbAllocInfo *allocInfo,
3305 RgInfSfAlloc *subfrmAlloc
3308 PRIVATE Void rgSCHCmnDlBcchPcch(cell, allocInfo, subfrmAlloc)
3310 RgSchCmnDlRbAllocInfo *allocInfo;
3311 RgInfSfAlloc *subfrmAlloc;
3314 CmLteTimingInfo frm;
3316 RgSchClcDlLcCb *pcch;
3320 RgSchClcDlLcCb *bcch, *bch;
3321 #endif/*RGR_SI_SCH*/
3324 TRC2(rgSCHCmnDlBcchPcch);
3326 frm = cell->crntTime;
3328 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
3329 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
3330 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
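/* Illustrative timing arithmetic (values assumed, not taken from configuration):
 * with RG_SCH_CMN_DL_DELTA assumed to be 2 purely for illustration, the scheduler
 * looks 2 + 7 = 9 subframes ahead. If crntTime is {sfn = 100, subframe = 8},
 * RGSCH_INCR_SUB_FRAME advances it by 9 subframes to {sfn = 101, subframe = 7}
 * (modulo-10 subframe, modulo-1024 sfn). */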
3332 // RGSCH_SUBFRAME_INDEX(frm);
3333 //RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
3336 /* Compute the subframe for which allocation is being made */
3337 /* essentially, we need pointer to the dl frame for this subframe */
3338 sf = rgSCHUtlSubFrmGet(cell, frm);
3342 bch = rgSCHDbmGetBcchOnBch(cell);
3343 #if (ERRCLASS & ERRCLS_DEBUG)
3346 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on BCH is not configured");
3350 if (bch->boLst.first != NULLP)
3352 bo = (RgSchClcBoRpt *)(bch->boLst.first->node);
3353 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3355 sf->bch.tbSize = bo->bo;
3356 cmLListDelFrm(&bch->boLst, bch->boLst.first);
3357 /* ccpu00117052 - MOD - Passing double pointer
3358 for proper NULLP assignment*/
3359 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(*bo));
3360 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, bch->lcId,TRUE);
3365 if ((frm.sfn % 4 == 0) && (frm.subframe == 0))
3370 allocInfo->bcchAlloc.schdFirst = FALSE;
3371 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
3372 #if (ERRCLASS & ERRCLS_DEBUG)
3375 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3379 if (bcch->boLst.first != NULLP)
3381 bo = (RgSchClcBoRpt *)(bcch->boLst.first->node);
3383 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3385 allocInfo->bcchAlloc.schdFirst = TRUE;
3386 /* Time to perform allocation for this BCCH transmission */
3387 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
3391 if(!allocInfo->bcchAlloc.schdFirst)
3394 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
3395 #if (ERRCLASS & ERRCLS_DEBUG)
3398 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"BCCH on DLSCH is not configured");
3402 lnk = bcch->boLst.first;
3403 while (lnk != NULLP)
3405 bo = (RgSchClcBoRpt *)(lnk->node);
3407 valid = rgSCHCmnChkInWin(frm, bo->timeToTx, bo->maxTimeToTx);
3411 bo->i = RGSCH_CALC_SF_DIFF(frm, bo->timeToTx);
3412 /* Time to perform allocation for this BCCH transmission */
3413 rgSCHCmnClcAlloc(cell, sf, bcch, RGSCH_SI_RNTI, allocInfo);
3418 valid = rgSCHCmnChkPastWin(frm, bo->maxTimeToTx);
3421 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
3422 /* ccpu00117052 - MOD - Passing double pointer
3423 for proper NULLP assignment*/
3424 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo,
3425 sizeof(RgSchClcBoRpt));
3431 rgSCHDlSiSched(cell, allocInfo, subfrmAlloc);
3432 #endif/*RGR_SI_SCH*/
3434 pcch = rgSCHDbmGetPcch(cell);
3438 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"PCCH on DLSCH is not configured");
3442 if (pcch->boLst.first != NULLP)
3444 bo = (RgSchClcBoRpt *)(pcch->boLst.first->node);
3446 if (RGSCH_TIMEINFO_SAME(frm, bo->timeToTx))
3448 /* Time to perform allocation for this PCCH transmission */
3449 rgSCHCmnClcAlloc(cell, sf, pcch, RGSCH_P_RNTI, allocInfo);
3457 * Fun: rgSCHCmnChkInWin
3459 * Desc: This function checks whether frm falls within the window [start, end]
3461 * Ret: TRUE - if in window
3466 * File: rg_sch_cmn.c
3470 PUBLIC Bool rgSCHCmnChkInWin
3472 CmLteTimingInfo frm,
3473 CmLteTimingInfo start,
3477 PUBLIC Bool rgSCHCmnChkInWin(frm, start, end)
3478 CmLteTimingInfo frm;
3479 CmLteTimingInfo start;
3480 CmLteTimingInfo end;
3485 TRC2(rgSCHCmnChkInWin);
3487 if (end.sfn > start.sfn)
3489 if (frm.sfn > start.sfn
3490 || (frm.sfn == start.sfn && frm.slot >= start.slot))
3492 if (frm.sfn < end.sfn
3494 || (frm.sfn == end.sfn && frm.slot <= end.slot))
3496 || (frm.sfn == end.sfn && frm.slot <= start.slot))
3503 /* Testing for wrap around, sfn wraparound check should be enough */
3504 else if (end.sfn < start.sfn)
3506 if (frm.sfn > start.sfn
3507 || (frm.sfn == start.sfn && frm.slot >= start.slot))
3513 if (frm.sfn < end.sfn
3514 || (frm.sfn == end.sfn && frm.slot <= end.slot))
3520 else /* start.sfn == end.sfn */
3522 if (frm.sfn == start.sfn
3523 && (frm.slot >= start.slot
3524 && frm.slot <= end.slot))
3531 } /* end of rgSCHCmnChkInWin*/
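/* Usage sketch for rgSCHCmnChkInWin (illustrative timing values only): the
 * wrap-around branch above lets a window span the SFN boundary. For example,
 * with start = {sfn 1020, slot 3} and end = {sfn 2, slot 7},
 * frm = {sfn 1023, slot 0} satisfies the "frm at or after start" leg and the
 * function returns TRUE, while frm = {sfn 5, slot 0} fails both legs and the
 * function returns FALSE. */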
3535 * Fun: rgSCHCmnChkPastWin
3537 * Desc: This function checks if frm has gone past window edge
3539 * Ret: TRUE - if past window edge
3544 * File: rg_sch_cmn.c
3548 PUBLIC Bool rgSCHCmnChkPastWin
3550 CmLteTimingInfo frm,
3554 PUBLIC Bool rgSCHCmnChkPastWin(frm, end)
3555 CmLteTimingInfo frm;
3556 CmLteTimingInfo end;
3559 CmLteTimingInfo refFrm = end;
3562 TRC2(rgSCHCmnChkPastWin);
3564 RGSCH_INCR_FRAME(refFrm.sfn);
3565 RGSCH_INCR_SUB_FRAME(end, 1);
3566 pastWin = rgSCHCmnChkInWin(frm, end, refFrm);
3569 } /* end of rgSCHCmnChkPastWin*/
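/* Illustrative example (values assumed): rgSCHCmnChkPastWin() re-uses the
 * in-window check with a window that starts one subframe after 'end' and
 * stretches one frame ahead (assuming RGSCH_INCR_FRAME / RGSCH_INCR_SUB_FRAME
 * advance by one frame and one subframe respectively). With end = {sfn 10,
 * slot 5}, the check becomes "is frm within [{10,6} .. {11,5}]", so
 * frm = {10, 7} is reported as past the window edge while frm = {10, 4} is not. */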
3572 * @brief This function implements allocation of the resources for common
3573 * channels BCCH, PCCH.
3577 * Function: rgSCHCmnClcAlloc
3578 * Purpose: This function implements selection of the number of RBs based
3579 * on the allowed grant for the service. It is also responsible
3580 * for selection of MCS for the transmission.
3582 * Invoked by: Scheduler
3584 * @param[in] RgSchCellCb *cell,
3585 * @param[in] RgSchDlSf *sf,
3586 * @param[in] RgSchClcDlLcCb *lch,
3587 * @param[in] U16 rnti,
3588 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3593 PRIVATE Void rgSCHCmnClcAlloc
3597 RgSchClcDlLcCb *lch,
3599 RgSchCmnDlRbAllocInfo *allocInfo
3602 PRIVATE Void rgSCHCmnClcAlloc(cell, sf, lch, rnti, allocInfo)
3605 RgSchClcDlLcCb *lch;
3607 RgSchCmnDlRbAllocInfo *allocInfo;
3610 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
3617 U8 cfi = cellDl->currCfi;
3620 TRC2(rgSCHCmnClcAlloc);
3622 bo = (RgSchClcBoRpt *)(lch->boLst.first->node);
3626 /* rgSCHCmnClcRbAllocForFxdTb(cell, bo->bo, cellDl->ccchCqi, &rb);*/
3627 if(cellDl->bitsPerRb==0)
3629 while ((rgTbSzTbl[0][0][rb]) < (tbs*8))
3637 rb = RGSCH_CEIL((tbs*8), cellDl->bitsPerRb);
3639 /* DwPTS Scheduling Changes Start */
3641 if(sf->sfType == RG_SCH_SPL_SF_DATA)
3643 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
3645 /* Calculate the less RE's because of DwPTS */
3646 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
3648 /* Increase number of RBs in Spl SF to compensate for lost REs */
3649 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
3652 /* DwPTS Scheduling Changes End */
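/* Worked example of the DwPTS compensation above (all values assumed purely
 * for illustration): if rb = 10, noResPerRb[cfi] = 120 and
 * numReDwPts[cfi] = 90, then lostRe = 10 * (120 - 90) = 300 REs and rb is
 * increased by RGSCH_CEIL(300, 90) = 4, i.e. 14 RBs are needed in the special
 * subframe to carry what 10 RBs carry in a normal subframe. */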
3653 /*ccpu00115595- end*/
3654 /* additional check to see if the required RBs
3655 * exceed the available bandwidth */
3656 if (rb > sf->bw - sf->bwAssigned)
3658 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"BW allocation "
3659 "failed for CRNTI:%d",rnti);
3663 /* Update the subframe Allocated BW field */
3664 sf->bwAssigned = sf->bwAssigned + rb;
3665 /* Fill in the BCCH/PCCH transmission info to the RBAllocInfo struct */
3666 if (rnti == RGSCH_SI_RNTI)
3668 allocInfo->bcchAlloc.rnti = rnti;
3669 allocInfo->bcchAlloc.dlSf = sf;
3670 allocInfo->bcchAlloc.tbInfo[0].bytesReq = tbs;
3671 allocInfo->bcchAlloc.rbsReq = rb;
3672 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
3673 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
3674 /* Nprb indication at PHY for common Ch */
3675 allocInfo->bcchAlloc.nPrb = bo->nPrb;
3679 allocInfo->pcchAlloc.rnti = rnti;
3680 allocInfo->pcchAlloc.dlSf = sf;
3681 allocInfo->pcchAlloc.tbInfo[0].bytesReq = tbs;
3682 allocInfo->pcchAlloc.rbsReq = rb;
3683 allocInfo->pcchAlloc.tbInfo[0].imcs = mcs;
3684 allocInfo->pcchAlloc.tbInfo[0].noLyr = 1;
3685 allocInfo->pcchAlloc.nPrb = bo->nPrb;
3692 * @brief This function implements PDCCH allocation for common channels.
3696 * Function: rgSCHCmnCmnPdcchAlloc
3697 * Purpose: This function implements allocation of a PDCCH in the common search space.
3698 * 1. This uses index 0 of PDCCH table for efficiency.
3699 * 2. Uses the candidate PDCCH count for the aggr level.
3700 * 3. Look for availability for each candidate and choose
3701 * the first one available.
3703 * Invoked by: Scheduler
3705 * @param[in] RgSchCellCb *cell
3706 * @param[in] RgSchDlSf *sf
3707 * @return RgSchPdcch *
3708 * -# NULLP when unsuccessful
3712 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc
3718 PUBLIC RgSchPdcch *rgSCHCmnCmnPdcchAlloc(cell, subFrm)
3723 CmLteAggrLvl aggrLvl;
3724 RgSchPdcchInfo *pdcchInfo;
3726 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3727 U8 numCce; /*store num CCEs based on
3728 aggregation level */
3729 TRC2(rgSCHCmnCmnPdcchAlloc);
3731 aggrLvl = cellSch->dl.cmnChAggrLvl;
3733 pdcchInfo = &(subFrm->pdcchInfo);
3735 /* Updating the no. of nCce in pdcchInfo, in case if CFI
3738 if(subFrm->nCce != pdcchInfo->nCce)
3740 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
3743 if(cell->nCce != pdcchInfo->nCce)
3745 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
3751 case CM_LTE_AGGR_LVL4:
3754 case CM_LTE_AGGR_LVL8:
3757 case CM_LTE_AGGR_LVL16:
3764 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
3767 pdcch->isSpsRnti = FALSE;
3769 /* Increment the CCE used counter in the current subframe */
3770 subFrm->cceCnt += numCce;
3771 pdcch->pdcchSearchSpace = RG_SCH_CMN_SEARCH_SPACE;
3776 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
3777 subFrm->isCceFailure = TRUE;
3779 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
3780 "PDCCH ERR: NO PDDCH AVAIL IN COMMON SEARCH SPACE aggr:%u",
3787 * @brief This function implements bandwidth allocation for common channels.
3791 * Function: rgSCHCmnClcRbAlloc
3792 * Purpose: This function implements bandwidth allocation logic
3793 * for common control channels.
3795 * Invoked by: Scheduler
3797 * @param[in] RgSchCellCb* cell
3801 * @param[in] U32 *tbs
3802 * @param[in] U8 *mcs
3803 * @param[in] RgSchDlSf *sf
3809 PUBLIC Void rgSCHCmnClcRbAlloc
3822 PUBLIC Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, iTbs, isSpsBo)
3835 PRIVATE Void rgSCHCmnClcRbAlloc
3846 PRIVATE Void rgSCHCmnClcRbAlloc(cell, bo, cqi, rb, tbs, mcs, sf)
3855 #endif /* LTEMAC_SPS */
3858 RgSchCmnTbSzEff *effTbl;
3861 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3862 U8 cfi = cellSch->dl.currCfi;
3864 TRC2(rgSCHCmnClcRbAlloc);
3866 /* first get the CQI to MCS table and determine the number of RBs */
3867 effTbl = (RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]);
3868 iTbsVal = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[cqi];
3869 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
3871 /* Efficiency is number of bits per 1024 REs */
3872 eff = (*effTbl)[iTbsVal];
3874 /* Get the number of REs needed for this bo */
3875 noRes = ((bo * 8 * 1024) / eff );
3877 /* Get the number of RBs needed for this transmission */
3878 /* Number of RBs = No of REs / No of REs per RB */
3879 tmpRb = RGSCH_CEIL(noRes, cellSch->dl.noResPerRb[cfi]);
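/* Worked example of the RE/RB sizing above (illustrative numbers, not taken
 * from any CQI table): for bo = 100 bytes and eff = 480 bits per 1024 REs,
 * noRes = (100 * 8 * 1024) / 480 = 1706 REs; with noResPerRb[cfi] = 120 this
 * gives tmpRb = RGSCH_CEIL(1706, 120) = 15 RBs, which is then clipped to
 * maxDlBwPerUe and refined against rgTbSzTbl below. */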
3880 /* KWORK_FIX: added check to see if rb has crossed maxRb*/
3881 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3882 if (tmpRb > cellSch->dl.maxDlBwPerUe)
3884 tmpRb = cellSch->dl.maxDlBwPerUe;
3886 while ((rgTbSzTbl[0][iTbsVal][tmpRb-1]/8) < bo &&
3887 (tmpRb < cellSch->dl.maxDlBwPerUe))
3890 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, rgTbSzTbl[0][0], (tmpRb-1));
3892 *tbs = rgTbSzTbl[0][iTbsVal][tmpRb-1]/8;
3894 RG_SCH_CMN_DL_TBS_TO_MCS(iTbsVal, *mcs);
3902 * @brief Scheduling for MSG4.
3906 * Function: rgSCHCmnMsg4Alloc
3907 * Purpose: Scheduling for MSG4
3909 * Invoked by: Scheduler
3911 * @param[in] RgSchCellCb* cell
3912 * @param[in] RgSchRaCb* raCb
3913 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
3918 PRIVATE S16 rgSCHCmnMsg4Alloc
3922 RgSchCmnDlRbAllocInfo *allocInfo
3925 PRIVATE S16 rgSCHCmnMsg4Alloc(cell, raCb, allocInfo)
3928 RgSchCmnDlRbAllocInfo *allocInfo;
3931 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
3933 TRC2(rgSCHCmnMsg4Alloc);
3935 /* SR_RACH_STATS : MSG4 TO BE TXED */
3937 /* Return if subframe BW exhausted */
3938 if (allocInfo->msg4Alloc.msg4DlSf->bw <=
3939 allocInfo->msg4Alloc.msg4DlSf->bwAssigned)
3941 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId ,
3946 if (rgSCHDhmGetMsg4HqProc(raCb, cellSch->dl.time) != ROK)
3948 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3949 "rgSCHDhmGetMsg4HqProc failed");
3953 raCb->rbAllocInfo.dlSf = allocInfo->msg4Alloc.msg4DlSf;
3955 if (rgSCHCmnMsg4DedAlloc(cell, raCb) != ROK)
3957 /* Fix : syed Minor failure handling, release hqP if Unsuccessful */
3958 rgSCHDhmRlsHqpTb(raCb->dlHqE->msg4Proc, 0, FALSE);
3959 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
3960 "rgSCHCmnMsg4DedAlloc failed.");
3963 cmLListAdd2Tail(&allocInfo->msg4Alloc.msg4TxLst, &raCb->dlHqE->msg4Proc->reqLnk);
3964 raCb->dlHqE->msg4Proc->reqLnk.node = (PTR)raCb->dlHqE->msg4Proc;
3965 allocInfo->msg4Alloc.msg4DlSf->schdCcchUe++;
3972 * @brief This function implements PDCCH allocation for a UE.
3976 * Function: rgSCHCmnPdcchAlloc
3977 * Purpose: This function implements allocation of PDCCH for a UE.
3978 * 1. Get the aggregation level for the CQI of the UE.
3979 * 2. Get the candidate PDCCH count for the aggr level.
3980 * 3. Look for availability for each candidate and choose
3981 * the first one available.
3983 * Invoked by: Scheduler
3988 * @param[in] dciFrmt
3989 * @return RgSchPdcch *
3990 * -# NULLP when unsuccessful
3994 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc
4000 TfuDciFormat dciFrmt,
4004 PUBLIC RgSchPdcch *rgSCHCmnPdcchAlloc(cell, subFrm, cqi, dciFrmt, isDtx)
4009 TfuDciFormat dciFrmt;
4013 CmLteAggrLvl aggrLvl;
4014 RgSchPdcchInfo *pdcchInfo;
4017 TRC2(rgSCHCmnPdcchAlloc);
4019 /* 3.1 consider the selected DCI format size in determining the
4020 * aggregation level */
4021 //TODO_SID Need to update. Currently using 4 aggregation level
4022 aggrLvl = CM_LTE_AGGR_LVL2;//cellSch->dciAggrLvl[cqi][dciFrmt];
4025 if((dciFrmt == TFU_DCI_FORMAT_1A) &&
4026 ((ue) && (ue->allocCmnUlPdcch)) )
4028 pdcch = rgSCHCmnCmnPdcchAlloc(cell, subFrm);
4029 /* Since CRNTI Scrambled */
4032 pdcch->dciNumOfBits = ue->dciSize.cmnSize[dciFrmt];
4033 // prc_trace_format_string(PRC_TRACE_GROUP_PS, PRC_TRACE_INFO_LOW,"Forcing alloc in CMN search spc size %d fmt %d \n",
4034 // pdcch->dciNumOfBits, dciFrmt);
4040 /* Incrementing aggrLvl by one level if it is not already at the maximum,
4041 * in order to increase the redundancy bits for better decoding at the UE */
4044 if (aggrLvl != CM_LTE_AGGR_LVL16)
4048 case CM_LTE_AGGR_LVL2:
4049 aggrLvl = CM_LTE_AGGR_LVL4;
4051 case CM_LTE_AGGR_LVL4:
4052 aggrLvl = CM_LTE_AGGR_LVL8;
4054 case CM_LTE_AGGR_LVL8:
4055 aggrLvl = CM_LTE_AGGR_LVL16;
4064 pdcchInfo = &subFrm->pdcchInfo;
4066 /* Updating the no. of nCce in pdcchInfo, in case if CFI
4069 if(subFrm->nCce != pdcchInfo->nCce)
4071 rgSCHUtlPdcchInit(cell, subFrm, subFrm->nCce);
4074 if(cell->nCce != pdcchInfo->nCce)
4076 rgSCHUtlPdcchInit(cell, subFrm, cell->nCce);
4080 if (pdcchInfo->nCce < (1 << (aggrLvl - 1)))
4082 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4083 subFrm->isCceFailure = TRUE;
4084 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4085 "PDCCH ERR: NO PDDCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4091 if (rgSCHUtlPdcchAvail(cell, pdcchInfo, aggrLvl, &pdcch) == TRUE)
4093 /* SR_RACH_STATS : Reset isTBMsg4 */
4094 pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4= FALSE;
4095 pdcch->dci.u.format0Info.isSrGrant = FALSE;
4097 pdcch->isSpsRnti = FALSE;
4099 /* Increment the CCE used counter in the current subframe */
4100 subFrm->cceCnt += aggrLvl;
4101 pdcch->pdcchSearchSpace = RG_SCH_UE_SPECIFIC_SEARCH_SPACE;
4105 if (ue->cell != cell)
4107 /* Secondary Cell */
4108 //pdcch->dciNumOfBits = ue->dciSize.noUlCcSize[dciFrmt];
4109 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4114 //pdcch->dciNumOfBits = ue->dciSize.dedSize[dciFrmt];
4115 //TODO_SID Need to update dci size.
4116 pdcch->dciNumOfBits = MAX_5GTF_DCIA1B1_SIZE;
4122 pdcch->dciNumOfBits = cell->dciSize.size[dciFrmt];
4127 /* PDCCH Allocation Failed, Mark cceFailure flag as TRUE */
4128 subFrm->isCceFailure = TRUE;
4130 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
4131 "PDCCH ERR: NO PDDCH AVAIL IN UE SEARCH SPACE :aggr(%u)",
4138 * @brief This function implements BW allocation for CCCH SDU
4142 * Function: rgSCHCmnCcchSduDedAlloc
4143 * Purpose: Downlink bandwidth Allocation for CCCH SDU.
4145 * Invoked by: Scheduler
4147 * @param[in] RgSchCellCb* cell
4148 * @param[out] RgSchUeCb *ueCb
4153 PRIVATE S16 rgSCHCmnCcchSduDedAlloc
4159 PRIVATE S16 rgSCHCmnCcchSduDedAlloc(cell, ueCb)
4164 RgSchDlHqEnt *hqE = NULLP;
4166 RgSchDlRbAlloc *rbAllocinfo = NULLP;
4167 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4171 U8 cfi = cellDl->currCfi;
4174 TRC2(rgSCHCmnCcchSduDedAlloc);
4176 rbAllocinfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
4178 effBo = ueCb->dlCcchInfo.bo + RGSCH_CCCH_SDU_HDRSIZE;
4181 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4182 &rbAllocinfo->tbInfo[0].bytesReq,
4183 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4184 #else /* LTEMAC_SPS */
4185 rgSCHCmnClcRbAlloc(cell, effBo, cellDl->ccchCqi, &rbAllocinfo->rbsReq, \
4186 &rbAllocinfo->tbInfo[0].bytesReq,\
4187 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4189 #endif /* LTEMAC_SPS */
4192 /* Cannot exceed the total number of RBs in the cell */
4193 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4194 rbAllocinfo->dlSf->bwAssigned)))
4196 /* Check if at least one allocation was possible.
4197 This may be the case where the BW is very small and,
4198 with the configured CCCH CQI, the CCCH SDU exceeds the min BW */
4199 if (rbAllocinfo->dlSf->bwAssigned == 0)
4201 numRb = rbAllocinfo->dlSf->bw;
4202 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4203 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4207 rbAllocinfo->rbsReq = numRb;
4208 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4209 /* DwPTS Scheduling Changes Start */
4211 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4213 rbAllocinfo->tbInfo[0].bytesReq =
4214 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1,cfi);
4217 /* DwPTS Scheduling Changes End */
4218 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4226 /* Update the subframe Allocated BW field */
4227 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4228 rbAllocinfo->rbsReq;
4229 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
4230 rbAllocinfo->tbInfo[0].tbCb = &hqE->ccchSduProc->tbInfo[0];
4231 rbAllocinfo->rnti = ueCb->ueId;
4232 rbAllocinfo->tbInfo[0].noLyr = 1;
4239 * @brief This function implements BW allocation for MSG4
4243 * Function: rgSCHCmnMsg4DedAlloc
4244 * Purpose: Downlink bandwidth Allocation for MSG4.
4246 * Invoked by: Scheduler
4248 * @param[in] RgSchCellCb* cell
4249 * @param[out] RgSchRaCb *raCb
4254 PRIVATE S16 rgSCHCmnMsg4DedAlloc
4260 PRIVATE S16 rgSCHCmnMsg4DedAlloc(cell, raCb)
4266 RgSchDlRbAlloc *rbAllocinfo = &raCb->rbAllocInfo;
4270 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4271 U8 cfi = cellDl->currCfi;
4274 TRC2(rgSCHCmnMsg4DedAlloc);
4276 effBo = raCb->dlCcchInfo.bo + RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE;
4279 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4280 &rbAllocinfo->tbInfo[0].bytesReq,\
4281 &rbAllocinfo->tbInfo[0].imcs, rbAllocinfo->dlSf);
4282 #else /* LTEMAC_SPS */
4283 rgSCHCmnClcRbAlloc(cell, effBo, raCb->ccchCqi, &rbAllocinfo->rbsReq, \
4284 &rbAllocinfo->tbInfo[0].bytesReq,\
4285 &rbAllocinfo->tbInfo[0].imcs, &iTbs, FALSE,
4287 #endif /* LTEMAC_SPS */
4290 /* Cannot exceed the total number of RBs in the cell */
4291 if ((S16)rbAllocinfo->rbsReq > ((S16)(rbAllocinfo->dlSf->bw - \
4292 rbAllocinfo->dlSf->bwAssigned)))
4294 /* Check if at least one allocation was possible.
4295 This may be the case where the BW is very small and,
4296 with the configured CCCH CQI, the CCCH SDU exceeds the min BW */
4297 if (rbAllocinfo->dlSf->bwAssigned == 0)
4299 numRb = rbAllocinfo->dlSf->bw;
4300 RG_SCH_CMN_DL_MCS_TO_TBS(rbAllocinfo->tbInfo[0].imcs, iTbs);
4301 while (rgTbSzTbl[0][++iTbs][numRb-1]/8 < effBo)
4305 rbAllocinfo->rbsReq = numRb;
4306 rbAllocinfo->tbInfo[0].bytesReq = rgTbSzTbl[0][iTbs][numRb-1]/8;
4307 /* DwPTS Scheduling Changes Start */
4309 if(rbAllocinfo->dlSf->sfType == RG_SCH_SPL_SF_DATA)
4311 rbAllocinfo->tbInfo[0].bytesReq =
4312 rgSCHCmnCalcDwPtsTbSz(cell, effBo, &numRb, &iTbs, 1, cfi);
4315 /* DwPTS Scheduling Changes End */
4316 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, rbAllocinfo->tbInfo[0].imcs);
4324 /* Update the subframe Allocated BW field */
4325 rbAllocinfo->dlSf->bwAssigned = rbAllocinfo->dlSf->bwAssigned + \
4326 rbAllocinfo->rbsReq;
4327 rbAllocinfo->rnti = raCb->tmpCrnti;
4328 rbAllocinfo->tbInfo[0].tbCb = &raCb->dlHqE->msg4Proc->tbInfo[0];
4329 rbAllocinfo->tbInfo[0].schdlngForTb = TRUE;
4330 rbAllocinfo->tbInfo[0].noLyr = 1;
4337 * @brief This function implements scheduling for RA Response.
4341 * Function: rgSCHCmnDlRaRsp
4342 * Purpose: Downlink scheduling for RA responses.
4344 * Invoked by: Scheduler
4346 * @param[in] RgSchCellCb* cell
4351 PRIVATE Void rgSCHCmnDlRaRsp
4354 RgSchCmnDlRbAllocInfo *allocInfo
4357 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4359 RgSchCmnDlRbAllocInfo *allocInfo;
4362 CmLteTimingInfo frm;
4363 CmLteTimingInfo schFrm;
4369 RgSchTddRachRspLst *rachRsp;
4370 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
4374 TRC2(rgSCHCmnDlRaRsp);
4376 frm = cell->crntTime;
4377 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4379 /* Compute the subframe for which allocation is being made */
4380 /* essentially, we need pointer to the dl frame for this subframe */
4381 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4383 /* Get the RACH Response scheduling related information
4384 * for the subframe with RA index */
4385 raIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][frm.subframe]-1;
4387 rachRsp = &cell->rachRspLst[raIdx];
4389 for(sfnIdx = 0; sfnIdx < rachRsp->numRadiofrms; sfnIdx++)
4391 /* For all scheduled RACH Responses in SFNs */
4393 RG_SCH_CMN_DECR_FRAME(schFrm.sfn, rachRsp->rachRsp[sfnIdx].sfnOffset);
4394 /* For all scheduled RACH Responses in subframes */
4396 subfrmIdx < rachRsp->rachRsp[sfnIdx].numSubfrms; subfrmIdx++)
4398 schFrm.subframe = rachRsp->rachRsp[sfnIdx].subframe[subfrmIdx];
4399 /* compute the last RA RNTI used in the previous subframe */
4400 raIdx = (((schFrm.sfn % cell->raInfo.maxRaSize) * \
4401 RGSCH_NUM_SUB_FRAMES * RGSCH_MAX_RA_RNTI_PER_SUBFRM) \
4404 /* For all RA RNTIs within a subframe */
4406 for(i=0; (i < RGSCH_MAX_RA_RNTI_PER_SUBFRM) && \
4407 (noRaRnti < RGSCH_MAX_TDD_RA_RSP_ALLOC); i++)
4409 rarnti = (schFrm.subframe + RGSCH_NUM_SUB_FRAMES*i + 1);
4410 rntiIdx = (raIdx + RGSCH_NUM_SUB_FRAMES*i);
4412 if (cell->raInfo.raReqLst[rntiIdx].first != NULLP)
4414 /* compute the next RA RNTI */
4415 if (rgSCHCmnRaRspAlloc(cell, subFrm, rntiIdx,
4416 rarnti, noRaRnti, allocInfo) != ROK)
4418 /* The resources are exhausted */
4432 * @brief This function implements scheduling for RA Response.
4436 * Function: rgSCHCmnDlRaRsp
4437 * Purpose: Downlink scheduling for RA responses.
4439 * Invoked by: Scheduler
4441 * @param[in] RgSchCellCb* cell
4442 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4447 PRIVATE Void rgSCHCmnDlRaRsp //FDD
4450 RgSchCmnDlRbAllocInfo *allocInfo
4453 PRIVATE Void rgSCHCmnDlRaRsp(cell, allocInfo)
4455 RgSchCmnDlRbAllocInfo *allocInfo;
4458 CmLteTimingInfo frm;
4459 CmLteTimingInfo winStartFrm;
4465 RgSchCmnCell *sched;
4467 TRC2(rgSCHCmnDlRaRsp);
4469 frm = cell->crntTime;
4470 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
4472 /* Compute the subframe for which allocation is being made */
4473 /* essentially, we need pointer to the dl frame for this subframe */
4474 subFrm = rgSCHUtlSubFrmGet(cell, frm);
4475 sched = RG_SCH_CMN_GET_CELL(cell);
4477 /* ccpu00132523 - Window Start calculated by considering RAR window size,
4478 * RAR Wait period, Subframes occupied for respective preamble format*/
4479 winGap = (sched->dl.numRaSubFrms-1) + (cell->rachCfg.raWinSize-1)
4480 +RGSCH_RARSP_WAIT_PERIOD;
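/* Illustrative winGap calculation (values assumed purely for illustration):
 * with numRaSubFrms = 1, raWinSize = 10 and RGSCH_RARSP_WAIT_PERIOD = 3,
 * winGap = (1 - 1) + (10 - 1) + 3 = 12, i.e. the window of RACH occasions
 * considered for RAR in this TTI starts 12 subframes before frm. */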
4482 /* Window starting occasion is retrieved using the gap and then
4483 * mapped onto the size of the raReqLst array*/
4484 RGSCHDECRFRMCRNTTIME(frm, winStartFrm, winGap);
4486 //5G_TODO TIMING update. Need to check
4487 winStartIdx = (winStartFrm.sfn & 1) * RGSCH_MAX_RA_RNTI+ winStartFrm.slot;
4489 for(i = 0; ((i < cell->rachCfg.raWinSize) && (noRaRnti < RG_SCH_CMN_MAX_CMN_PDCCH)); i++)
4491 raIdx = (winStartIdx + i) % RGSCH_RAREQ_ARRAY_SIZE;
4493 if (cell->raInfo.raReqLst[raIdx].first != NULLP)
4495 allocInfo->raRspAlloc[noRaRnti].biEstmt = \
4496 (!i * RGSCH_ONE_BIHDR_SIZE);
4497 rarnti = raIdx % RGSCH_MAX_RA_RNTI+ 1;
4498 if (rgSCHCmnRaRspAlloc(cell, subFrm, raIdx,
4499 rarnti, noRaRnti, allocInfo) != ROK)
4501 /* The resources are exhausted */
4504 /* ccpu00132523- If all the RAP IDs could not be scheduled then there is
4505 * no need to proceed to the next RA-RNTIs*/
4506 if(allocInfo->raRspAlloc[noRaRnti].numRapids < cell->raInfo.raReqLst[raIdx].count)
4510 noRaRnti++; /* Max of RG_SCH_CMN_MAX_CMN_PDCCH RARNTIs
4511 for response allocation */
4520 * @brief This function allocates the resources for an RARNTI.
4524 * Function: rgSCHCmnRaRspAlloc
4525 * Purpose: Allocate resources to a RARNTI.
4526 * 0. Allocate PDCCH for sending the response.
4527 * 1. Locate the number of RA requests pending for the RARNTI.
4528 * 2. Compute the size of data to be built.
4529 * 3. Using common channel CQI, compute the number of RBs.
4531 * Invoked by: Scheduler
4533 * @param[in] RgSchCellCb *cell,
4534 * @param[in] RgSchDlSf *subFrm,
4535 * @param[in] U16 rarnti,
4536 * @param[in] U8 noRaRnti
4537 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
4542 PRIVATE S16 rgSCHCmnRaRspAlloc
4549 RgSchCmnDlRbAllocInfo *allocInfo
4552 PRIVATE S16 rgSCHCmnRaRspAlloc(cell,subFrm,raIndex,rarnti,noRaRnti,allocInfo)
4558 RgSchCmnDlRbAllocInfo *allocInfo;
4561 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4562 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4566 /*ccpu00116700,ccpu00116708- Corrected the wrong type for mcs*/
4569 /* RACH handling related changes */
4570 Bool isAlloc = FALSE;
4571 static U8 schdNumRapid = 0;
4577 U8 cfi = cellDl->currCfi;
4580 TRC2(rgSCHCmnRaRspAlloc);
4585 /* ccpu00132523: Resetting the schdRap Id count in every scheduling subframe*/
4592 if (subFrm->bw == subFrm->bwAssigned)
4594 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4595 "bw == bwAssigned RARNTI:%d",rarnti);
4599 reqLst = &cell->raInfo.raReqLst[raIndex];
4600 if (reqLst->count == 0)
4602 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4603 "reqLst Count=0 RARNTI:%d",rarnti);
4606 remNumRapid = reqLst->count;
4609 /* Limit number of rach rsps to maxMsg3PerUlsf */
4610 if ( schdNumRapid+remNumRapid > cellUl->maxMsg3PerUlSf )
4612 remNumRapid = cellUl->maxMsg3PerUlSf-schdNumRapid;
4618 /* Try allocating for as many RAPIDs as possible */
4619 /* Add the BI sub-header size to the tbSize requirement */
4620 noBytes = RGSCH_GET_RAR_BYTES(remNumRapid) +\
4621 allocInfo->raRspAlloc[noRaRnti].biEstmt;
4622 if ((allwdTbSz = rgSCHUtlGetAllwdCchTbSz(noBytes*8, &nPrb, &mcs)) == -1)
4628 /* rgSCHCmnClcRbAllocForFxdTb(cell, allwdTbSz/8, cellDl->ccchCqi, &rb);*/
4629 if(cellDl->bitsPerRb==0)
4631 while ((rgTbSzTbl[0][0][rb]) <(U32) allwdTbSz)
4639 rb = RGSCH_CEIL(allwdTbSz, cellDl->bitsPerRb);
4641 /* DwPTS Scheduling Changes Start */
4643 if (subFrm->sfType == RG_SCH_SPL_SF_DATA)
4645 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
4647 /* Calculate the less RE's because of DwPTS */
4648 lostRe = rb * (cellDl->noResPerRb[cfi] -
4649 cellDl->numReDwPts[cfi]);
4651 /* Increase number of RBs in Spl SF to compensate for lost REs */
4652 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
4655 /* DwPTS Scheduling Changes End */
4657 /*ccpu00115595- end*/
4658 if (rb > subFrm->bw - subFrm->bwAssigned)
4663 /* Allocation succeeded for 'remNumRapid' */
4666 printf("\n!!!RAR alloc noBytes:%u,allwdTbSz:%u,tbs:%u,rb:%u\n",
4667 noBytes,allwdTbSz,tbs,rb);
4672 RLOG_ARG0(L_INFO,DBG_CELLID,cell->cellId,"BW alloc Failed");
4676 subFrm->bwAssigned = subFrm->bwAssigned + rb;
4678 /* Fill AllocInfo structure */
4679 allocInfo->raRspAlloc[noRaRnti].rnti = rarnti;
4680 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].bytesReq = tbs;
4681 allocInfo->raRspAlloc[noRaRnti].rbsReq = rb;
4682 allocInfo->raRspAlloc[noRaRnti].dlSf = subFrm;
4683 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].imcs = mcs;
4684 allocInfo->raRspAlloc[noRaRnti].raIndex = raIndex;
4685 /* RACH changes for multiple RAPID handling */
4686 allocInfo->raRspAlloc[noRaRnti].numRapids = remNumRapid;
4687 allocInfo->raRspAlloc[noRaRnti].nPrb = nPrb;
4688 allocInfo->raRspAlloc[noRaRnti].tbInfo[0].noLyr = 1;
4689 allocInfo->raRspAlloc[noRaRnti].vrbgReq = RGSCH_CEIL(nPrb,MAX_5GTF_VRBG_SIZE);
4690 schdNumRapid += remNumRapid;
4694 /***********************************************************
4696 * Func : rgSCHCmnUlAllocFillRbInfo
4698 * Desc : Fills the start RB and the number of RBs for
4699 * uplink allocation.
4707 **********************************************************/
4709 PUBLIC Void rgSCHCmnUlAllocFillRbInfo
4716 PUBLIC Void rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc)
4719 RgSchUlAlloc *alloc;
4722 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4723 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4724 U8 cfi = cellDl->currCfi;
4727 TRC2(rgSCHCmnUlAllocFillRbInfo);
4728 alloc->grnt.rbStart = (alloc->sbStart * cellUl->sbSize) +
4729 cell->dynCfiCb.bwInfo[cfi].startRb;
4731 /* Num RBs = numSbAllocated * sbSize - less RBs in the last SB */
4732 alloc->grnt.numRb = (alloc->numSb * cellUl->sbSize);
4738 * @brief Grant request for Msg3.
4742 * Function : rgSCHCmnMsg3GrntReq
4744 * This is invoked by downlink scheduler to request allocation
4747 * - Attempt to allocate msg3 in the current msg3 subframe
4748 * Allocation attempt based on whether preamble is from group A
4749 * and the value of MESSAGE_SIZE_GROUP_A
4750 * - Link allocation with passed RNTI and msg3 HARQ process
4751 * - Set the HARQ process ID (*hqProcIdRef)
4753 * @param[in] RgSchCellCb *cell
4754 * @param[in] CmLteRnti rnti
4755 * @param[in] Bool preamGrpA
4756 * @param[in] RgSchUlHqProcCb *hqProc
4757 * @param[out] RgSchUlAlloc **ulAllocRef
4758 * @param[out] U8 *hqProcIdRef
4762 PRIVATE Void rgSCHCmnMsg3GrntReq
4767 RgSchUlHqProcCb *hqProc,
4768 RgSchUlAlloc **ulAllocRef,
4772 PRIVATE Void rgSCHCmnMsg3GrntReq(cell, rnti, preamGrpA, hqProc,
4773 ulAllocRef, hqProcIdRef)
4777 RgSchUlHqProcCb *hqProc;
4778 RgSchUlAlloc **ulAllocRef;
4782 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
4783 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
4785 RgSchUlAlloc *alloc;
4789 TRC2(rgSCHCmnMsg3GrntReq);
4791 *ulAllocRef = NULLP;
4793 /* Fix: ccpu00120610 Use remAllocs from subframe during msg3 allocation */
4794 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
4798 if (preamGrpA == FALSE)
4800 numSb = cellUl->ra.prmblBNumSb;
4801 iMcs = cellUl->ra.prmblBIMcs;
4805 numSb = cellUl->ra.prmblANumSb;
4806 iMcs = cellUl->ra.prmblAIMcs;
4809 if ((hole = rgSCHUtlUlHoleFirst(sf)) != NULLP)
4811 if(*sf->allocCountRef == 0)
4813 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
4814 /* Reinitialize the hole */
4815 if (sf->holeDb->count == 1 && (hole->start == 0)) /* Sanity check of holeDb */
4817 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4818 /* Re-Initialize available subbands because of CFI change*/
4819 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
4823 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
4824 "Error! holeDb sanity check failed RNTI:%d",rnti);
4827 if (numSb <= hole->num)
4830 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
4831 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
4832 alloc->grnt.iMcs = iMcs;
4833 alloc->grnt.iMcsCrnt = iMcs;
4834 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
4835 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
4836 /* To include the length and ModOrder in DataRecp Req.*/
4837 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
4838 RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
4839 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
4840 alloc->grnt.nDmrs = 0;
4841 alloc->grnt.hop = 0;
4842 alloc->grnt.delayBit = 0;
4843 alloc->grnt.isRtx = FALSE;
4844 *ulAllocRef = alloc;
4845 *hqProcIdRef = (cellUl->msg3SchdHqProcIdx);
4846 hqProc->procId = *hqProcIdRef;
4847 hqProc->ulSfIdx = (cellUl->msg3SchdIdx);
4850 alloc->pdcch = FALSE;
4851 alloc->forMsg3 = TRUE;
4852 alloc->hqProc = hqProc;
4853 rgSCHUhmNewTx(hqProc, (U8)(cell->rachCfg.maxMsg3Tx - 1), alloc);
4854 //RLOG_ARG4(L_DEBUG,DBG_CELLID,cell->cellId,
4856 "\nRNTI:%d MSG3 ALLOC proc(%lu)procId(%d)schdIdx(%d)\n",
4858 ((PTR)alloc->hqProc),
4859 alloc->hqProc->procId,
4860 alloc->hqProc->ulSfIdx);
4861 RLOG_ARG2(L_DEBUG,DBG_CELLID,cell->cellId,
4862 "alloc(%p)maxMsg3Tx(%d)",
4864 cell->rachCfg.maxMsg3Tx);
4873 * @brief This function determines the allocation limits and
4874 * parameters that aid in DL scheduling.
4878 * Function: rgSCHCmnDlSetUeAllocLmt
4879 * Purpose: This function determines the Maximum RBs
4880 * a UE is eligible to get based on softbuffer
4881 * limitation and cell->maxDlBwPerUe. The Codeword
4882 * specific parameters like iTbs, eff and noLyrs
4883 * are also set in this function. This function
4884 * is called during UE configuration and on UeDlCqiInd.
4886 * Invoked by: Scheduler
4888 * @param[in] RgSchCellCb *cellCb
4889 * @param[in] RgSchCmnDlUe *ueDl
4894 PRIVATE Void rgSCHCmnDlSetUeAllocLmt
4901 PRIVATE Void rgSCHCmnDlSetUeAllocLmt(cell, ueDl, isEmtcUe)
4909 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
4910 U8 cfi = cellSch->dl.currCfi;
4912 TRC2(rgSCHCmnDlSetUeAllocLmt);
4915 if(TRUE == isEmtcUe)
4917 /* ITbs for CW0 for 1 Layer Tx */
4918 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4919 [ueDl->mimoInfo.cwInfo[0].cqi];
4920 /* ITbs for CW0 for 2 Layer Tx */
4921 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4922 [ueDl->mimoInfo.cwInfo[0].cqi];
4923 /* Eff for CW0 for 1 Layer Tx */
4924 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4925 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4926 /* Eff for CW0 for 2 Layer Tx */
4927 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4928 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4930 /* ITbs for CW1 for 1 Layer Tx */
4931 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[0][cfi]))\
4932 [ueDl->mimoInfo.cwInfo[1].cqi];
4933 /* ITbs for CW1 for 2 Layer Tx */
4934 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchEmtcCmnCqiToTbs *)(cellSch->dl.emtcCqiToTbsTbl[1][cfi]))\
4935 [ueDl->mimoInfo.cwInfo[1].cqi];
4936 /* Eff for CW1 for 1 Layer Tx */
4937 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4938 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4939 /* Eff for CW1 for 2 Layer Tx */
4940 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4941 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
4946 /* ITbs for CW0 for 1 Layer Tx */
4947 ueDl->mimoInfo.cwInfo[0].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4948 [ueDl->mimoInfo.cwInfo[0].cqi];
4949 /* ITbs for CW0 for 2 Layer Tx */
4950 ueDl->mimoInfo.cwInfo[0].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4951 [ueDl->mimoInfo.cwInfo[0].cqi];
4952 /* Eff for CW0 for 1 Layer Tx */
4953 ueDl->mimoInfo.cwInfo[0].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4954 [ueDl->mimoInfo.cwInfo[0].iTbs[0]];
4955 /* Eff for CW0 for 2 Layer Tx */
4956 ueDl->mimoInfo.cwInfo[0].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4957 [ueDl->mimoInfo.cwInfo[0].iTbs[1]];
4959 /* ITbs for CW1 for 1 Layer Tx */
4960 ueDl->mimoInfo.cwInfo[1].iTbs[0] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
4961 [ueDl->mimoInfo.cwInfo[1].cqi];
4962 /* ITbs for CW1 for 2 Layer Tx */
4963 ueDl->mimoInfo.cwInfo[1].iTbs[1] = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[1][cfi]))\
4964 [ueDl->mimoInfo.cwInfo[1].cqi];
4965 /* Eff for CW1 for 1 Layer Tx */
4966 ueDl->mimoInfo.cwInfo[1].eff[0] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[0][cfi]))\
4967 [ueDl->mimoInfo.cwInfo[1].iTbs[0]];
4968 /* Eff for CW1 for 2 Layer Tx */
4969 ueDl->mimoInfo.cwInfo[1].eff[1] = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[1][cfi]))\
4970 [ueDl->mimoInfo.cwInfo[1].iTbs[1]];
4974 // ueDl->laCb.cqiBasediTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0] * 100;
4976 /* Assigning noLyrs to each CW assuming optimal Spatial multiplexing
4978 (ueDl->mimoInfo.ri/2 == 0)? (ueDl->mimoInfo.cwInfo[0].noLyr = 1) : \
4979 (ueDl->mimoInfo.cwInfo[0].noLyr = ueDl->mimoInfo.ri/2);
4980 ueDl->mimoInfo.cwInfo[1].noLyr = ueDl->mimoInfo.ri - ueDl->mimoInfo.cwInfo[0].noLyr;
4981 /* rg002.101:ccpu00102106: correcting DL harq softbuffer limitation logic.
4982 * The maxTbSz is the maximum number of PHY bits a harq process can
4983 * hold. Hence we limit our allocation per harq process based on this.
4984 * Earlier implementation we misinterpreted the maxTbSz to be per UE
4985 * per TTI, but in fact it is per Harq per TTI. */
4986 /* rg002.101:ccpu00102106: cannot exceed the harq Tb Size
4987 * and harq Soft Bits limit.*/
4989 /* Considering iTbs corresponding to 2 layer transmission for
4990 * codeword0(approximation) and the maxLayers supported by
4991 * this UE at this point of time. */
4992 RG_SCH_CMN_TBS_TO_MODODR(ueDl->mimoInfo.cwInfo[0].iTbs[1], modOrder);
4994 /* Bits/modOrder gives #REs, #REs/noResPerRb gives #RBs */
4995 /* rg001.301 -MOD- [ccpu00119213] : avoiding wraparound */
4996 maxRb = ((ueDl->maxSbSz)/(cellSch->dl.noResPerRb[cfi] * modOrder *\
4997 ueDl->mimoInfo.ri));
4998 if (cellSch->dl.isDlFreqSel)
5000 /* Rounding off to left nearest multiple of RBG size */
5001 maxRb -= maxRb % cell->rbgSize;
5003 ueDl->maxRb = RGSCH_MIN(maxRb, cellSch->dl.maxDlBwPerUe);
5004 if (cellSch->dl.isDlFreqSel)
5006 /* Rounding off to right nearest multiple of RBG size */
5007 if (ueDl->maxRb % cell->rbgSize)
5009 ueDl->maxRb += (cell->rbgSize -
5010 (ueDl->maxRb % cell->rbgSize));
5014 /* Set the index of the cwInfo, which is better in terms of
5015 * efficiency. If RI<2, only 1 CW, hence btrCwIdx shall be 0 */
5016 if (ueDl->mimoInfo.ri < 2)
5018 ueDl->mimoInfo.btrCwIdx = 0;
5022 if (ueDl->mimoInfo.cwInfo[0].eff[ueDl->mimoInfo.cwInfo[0].noLyr-1] <\
5023 ueDl->mimoInfo.cwInfo[1].eff[ueDl->mimoInfo.cwInfo[1].noLyr-1])
5025 ueDl->mimoInfo.btrCwIdx = 1;
5029 ueDl->mimoInfo.btrCwIdx = 0;
5039 * @brief This function updates TX Scheme.
5043 * Function: rgSCHCheckAndSetTxScheme
5044 * Purpose: This function compares the iTbs currently in use for
5045 * codeword 0 against the CQI-based iTbs and, if the gap
5046 * exceeds the configured thresholds, forces a transmission
5047 * scheme change (forced TD) for the UE. The forcing is
5048 * cleared once the iTbs in use reaches the maximum iTbs.
5051 * Invoked by: Scheduler
5053 * @param[in] RgSchCellCb *cell
5054 * @param[in] RgSchUeCb *ue
5059 PRIVATE Void rgSCHCheckAndSetTxScheme
5065 PRIVATE Void rgSCHCheckAndSetTxScheme(cell, ue)
5070 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5071 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue ,cell);
5072 U8 cfi = cellSch->dl.currCfi;
5077 TRC2(rgSCHCheckAndSetTxScheme);
5079 maxiTbs = (*(RgSchCmnCqiToTbs*)(cellSch->dl.cqiToTbsTbl[0][cfi]))\
5080 [RG_SCH_CMN_MAX_CQI - 1];
5081 cqiBasediTbs = (ueDl->laCb[0].cqiBasediTbs)/100;
5082 actualiTbs = ueDl->mimoInfo.cwInfo[0].iTbs[0];
5084 if((actualiTbs < RG_SCH_TXSCHEME_CHNG_ITBS_FACTOR) && (cqiBasediTbs >
5085 actualiTbs) && ((cqiBasediTbs - actualiTbs) > RG_SCH_TXSCHEME_CHNG_THRSHD))
5087 RG_SCH_CMN_SET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
5090 if(actualiTbs >= maxiTbs)
5092 RG_SCH_CMN_UNSET_FORCE_TD(ue,cell, RG_SCH_CMN_TD_TXSCHEME_CHNG);
5099 * @brief This function determines the allocation limits and
5100 * parameters that aid in DL scheduling.
5104 * Function: rgSCHCmnDlSetUeAllocLmtLa
5105 * Purpose: This function determines the Maximum RBs
5106 * a UE is eligible to get based on softbuffer
5107 * limitation and cell->maxDlBwPerUe. The Codeword
5108 * specific parameters like iTbs, eff and noLyrs
5109 * are also set in this function. This function
5110 * is called during UE configuration and on UeDlCqiInd.
5112 * Invoked by: Scheduler
5114 * @param[in] RgSchCellCb *cell
5115 * @param[in] RgSchUeCb *ue
5120 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa
5126 PUBLIC Void rgSCHCmnDlSetUeAllocLmtLa(cell, ue)
5134 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
5135 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
5136 U8 cfi = cellSch->dl.currCfi;
5140 TRC2(rgSCHCmnDlSetUeAllocLmtLa);
5142 maxiTbs = (*(RgSchCmnCqiToTbs *)(cellSch->dl.cqiToTbsTbl[0][cfi]))[RG_SCH_CMN_MAX_CQI - 1];
5143 if(ueDl->cqiFlag == TRUE)
5145 for(cwIdx=0; cwIdx < RG_SCH_CMN_MAX_CW_PER_UE; cwIdx++)
5149 /* Calculating the reported iTbs for code word 0 */
5150 reportediTbs = ue->ue5gtfCb.mcs;
5152 iTbsNew = (S32) reportediTbs;
5154 if(!ueDl->laCb[cwIdx].notFirstCqi)
5156 /* This is the first CQI report from UE */
5157 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5158 ueDl->laCb[cwIdx].notFirstCqi = TRUE;
5160 else if ((RG_ITBS_DIFF(reportediTbs, ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0])) > 5)
5162 /* Ignore this iTBS report and mark that last iTBS report was */
5163 /* ignored so that subsequently we reset the LA algorithm */
5164 ueDl->laCb[cwIdx].lastiTbsIgnored = TRUE;
5165 ueDl->laCb[cwIdx].numLastiTbsIgnored++;
5166 if( ueDl->laCb[cwIdx].numLastiTbsIgnored > 10)
5168 /* CQI reported by UE is not catching up. Reset the LA algorithm */
5169 ueDl->laCb[cwIdx].cqiBasediTbs = (iTbsNew * 100);
5170 ueDl->laCb[cwIdx].deltaiTbs = 0;
5171 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
5172 ueDl->laCb[cwIdx].numLastiTbsIgnored = 0;
5177 if (ueDl->laCb[cwIdx].lastiTbsIgnored != TRUE)
5179 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5180 (80 * ueDl->laCb[cwIdx].cqiBasediTbs))/100;
5184 /* Reset the LA as iTbs in use caught up with the value */
5185 /* reported by UE. */
5186 ueDl->laCb[cwIdx].cqiBasediTbs = ((20 * iTbsNew * 100) +
5187 (80 * ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] * 100))/100;
5188 ueDl->laCb[cwIdx].deltaiTbs = 0;
5189 ueDl->laCb[cwIdx].lastiTbsIgnored = FALSE;
5193 iTbsNew = (ueDl->laCb[cwIdx].cqiBasediTbs + ueDl->laCb[cwIdx].deltaiTbs)/100;
5195 RG_SCH_CHK_ITBS_RANGE(iTbsNew, maxiTbs);
5197 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0] = RGSCH_MIN(iTbsNew, cell->thresholds.maxDlItbs);
5198 //ueDl->mimoInfo.cwInfo[cwIdx].iTbs[1] = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
5200 ue->ue5gtfCb.mcs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[0];
5202 printf("reportediTbs[%d] cqiBasediTbs[%d] deltaiTbs[%d] iTbsNew[%d] mcs[%d] cwIdx[%d]\n",
5203 reportediTbs, ueDl->laCb[cwIdx].cqiBasediTbs, ueDl->laCb[cwIdx].deltaiTbs,
5204 iTbsNew, ue->ue5gtfCb.mcs, cwIdx);
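/* Numerical sketch of the link-adaptation filter above (values assumed):
 * cqiBasediTbs is an exponential average with 20/80 weighting, kept scaled
 * by 100. If cqiBasediTbs was 1500 (i.e. 15.00) and the UE now reports
 * iTbs 10, the new value is (20*10*100 + 80*1500)/100 = 1400, i.e. 14.00;
 * iTbsNew is then (cqiBasediTbs + deltaiTbs)/100 = 14 for deltaiTbs = 0,
 * clipped against maxDlItbs. */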
5208 if((ue->mimoInfo.txMode != RGR_UE_TM_3) && (ue->mimoInfo.txMode != RGR_UE_TM_4))
5213 ueDl->cqiFlag = FALSE;
5220 /***********************************************************
5222 * Func : rgSCHCmnDlHqPResetTemp
5224 * Desc : Reset whatever variables were temporarily used
5225 * during UE scheduling.
5233 **********************************************************/
5235 PUBLIC Void rgSCHCmnDlHqPResetTemp
5237 RgSchDlHqProcCb *hqP
5240 PUBLIC Void rgSCHCmnDlHqPResetTemp(hqP)
5241 RgSchDlHqProcCb *hqP;
5245 TRC2(rgSCHCmnDlHqPResetTemp);
5247 /* Fix: syed having a hqP added to Lists for RB assignment rather than
5248 * a UE, as adding UE was limiting handling some scenarios */
5249 hqP->reqLnk.node = (PTR)NULLP;
5250 hqP->schdLstLnk.node = (PTR)NULLP;
5253 } /* rgSCHCmnDlHqPResetTemp */
5255 /***********************************************************
5257 * Func : rgSCHCmnDlUeResetTemp
5259 * Desc : Reset whatever variables were temporarily used
5260 * during UE scheduling.
5268 **********************************************************/
5270 PUBLIC Void rgSCHCmnDlUeResetTemp
5273 RgSchDlHqProcCb *hqP
5276 PUBLIC Void rgSCHCmnDlUeResetTemp(ue, hqP)
5278 RgSchDlHqProcCb *hqP;
5281 RgSchDlRbAlloc *allocInfo;
5282 RgSchCmnDlUe *cmnUe = RG_SCH_CMN_GET_DL_UE(ue,hqP->hqE->cell);
5287 TRC2(rgSCHCmnDlUeResetTemp);
5289 /* Fix : syed check for UE's existence was useless.
5290 * Instead we need to check that reset is done only for the
5291 * information of a scheduled harq proc, which is cmnUe->proc.
5292 * Reset should not be done for non-scheduled hqP */
5293 if((cmnUe->proc == hqP) || (cmnUe->proc == NULLP))
5295 cmnUe->proc = NULLP;
5296 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, hqP->hqE->cell);
5298 tmpCb = allocInfo->laaCb;
5300 cmMemset((U8 *)allocInfo, (U8)0, sizeof(RgSchDlRbAlloc));
5301 allocInfo->rnti = ue->ueId;
5303 allocInfo->laaCb = tmpCb;
5305 /* Fix: syed moving this to a common function for both scheduled
5306 * and non-scheduled UEs */
5307 cmnUe->outStndAlloc = 0;
5309 rgSCHCmnDlHqPResetTemp(hqP);
5312 } /* rgSCHCmnDlUeResetTemp */
5314 /***********************************************************
5316 * Func : rgSCHCmnUlUeResetTemp
5318 * Desc : Reset whatever variables were temporarily used
5319 * during UE scheduling.
5327 **********************************************************/
5329 PUBLIC Void rgSCHCmnUlUeResetTemp
5335 PUBLIC Void rgSCHCmnUlUeResetTemp(cell, ue)
5340 RgSchCmnUlUe *cmnUlUe = RG_SCH_CMN_GET_UL_UE(ue,cell);
5342 TRC2(rgSCHCmnUlUeResetTemp);
5344 cmMemset((U8 *)&cmnUlUe->alloc, (U8)0, sizeof(cmnUlUe->alloc));
5347 } /* rgSCHCmnUlUeResetTemp */
5352 * @brief This function fills the PDCCH information from dlProc.
5356 * Function: rgSCHCmnFillPdcch
5357 * Purpose: This function fills in the PDCCH information
5358 * obtained from the RgSchDlRbAlloc
5359 * during common channel scheduling(P, SI, RA - RNTI's).
5361 * Invoked by: Downlink Scheduler
5363 * @param[out] RgSchPdcch* pdcch
5364 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5369 PUBLIC Void rgSCHCmnFillPdcch
5373 RgSchDlRbAlloc *rbAllocInfo
5376 PUBLIC Void rgSCHCmnFillPdcch(cell, pdcch, rbAllocInfo)
5379 RgSchDlRbAlloc *rbAllocInfo;
5383 TRC2(rgSCHCmnFillPdcch);
5385 /* common channel pdcch filling,
5386 * only 1A and Local are supported */
5387 pdcch->rnti = rbAllocInfo->rnti;
5388 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
5389 switch(rbAllocInfo->dciFormat)
5391 #ifdef RG_5GTF /* ANOOP: ToDo: DCI format B1/B2 filling */
5392 case TFU_DCI_FORMAT_B1:
5395 pdcch->dci.u.formatB1Info.formatType = 0;
5396 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].cmnGrnt.xPDSCHRange;
5397 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].cmnGrnt.rbAssign;
5398 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = 0;
5399 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5400 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = 0;
5401 //pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].ndi;
5402 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].cmnGrnt.rv;
5403 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5404 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5405 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5406 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5407 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5408 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5409 //TODO_SID: Need to update
5410 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5411 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5412 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5413 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5414 //TODO_SID: Need to check.Currently setting 0(1 layer, ports(8) w/o OCC).
5415 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
5416 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].cmnGrnt.SCID;
5417 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
5418 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
5419 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
5421 break; /* case TFU_DCI_FORMAT_B1: */
5424 case TFU_DCI_FORMAT_B2:
5426 //printf(" RG_5GTF:: Pdcch filling with DCI format B2\n");
5428 break; /* case TFU_DCI_FORMAT_B2: */
5431 case TFU_DCI_FORMAT_1A:
5432 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
5434 /*Nprb indication at PHY for common Ch
5435 *setting least significant bit of tpc field to 1 if
5436 nPrb=3 and 0 otherwise. */
5437 if (rbAllocInfo->nPrb == 3)
5439 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 1;
5443 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = 0;
5445 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
5446 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
5447 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
5448 rbAllocInfo->tbInfo[0].imcs;
5449 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = 0;
5450 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = 0;
5452 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
5454 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
5455 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
5456 rbAllocInfo->allocInfo.raType2.rbStart,
5457 rbAllocInfo->allocInfo.raType2.numRb);
5460 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = \
5463 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
5464 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
5467 break; /* case TFU_DCI_FORMAT_1A: */
5468 case TFU_DCI_FORMAT_1:
5469 pdcch->dci.u.format1Info.tpcCmd = 0;
5470 /* Avoiding this check, as we don't support Type1 RA */
5472 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
5475 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
5476 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
5477 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
5479 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
5480 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
5482 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
5483 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
5485 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
5486 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
5490 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
5491 pdcch->dci.u.format1Info.allocInfo.ndi = 0;
5492 pdcch->dci.u.format1Info.allocInfo.mcs = rbAllocInfo->tbInfo[0].imcs;
5493 pdcch->dci.u.format1Info.allocInfo.rv = 0;
5495 pdcch->dci.u.format1Info.dai = 1;
5499 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Allocator's incorrect "
5500 "dciFormat Fill RNTI:%d",rbAllocInfo->rnti);
5508 * @brief This function finds whether the subframe is special subframe or not.
5512 * Function: rgSCHCmnIsSplSubfrm
5513 * Purpose: This function finds the subframe index of the special subframe
5514 * and finds whether the current DL index matches it or not.
5516 * Invoked by: Scheduler
5518 * @param[in] U8 splfrmCnt
5519 * @param[in] U8 curSubfrmIdx
5520 * @param[in] U8 periodicity
5521 * @param[in] RgSchTddSubfrmInfo *subfrmInfo
5526 PRIVATE Bool rgSCHCmnIsSplSubfrm
5531 RgSchTddSubfrmInfo *subfrmInfo
5534 PRIVATE Bool rgSCHCmnIsSplSubfrm(splfrmCnt, curSubfrmIdx, periodicity, subfrmInfo)
5538 RgSchTddSubfrmInfo *subfrmInfo;
5544 TRC2(rgSCHCmnIsSplSubfrm);
5548 if(periodicity == RG_SCH_CMN_5_MS_PRD)
5552 dlSfCnt = ((splfrmCnt-1)/2) *\
5553 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
5554 dlSfCnt = dlSfCnt + subfrmInfo->numFrmHf1;
5558 dlSfCnt = (splfrmCnt/2) * \
5559 (subfrmInfo->numFrmHf1 + subfrmInfo->numFrmHf2);
5564 dlSfCnt = splfrmCnt * subfrmInfo->numFrmHf1;
5566 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1 +\
5567 (periodicity*splfrmCnt - dlSfCnt);
5571 splfrmIdx = RG_SCH_CMN_SPL_SUBFRM_1;
5574 if(splfrmIdx == curSubfrmIdx)
5583 * @brief This function updates DAI or UL index.
5587 * Function: rgSCHCmnUpdHqAndDai
5588 * Purpose: Updates the DAI based on UL-DL Configuration
5589 * index and UE. It also updates the HARQ feedback
5590 * time and 'm' index.
5594 * @param[in] RgDlHqProcCb *hqP
5595 * @param[in] RgSchDlSf *subFrm
5596 * @param[in] RgSchDlHqTbCb *tbCb
5597 * @param[in] U8 tbAllocIdx
5602 PRIVATE Void rgSCHCmnUpdHqAndDai
5604 RgSchDlHqProcCb *hqP,
5606 RgSchDlHqTbCb *tbCb,
5610 PRIVATE Void rgSCHCmnUpdHqAndDai(hqP, subFrm, tbCb,tbAllocIdx)
5611 RgSchDlHqProcCb *hqP;
5613 RgSchDlHqTbCb *tbCb;
5617 RgSchUeCb *ue = hqP->hqE->ue;
5619 TRC2(rgSCHCmnUpdHqAndDai);
5623 /* set the time at which UE shall send the feedback
5624 * for this process */
5625 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5626 subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5627 tbCb->fdbkTime.subframe = subFrm->dlFdbkInfo.subframe;
5628 tbCb->m = subFrm->dlFdbkInfo.m;
5632 /* set the time at which UE shall send the feedback
5633 * for this process */
5634 tbCb->fdbkTime.sfn = (tbCb->timingInfo.sfn + \
5635 hqP->subFrm->dlFdbkInfo.sfnOffset) % RGSCH_MAX_SFN;
5636 tbCb->fdbkTime.subframe = hqP->subFrm->dlFdbkInfo.subframe;
5637 tbCb->m = hqP->subFrm->dlFdbkInfo.m;
5640 /* ccpu00132340-MOD- DAI needs to be updated for the first TB only */
5641 if(ue && !tbAllocIdx)
5643 Bool havePdcch = (tbCb->hqP->pdcch ? TRUE : FALSE);
5646 dlDai = rgSCHCmnUpdDai(ue, &tbCb->fdbkTime, tbCb->m, havePdcch,tbCb->hqP,
5649 {/* Non SPS occasions */
5650 tbCb->hqP->pdcch->dlDai = dlDai;
5651 /* hqP->ulDai is used for N1 resource filling
5652 * when SPS occasions are present in a bundle */
5653 tbCb->hqP->ulDai = tbCb->dai;
5654 tbCb->hqP->dlDai = dlDai;
5658 /* Updating pucchFdbkIdx for both PUCCH and PUSCH
5660 tbCb->pucchFdbkIdx = tbCb->hqP->ulDai;
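   /* Note on DAI semantics (per 36.213 for TDD): dlDai is the accumulated
    * count of DL assignments whose feedback maps to the same UL subframe
    * (bundling window). For example, three assignments in one window carry
    * DAI values 1, 2 and 3, and the 2-bit DCI field signals this count
    * modulo 4 so the UE can detect missed assignments. ulDai tracks the
    * count used for N1 PUCCH resource derivation, as noted above. */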
5667 * @brief This function updates DAI or UL index.
5671 * Function: rgSCHCmnUpdDai
5672 * Purpose: Updates the DAI in the ACK/NACK info; a valid
5673 * UE must be passed
5677 * @param[in] RgDlHqProcCb *hqP
5678 * @param[in] RgSchDlSf *subFrm
5679 * @param[in] RgSchDlHqTbCb *tbCb
5684 PUBLIC U8 rgSCHCmnUpdDai
5687 CmLteTimingInfo *fdbkTime,
5690 RgSchDlHqProcCb *hqP,
5694 PUBLIC U8 rgSCHCmnUpdDai(ue, fdbkTime, m, havePdcch,tbCb,servCellId,hqP,ulDai)
5696 CmLteTimingInfo *fdbkTime;
5699 RgSchDlHqProcCb *hqP;
5703 RgSchTddANInfo *anInfo;
5705 U8 ackNackFdbkArrSize;
5708 TRC2(rgSCHCmnUpdDai);
5713 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
5714 hqP->hqE->cell->cellId,
5717 servCellIdx = RGSCH_PCELL_INDEX;
5719 ackNackFdbkArrSize = hqP->hqE->cell->ackNackFdbkArrSize;
5721 {/* SPS on primary cell */
5722 servCellIdx = RGSCH_PCELL_INDEX;
5723 ackNackFdbkArrSize = ue->cell->ackNackFdbkArrSize;
5727 anInfo = rgSCHUtlGetUeANFdbkInfo(ue, fdbkTime,servCellIdx);
5729 /* If no ACK/NACK feedback already present, create a new one */
5732 anInfo = &ue->cellInfo[servCellIdx]->anInfo[ue->cellInfo[servCellIdx]->nextFreeANIdx];
5733 anInfo->sfn = fdbkTime->sfn;
5734 anInfo->subframe = fdbkTime->subframe;
5735 anInfo->latestMIdx = m;
5736 /* Fixing DAI value - ccpu00109162 */
5737 /* Handle TDD case as in MIMO definition of the function */
5743 anInfo->isSpsOccasion = FALSE;
5744 /* set the free Index to store Ack/Nack Information*/
5745 ue->cellInfo[servCellIdx]->nextFreeANIdx = (ue->cellInfo[servCellIdx]->nextFreeANIdx + 1) %
5751 anInfo->latestMIdx = m;
5752 /* Fixing DAI value - ccpu00109162 */
5753 /* Handle TDD case as in MIMO definition of the function */
5754 anInfo->ulDai = anInfo->ulDai + 1;
5757 anInfo->dlDai = anInfo->dlDai + 1;
5761 /* ignoring the Scell check,
5762 * for primary cell this field is unused*/
5765 anInfo->n1ResTpcIdx = hqP->tpc;
5769 {/* Not required for a release PDCCH */
5770 *ulDai = anInfo->ulDai;
5773 return (anInfo->dlDai);
5776 #endif /* ifdef LTE_TDD */
5778 PUBLIC U32 rgHqRvRetxCnt[4][2];
5779 PUBLIC U32 rgUlrate_grant;
5782 * @brief This function fills the HqP TB with rbAllocInfo.
5786 * Function: rgSCHCmnFillHqPTb
5787 * Purpose: This function fills in the HqP TB with rbAllocInfo.
5789 * Invoked by: the rgSCHCmnFillHqPPdcchDciFrmt* fill functions
5791 * @param[in] RgSchCellCb* cell
5792 * @param[in] RgSchDlRbAlloc *rbAllocInfo,
5793 * @param[in] U8 tbAllocIdx
5794 * @param[in] RgSchPdcch *pdcch
5800 PUBLIC Void rgSCHCmnFillHqPTb
5803 RgSchDlRbAlloc *rbAllocInfo,
5808 PUBLIC Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5810 RgSchDlRbAlloc *rbAllocInfo;
5816 PRIVATE Void rgSCHCmnFillHqPTb
5819 RgSchDlRbAlloc *rbAllocInfo,
5824 PRIVATE Void rgSCHCmnFillHqPTb(cell, rbAllocInfo, tbAllocIdx, pdcch)
5826 RgSchDlRbAlloc *rbAllocInfo;
5830 #endif /* LTEMAC_SPS */
5832 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
5833 RgSchDlTbAllocInfo *tbAllocInfo = &rbAllocInfo->tbInfo[tbAllocIdx];
5834 RgSchDlHqTbCb *tbInfo = tbAllocInfo->tbCb;
5835 RgSchDlHqProcCb *hqP = tbInfo->hqP;
5837 TRC2(rgSCHCmnFillHqPTb);
5839 /*ccpu00120365-ADD-if tb is disabled, set mcs=0,rv=1.
5840 * Relevant for DCI format 2 & 2A as per 36.213-7.1.7.2
5842 if ( tbAllocInfo->isDisabled)
5845 tbInfo->dlGrnt.iMcs = 0;
5846 tbInfo->dlGrnt.rv = 1;
5848 /* Fill for TB retransmission */
5849 else if (tbInfo->txCntr > 0)
5852 tbInfo->timingInfo = cmnCellDl->time;
5854 if ((tbInfo->isAckNackDtx == TFU_HQFDB_DTX))
5856 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5857 rgHqRvRetxCnt[tbInfo->dlGrnt.rv][tbInfo->tbIdx]++;
5861 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[++(tbInfo->ccchSchdInfo.rvIdx) & 0x03];
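         /* rgSchCmnDlRvTbl holds the DL redundancy-version sequence
          * (typically the {0, 2, 3, 1} cycling); ccchSchdInfo.rvIdx is
          * advanced on every retransmission and masked with 0x03 so the
          * lookup always stays within the 4-entry table. */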
5864 /* fill the scheduler information of hqProc */
5865 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5866 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx,hqP->tbInfo,tbInfo->tbIdx );
5867 rgSCHDhmHqTbRetx(hqP->hqE, tbInfo->timingInfo, hqP, tbInfo->tbIdx);
5869 /* Fill for TB transmission */
5872 /* Fill the HqProc */
5873 tbInfo->dlGrnt.iMcs = tbAllocInfo->imcs;
5874 tbInfo->tbSz = tbAllocInfo->bytesAlloc;
5875 tbInfo->timingInfo = cmnCellDl->time;
5877 tbInfo->dlGrnt.rv = rgSchCmnDlRvTbl[0];
5878 /* fill the scheduler information of hqProc */
5879 tbInfo->ccchSchdInfo.rvIdx = 0;
5880 tbInfo->ccchSchdInfo.totBytes = tbAllocInfo->bytesAlloc;
5881 /* DwPts Scheduling Changes Start */
5882 /* DwPts Scheduling Changes End */
5883 cell->measurements.dlBytesCnt += tbAllocInfo->bytesAlloc;
5886 /*ccpu00120365:-ADD-only add to subFrm list if tb is not disabled */
5887 if ( tbAllocInfo->isDisabled == FALSE )
5889 /* Set the number of transmitting SM layers for this TB */
5890 tbInfo->numLyrs = tbAllocInfo->noLyr;
5891 /* Set the TB state as WAITING to indicate TB has been
5892 * considered for transmission */
5893 tbInfo->state = HQ_TB_WAITING;
5894 hqP->subFrm = rbAllocInfo->dlSf;
5895 tbInfo->hqP->pdcch = pdcch;
5896 //tbInfo->dlGrnt.numRb = rbAllocInfo->rbsAlloc;
5897 rgSCHUtlDlHqPTbAddToTx(hqP->subFrm, hqP, tbInfo->tbIdx);
5903 * @brief This function fills the PDCCH DCI format B1/B2 information from dlProc.
5907 * Function: rgSCHCmnFillHqPPdcchDciFrmtB1B2
5908 * Purpose: This function fills in the PDCCH information
5909 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
5910 * for dedicated service scheduling. It also
5911 * obtains TPC to be filled in from the power module.
5912 * Assign the PDCCH to HQProc.
5914 * Invoked by: Downlink Scheduler
5916 * @param[in] RgSchCellCb* cell
5917 * @param[in] RgSchDlRbAlloc* rbAllocInfo
5918 * @param[in] RgDlHqProc* hqP
5919 * @param[out] RgSchPdcch *pdcch
5925 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2
5928 RgSchDlRbAlloc *rbAllocInfo,
5929 RgSchDlHqProcCb *hqP,
5934 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, pdcch, tpc)
5936 RgSchDlRbAlloc *rbAllocInfo;
5937 RgSchDlHqProcCb *hqP;
5943 TRC2(rgSCHCmnFillHqPPdcchDciFrmtB1B2)
5945 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
5946 //Currently hardcoding values here.
5947 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
5948 switch(rbAllocInfo->dciFormat)
5950 case TFU_DCI_FORMAT_B1:
5952 pdcch->dci.u.formatB1Info.formatType = 0;
5953 pdcch->dci.u.formatB1Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
5954 pdcch->dci.u.formatB1Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
5955 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
5956 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5957 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
5958 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
5959 pdcch->dci.u.formatB1Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5960 pdcch->dci.u.formatB1Info.CSI_BSI_BRI_Req = 0;
5961 pdcch->dci.u.formatB1Info.CSIRS_BRRS_TxTiming = 0;
5962 pdcch->dci.u.formatB1Info.CSIRS_BRRS_SymbIdx = 0;
5963 pdcch->dci.u.formatB1Info.CSIRS_BRRS_ProcInd = 0;
5964 pdcch->dci.u.formatB1Info.xPUCCH_TxTiming = 0;
5965 //TODO_SID: Need to update
5966 pdcch->dci.u.formatB1Info.freqResIdx_xPUCCH = 0;
5967 pdcch->dci.u.formatB1Info.beamSwitch = 0;
5968 pdcch->dci.u.formatB1Info.SRS_Config = 0;
5969 pdcch->dci.u.formatB1Info.SRS_Symbol = 0;
5970 //TODO_SID: Need to check.Currently setting 0(1 layer, ports(8) w/o OCC).
5971 pdcch->dci.u.formatB1Info.AntPorts_numLayers = 0;
5972 pdcch->dci.u.formatB1Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
5973 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
5974 pdcch->dci.u.formatB1Info.tpcCmd = 1; //tpc;
5975 pdcch->dci.u.formatB1Info.DL_PCRS = 0;
5978 case TFU_DCI_FORMAT_B2:
5980 pdcch->dci.u.formatB2Info.formatType = 1;
5981 pdcch->dci.u.formatB2Info.xPDSCHRange = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange;
5982 pdcch->dci.u.formatB2Info.RBAssign = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign;
5983 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.hqProcId = hqP->procId;
5984 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.mcs = rbAllocInfo->tbInfo[0].imcs;
5985 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
5986 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.RV = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
5987 pdcch->dci.u.formatB2Info.u.rbAssignB1Val324.bmiHqAckNack = 0;
5988 pdcch->dci.u.formatB2Info.CSI_BSI_BRI_Req = 0;
5989 pdcch->dci.u.formatB2Info.CSIRS_BRRS_TxTiming = 0;
5990 pdcch->dci.u.formatB2Info.CSIRS_BRRS_SymbIdx = 0;
5991 pdcch->dci.u.formatB2Info.CSIRS_BRRS_ProcInd = 0;
5992 pdcch->dci.u.formatB2Info.xPUCCH_TxTiming = 0;
5993 //TODO_SID: Need to update
5994 pdcch->dci.u.formatB2Info.freqResIdx_xPUCCH = 0;
5995 pdcch->dci.u.formatB2Info.beamSwitch = 0;
5996 pdcch->dci.u.formatB2Info.SRS_Config = 0;
5997 pdcch->dci.u.formatB2Info.SRS_Symbol = 0;
5998 //TODO_SID: Need to check.Currently setting 4(2 layer, ports(8,9) w/o OCC).
5999 pdcch->dci.u.formatB2Info.AntPorts_numLayers = 4;
6000 pdcch->dci.u.formatB2Info.SCID = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.SCID;
6001 //TODO_SID: Hardcoding TPC command to 1 i.e. No change
6002 pdcch->dci.u.formatB2Info.tpcCmd = 1; //tpc;
6003 pdcch->dci.u.formatB2Info.DL_PCRS = 0;
6007 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId," 5GTF_ERROR Allocator's incorrect "
6008 "dciFormat Fill RNTI:%d",rbAllocInfo->rnti);
6015 extern U32 totPcellSCell;
6016 extern U32 addedForScell;
6017 extern U32 addedForScell1;
6018 extern U32 addedForScell2;
6020 * @brief This function fills the PDCCH information from dlProc.
6024 * Function: rgSCHCmnFillHqPPdcch
6025 * Purpose: This function fills in the PDCCH information
6026 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6027 * for dedicated service scheduling. It also
6028 * obtains TPC to be filled in from the power module.
6029 * Assign the PDCCH to HQProc.
6031 * Invoked by: Downlink Scheduler
6033 * @param[in] RgSchCellCb* cell
6034 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6035 * @param[in] RgDlHqProc* hqP
6040 PUBLIC Void rgSCHCmnFillHqPPdcch
6043 RgSchDlRbAlloc *rbAllocInfo,
6044 RgSchDlHqProcCb *hqP
6047 PUBLIC Void rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP)
6049 RgSchDlRbAlloc *rbAllocInfo;
6050 RgSchDlHqProcCb *hqP;
6053 RgSchCmnDlCell *cmnCell = RG_SCH_CMN_GET_DL_CELL(cell);
6054 RgSchPdcch *pdcch = rbAllocInfo->pdcch;
6057 TRC2(rgSCHCmnFillHqPPdcch);
6062 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6069 tpc = rgSCHPwrPucchTpcForUe(cell, hqP->hqE->ue);
6071 /* Fix: syed moving this to a common function for both scheduled
6072 * and non-scheduled UEs */
6074 pdcch->ue = hqP->hqE->ue;
6075 if (hqP->hqE->ue->csgMmbrSta == FALSE)
6077 cmnCell->ncsgPrbCnt += rbAllocInfo->rbsAlloc;
6079 cmnCell->totPrbCnt += rbAllocInfo->rbsAlloc;
6082 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlPrbUsg +=
6083 rbAllocInfo->rbsAlloc;
6084 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw0iTbs +=
6085 rbAllocInfo->tbInfo[0].iTbs;
6086 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw0iTbs ++;
6087 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6088 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6091 totPcellSCell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6092 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6094 addedForScell += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6095 addedForScell1 += (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6097 printf (" Hqp %d cell %d addedForScell %lu addedForScell1 %lu sfn:sf %d:%d \n",
6099 hqP->hqE->cell->cellId,
6103 cell->crntTime.slot);
6107 hqP->hqE->cell->tenbStats->sch.dlPrbUsage[0] +=
6108 rbAllocInfo->rbsAlloc;
6109 hqP->hqE->cell->tenbStats->sch.dlSumCw0iTbs +=
6110 rbAllocInfo->tbInfo[0].iTbs;
6111 hqP->hqE->cell->tenbStats->sch.dlNumCw0iTbs ++;
6112 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6113 (rbAllocInfo->tbInfo[0].bytesAlloc << 3);
6114 if (rbAllocInfo->tbInfo[1].schdlngForTb)
6116 hqP->hqE->cell->tenbStats->sch.dlSumCw1iTbs +=
6117 rbAllocInfo->tbInfo[1].iTbs;
6118 hqP->hqE->cell->tenbStats->sch.dlNumCw1iTbs ++;
6119 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlSumCw1iTbs +=
6120 rbAllocInfo->tbInfo[1].iTbs;
6121 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlNumCw1iTbs ++;
6122 hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt +=
6123 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6127 if(RG_SCH_IS_CELL_SEC(hqP->hqE->ue, cell))
6129 addedForScell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6130 addedForScell2 += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6132 printf (" Hqp %d cell %d addedForScell %lu addedForScell2 %lu \n",
6134 hqP->hqE->cell->cellId,
6139 totPcellSCell += (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6143 hqP->hqE->cell->tenbStats->sch.dlTtlTpt +=
6144 (rbAllocInfo->tbInfo[1].bytesAlloc << 3);
6147 printf ("add DL TPT is %lu sfn:sf %d:%d \n", hqP->hqE->ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(hqP->hqE->cell)].dlTpt ,
6149 cell->crntTime.slot);
6155 pdcch->rnti = rbAllocInfo->rnti;
6156 pdcch->dci.dciFormat = rbAllocInfo->dciFormat;
6157 /* Update subframe and pdcch info in HqTb control block */
6158 switch(rbAllocInfo->dciFormat)
6161 case TFU_DCI_FORMAT_B1:
6162 case TFU_DCI_FORMAT_B2:
6164 // printf(" RG_5GTF:: Pdcch filling with DCI format B1/B2\n");
6165 rgSCHCmnFillHqPPdcchDciFrmtB1B2(cell, rbAllocInfo, hqP, \
6171 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6172 "Allocator's incorrect dciForamt Fill for RNTI:%d",rbAllocInfo->rnti);
6179 * @brief This function fills the PDCCH DCI format 1 information from dlProc.
6183 * Function: rgSCHCmnFillHqPPdcchDciFrmt1
6184 * Purpose: This function fills in the PDCCH information
6185 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6186 * for dedicated service scheduling. It also
6187 * obtains TPC to be filled in from the power module.
6188 * Assign the PDCCH to HQProc.
6190 * Invoked by: Downlink Scheduler
6192 * @param[in] RgSchCellCb* cell
6193 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6194 * @param[in] RgDlHqProc* hqP
6195 * @param[out] RgSchPdcch *pdcch
6202 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1
6205 RgSchDlRbAlloc *rbAllocInfo,
6206 RgSchDlHqProcCb *hqP,
6211 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1(cell, rbAllocInfo, hqP, pdcch, tpc)
6213 RgSchDlRbAlloc *rbAllocInfo;
6214 RgSchDlHqProcCb *hqP;
6221 RgSchTddANInfo *anInfo;
6225 /* For activation or reactivation,
6226 * Harq ProcId should be 0 */
6227 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6230 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1)
6232 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6233 pdcch->dci.u.format1Info.tpcCmd = tpc;
6234 /* Avoiding this check, as we don't support Type 1 RA */
6236 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6239 pdcch->dci.u.format1Info.allocInfo.isAllocType0 = TRUE;
6240 pdcch->dci.u.format1Info.allocInfo.resAllocMap[0] =
6241 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6243 pdcch->dci.u.format1Info.allocInfo.resAllocMap[1] =
6244 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6246 pdcch->dci.u.format1Info.allocInfo.resAllocMap[2] =
6247 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6249 pdcch->dci.u.format1Info.allocInfo.resAllocMap[3] =
6250 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
6255 if ((!(hqP->tbInfo[0].txCntr)) &&
6256 (cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6257 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6258 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV)))
6261 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6265 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6268 pdcch->dci.u.format1Info.allocInfo.harqProcId = hqP->procId;
6271 pdcch->dci.u.format1Info.allocInfo.ndi =
6272 rbAllocInfo->tbInfo[0].tbCb->ndi;
6273 pdcch->dci.u.format1Info.allocInfo.mcs =
6274 rbAllocInfo->tbInfo[0].imcs;
6275 pdcch->dci.u.format1Info.allocInfo.rv =
6276 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6278 if(hqP->hqE->ue != NULLP)
6281 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6282 hqP->hqE->cell->cellId,
6285 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6286 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6288 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6289 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6294 pdcch->dci.u.format1Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6298 /* Fixing DAI value - ccpu00109162 */
6299 pdcch->dci.u.format1Info.dai = RG_SCH_MAX_DAI_IDX;
6305 /* always 0 for RACH */
6306 pdcch->dci.u.format1Info.allocInfo.harqProcId = 0;
6308 /* Fixing DAI value - ccpu00109162 */
6309 pdcch->dci.u.format1Info.dai = 1;
6318 * @brief This function fills the PDCCH DCI format 1A information from dlProc.
6322 * Function: rgSCHCmnFillHqPPdcchDciFrmt1A
6323 * Purpose: This function fills in the PDCCH information
6324 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6325 * for dedicated service scheduling. It also
6326 * obtains TPC to be filled in from the power module.
6327 * Assign the PDCCH to HQProc.
6329 * Invoked by: Downlink Scheduler
6331 * @param[in] RgSchCellCb* cell
6332 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6333 * @param[in] RgDlHqProc* hqP
6334 * @param[out] RgSchPdcch *pdcch
6340 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A
6343 RgSchDlRbAlloc *rbAllocInfo,
6344 RgSchDlHqProcCb *hqP,
6349 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1A(cell, rbAllocInfo, hqP, pdcch, tpc)
6351 RgSchDlRbAlloc *rbAllocInfo;
6352 RgSchDlHqProcCb *hqP;
6359 RgSchTddANInfo *anInfo;
6363 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6366 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1A)
6368 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6369 pdcch->dci.u.format1aInfo.isPdcchOrder = FALSE;
6370 pdcch->dci.u.format1aInfo.t.pdschInfo.tpcCmd = tpc;
6371 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.mcs = \
6372 rbAllocInfo->tbInfo[0].imcs;
6373 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres = TRUE;
6375 if ((!(hqP->tbInfo[0].txCntr)) &&
6376 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6377 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6378 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6381 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val = 0;
6385 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val
6389 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.val =
6392 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.ndi = \
6393 rbAllocInfo->tbInfo[0].tbCb->ndi;
6394 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv = \
6395 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6396 /* As of now, we do not support Distributed allocations */
6397 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.isLocal = TRUE;
6398 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.nGap2.pres = NOTPRSNT;
6399 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.type =
6401 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.alloc.u.riv =
6402 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6403 rbAllocInfo->allocInfo.raType2.rbStart,
6404 rbAllocInfo->allocInfo.raType2.numRb);
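      /* RIV encoding note (36.213 sec 7.1.6.3, which rgSCHCmnCalcRiv is
       * assumed to implement): for a Type 2 localized allocation of L
       * contiguous RBs starting at RB_start in an N-RB downlink,
       * RIV = N*(L-1) + RB_start             when (L-1) <= floor(N/2),
       * RIV = N*(N-L+1) + (N-1-RB_start)     otherwise. */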
6406 if(hqP->hqE->ue != NULLP)
6409 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6410 hqP->hqE->cell->cellId,
6412 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6413 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6415 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6416 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6419 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6422 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val =
6423 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6427 /* Fixing DAI value - ccpu00109162 */
6428 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = RG_SCH_MAX_DAI_IDX;
6429 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6430 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6437 /* always 0 for RACH */
6438 pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.harqProcId.pres
6441 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.pres = TRUE;
6442 /* Fixing DAI value - ccpu00109162 */
6443 pdcch->dci.u.format1aInfo.t.pdschInfo.dai.val = 1;
6451 * @brief This function fills the PDCCH DCI format 1B information from dlProc.
6455 * Function: rgSCHCmnFillHqPPdcchDciFrmt1B
6456 * Purpose: This function fills in the PDCCH information
6457 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6458 * for dedicated service scheduling. It also
6459 * obtains TPC to be filled in from the power module.
6460 * Assign the PDCCH to HQProc.
6462 * Invoked by: Downlink Scheduler
6464 * @param[in] RgSchCellCb* cell
6465 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6466 * @param[in] RgDlHqProc* hqP
6467 * @param[out] RgSchPdcch *pdcch
6473 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B
6476 RgSchDlRbAlloc *rbAllocInfo,
6477 RgSchDlHqProcCb *hqP,
6482 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt1B(cell, rbAllocInfo, hqP, pdcch, tpc)
6484 RgSchDlRbAlloc *rbAllocInfo;
6485 RgSchDlHqProcCb *hqP;
6492 RgSchTddANInfo *anInfo;
6496 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6499 TRC2(rgSCHCmnFillHqPPdcchDciFrmt1B)
6501 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6502 pdcch->dci.u.format1bInfo.tpcCmd = tpc;
6503 pdcch->dci.u.format1bInfo.allocInfo.mcs = \
6504 rbAllocInfo->tbInfo[0].imcs;
6506 if ((!(hqP->tbInfo[0].txCntr)) &&
6507 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6508 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6509 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6512 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = 0;
6516 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
6519 pdcch->dci.u.format1bInfo.allocInfo.harqProcId = hqP->procId;
6521 pdcch->dci.u.format1bInfo.allocInfo.ndi = \
6522 rbAllocInfo->tbInfo[0].tbCb->ndi;
6523 pdcch->dci.u.format1bInfo.allocInfo.rv = \
6524 rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6525 /* As of now, we do not support Distributed allocations */
6526 pdcch->dci.u.format1bInfo.allocInfo.isLocal = TRUE;
6527 pdcch->dci.u.format1bInfo.allocInfo.nGap2.pres = NOTPRSNT;
6528 pdcch->dci.u.format1bInfo.allocInfo.alloc.type =
6530 pdcch->dci.u.format1bInfo.allocInfo.alloc.u.riv =
6531 rgSCHCmnCalcRiv (cell->bwCfg.dlTotalBw,
6532 rbAllocInfo->allocInfo.raType2.rbStart,
6533 rbAllocInfo->allocInfo.raType2.numRb);
6534 /* Fill precoding Info */
6535 pdcch->dci.u.format1bInfo.allocInfo.pmiCfm = \
6536 rbAllocInfo->mimoAllocInfo.precIdxInfo >> 4;
6537 pdcch->dci.u.format1bInfo.allocInfo.tPmi = \
6538 rbAllocInfo->mimoAllocInfo.precIdxInfo & 0x0F;
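      /* precIdxInfo packs the PMI confirmation flag in its upper nibble and
       * the transmitted PMI (TPMI) in its lower nibble, hence the >> 4 and
       * & 0x0F split above. */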
6540 if(hqP->hqE->ue != NULLP)
6543 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6544 hqP->hqE->cell->cellId,
6546 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6547 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6549 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6550 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6555 pdcch->dci.u.format1bInfo.dai =
6556 RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6560 pdcch->dci.u.format1bInfo.dai = RG_SCH_MAX_DAI_IDX;
6561 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6562 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6573 * @brief This function fills the PDCCH DCI format 2 information from dlProc.
6577 * Function: rgSCHCmnFillHqPPdcchDciFrmt2
6578 * Purpose: This function fills in the PDCCH information
6579 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6580 * for dedicated service scheduling. It also
6581 * obtains TPC to be filled in from the power module.
6582 * Assign the PDCCH to HQProc.
6584 * Invoked by: Downlink Scheduler
6586 * @param[in] RgSchCellCb* cell
6587 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6588 * @param[in] RgDlHqProc* hqP
6589 * @param[out] RgSchPdcch *pdcch
6595 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2
6598 RgSchDlRbAlloc *rbAllocInfo,
6599 RgSchDlHqProcCb *hqP,
6604 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2(cell, rbAllocInfo, hqP, pdcch, tpc)
6606 RgSchDlRbAlloc *rbAllocInfo;
6607 RgSchDlHqProcCb *hqP;
6614 RgSchTddANInfo *anInfo;
6618 /* ccpu00119023-ADD-For activation or reactivation,
6619 * Harq ProcId should be 0 */
6620 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6623 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2)
6625 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6626 /*ccpu00120365:-ADD-call also if tb is disabled */
6627 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6628 rbAllocInfo->tbInfo[1].isDisabled)
6630 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6632 pdcch->dci.u.format2Info.tpcCmd = tpc;
6633 /* Avoiding this check, as we don't support Type 1 RA */
6635 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6638 pdcch->dci.u.format2Info.allocInfo.isAllocType0 = TRUE;
6639 pdcch->dci.u.format2Info.allocInfo.resAllocMap[0] =
6640 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6642 pdcch->dci.u.format2Info.allocInfo.resAllocMap[1] =
6643 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6645 pdcch->dci.u.format2Info.allocInfo.resAllocMap[2] =
6646 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6648 pdcch->dci.u.format2Info.allocInfo.resAllocMap[3] =
6649 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
6654 if ((!(hqP->tbInfo[0].txCntr)) &&
6655 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6656 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6657 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6660 pdcch->dci.u.format2Info.allocInfo.harqProcId = 0;
6664 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6667 pdcch->dci.u.format2Info.allocInfo.harqProcId = hqP->procId;
6669 /* Initialize the TB info for both the TBs */
6670 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].mcs = 0;
6671 pdcch->dci.u.format2Info.allocInfo.tbInfo[0].rv = 1;
6672 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].mcs = 0;
6673 pdcch->dci.u.format2Info.allocInfo.tbInfo[1].rv = 1;
6674 /* Fill tbInfo for scheduled TBs */
6675 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6676 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6677 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6678 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6679 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6680 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6681 /* If we reach this function, it is safely assumed that
6682 * rbAllocInfo->tbInfo[0] always has valid, non-default values;
6683 * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6684 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6686 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6687 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6688 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6689 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6690 pdcch->dci.u.format2Info.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6691 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6693 pdcch->dci.u.format2Info.allocInfo.transSwap =
6694 rbAllocInfo->mimoAllocInfo.swpFlg;
6695 pdcch->dci.u.format2Info.allocInfo.precoding =
6696 rbAllocInfo->mimoAllocInfo.precIdxInfo;
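   /* The tbInfo[] entries above are indexed by the HARQ TB index
    * (tbCb->tbIdx) rather than by allocation order, so a retransmitted or
    * swapped TB always lands in its own codeword position; any entry not
    * overwritten keeps the "disabled" defaults (mcs = 0, rv = 1) set
    * earlier. */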
6698 if(hqP->hqE->ue != NULLP)
6702 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6703 hqP->hqE->cell->cellId,
6705 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6706 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6708 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6709 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6714 pdcch->dci.u.format2Info.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6718 pdcch->dci.u.format2Info.dai = RG_SCH_MAX_DAI_IDX;
6719 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6720 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6730 * @brief This function fills the PDCCH DCI format 2A information from dlProc.
6734 * Function: rgSCHCmnFillHqPPdcchDciFrmt2A
6735 * Purpose: This function fills in the PDCCH information
6736 * obtained from the RgSchDlHqProcCb and RgSchDlRbAlloc
6737 * for dedicated service scheduling. It also
6738 * obtains TPC to be filled in from the power module.
6739 * Assign the PDCCH to HQProc.
6741 * Invoked by: Downlink Scheduler
6743 * @param[in] RgSchCellCb* cell
6744 * @param[in] RgSchDlRbAlloc* rbAllocInfo
6745 * @param[in] RgDlHqProc* hqP
6746 * @param[out] RgSchPdcch *pdcch
6752 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A
6755 RgSchDlRbAlloc *rbAllocInfo,
6756 RgSchDlHqProcCb *hqP,
6761 PRIVATE Void rgSCHCmnFillHqPPdcchDciFrmt2A(cell, rbAllocInfo, hqP, pdcch, tpc)
6763 RgSchDlRbAlloc *rbAllocInfo;
6764 RgSchDlHqProcCb *hqP;
6770 RgSchTddANInfo *anInfo;
6774 RgSchCmnDlHqProc *cmnHqDl = RG_SCH_CMN_GET_DL_HQP(hqP);
6777 TRC2(rgSCHCmnFillHqPPdcchDciFrmt2A)
6779 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 0, pdcch);
6780 /*ccpu00120365:-ADD-call also if tb is disabled */
6781 if (rbAllocInfo->tbInfo[1].schdlngForTb ||
6782 rbAllocInfo->tbInfo[1].isDisabled)
6785 rgSCHCmnFillHqPTb(cell, rbAllocInfo, 1, pdcch);
6788 pdcch->dci.u.format2AInfo.tpcCmd = tpc;
6789 /* Avoiding this check, as we don't support Type 1 RA */
6791 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
6794 pdcch->dci.u.format2AInfo.allocInfo.isAllocType0 = TRUE;
6795 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[0] =
6796 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 24)
6798 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[1] =
6799 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 16)
6801 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[2] =
6802 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask >> 8)
6804 pdcch->dci.u.format2AInfo.allocInfo.resAllocMap[3] =
6805 ((rbAllocInfo->allocInfo.raType0.dlAllocBitMask & 0x000000ff));
6810 if ((!(hqP->tbInfo[0].txCntr)) &&
6811 ( cmnHqDl != (RgSchCmnDlHqProc*)NULLP &&
6812 ((cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_ACTV) ||
6813 (cmnHqDl->spsAction & RG_SCH_CMN_SPS_DL_REACTV))
6816 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = 0;
6820 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6823 pdcch->dci.u.format2AInfo.allocInfo.harqProcId = hqP->procId;
6825 /* Initialize the TB info for both the TBs */
6826 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].mcs = 0;
6827 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[0].rv = 1;
6828 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].mcs = 0;
6829 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[1].rv = 1;
6830 /* Fill tbInfo for scheduled TBs */
6831 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6832 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[0].tbCb->ndi;
6833 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6834 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[0].imcs;
6835 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[0].\
6836 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[0].tbCb->dlGrnt.rv;
6837 /* If we reach this function, it is safely assumed that
6838 * rbAllocInfo->tbInfo[0] always has valid, non-default values;
6839 * rbAllocInfo->tbInfo[1]'s scheduling is optional */
6841 if (rbAllocInfo->tbInfo[1].schdlngForTb == TRUE)
6843 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6844 tbCb->tbIdx].ndi = rbAllocInfo->tbInfo[1].tbCb->ndi;
6845 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6846 tbCb->tbIdx].mcs = rbAllocInfo->tbInfo[1].imcs;
6847 pdcch->dci.u.format2AInfo.allocInfo.tbInfo[rbAllocInfo->tbInfo[1].\
6848 tbCb->tbIdx].rv = rbAllocInfo->tbInfo[1].tbCb->dlGrnt.rv;
6851 pdcch->dci.u.format2AInfo.allocInfo.transSwap =
6852 rbAllocInfo->mimoAllocInfo.swpFlg;
6853 pdcch->dci.u.format2AInfo.allocInfo.precoding =
6854 rbAllocInfo->mimoAllocInfo.precIdxInfo;
6856 if(hqP->hqE->ue != NULLP)
6859 U8 servCellIdx = rgSchUtlGetServCellIdx(hqP->hqE->cell->instIdx,
6860 hqP->hqE->cell->cellId,
6862 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6863 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),servCellIdx);
6865 anInfo = rgSCHUtlGetUeANFdbkInfo(hqP->hqE->ue,
6866 &(rbAllocInfo->tbInfo[0].tbCb->fdbkTime),0);
6871 pdcch->dci.u.format2AInfo.dai = RG_SCH_GET_DAI_VALUE(anInfo->dlDai);
6875 pdcch->dci.u.format2AInfo.dai = RG_SCH_MAX_DAI_IDX;
6876 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
6877 "PDCCH is been scheduled without updating anInfo RNTI:%d",
6889 * @brief init of Sch vars.
6893 * Function: rgSCHCmnInitVars
6894 * Purpose: Initialization of various UL subframe indices
6896 * @param[in] RgSchCellCb *cell
6901 PRIVATE Void rgSCHCmnInitVars
6906 PRIVATE Void rgSCHCmnInitVars(cell)
6910 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6912 TRC2(rgSCHCmnInitVars);
6914 cellUl->idx = RGSCH_INVALID_INFO;
6915 cellUl->schdIdx = RGSCH_INVALID_INFO;
6916 cellUl->schdHqProcIdx = RGSCH_INVALID_INFO;
6917 cellUl->msg3SchdIdx = RGSCH_INVALID_INFO;
6919 cellUl->emtcMsg3SchdIdx = RGSCH_INVALID_INFO;
6921 cellUl->msg3SchdHqProcIdx = RGSCH_INVALID_INFO;
6922 cellUl->rcpReqIdx = RGSCH_INVALID_INFO;
6923 cellUl->hqFdbkIdx[0] = RGSCH_INVALID_INFO;
6924 cellUl->hqFdbkIdx[1] = RGSCH_INVALID_INFO;
6925 cellUl->reTxIdx[0] = RGSCH_INVALID_INFO;
6926 cellUl->reTxIdx[1] = RGSCH_INVALID_INFO;
6927 /* Stack Crash problem for TRACE5 Changes. Added the return below */
6934 * @brief Per-TTI update of the scheduler variables.
6938 * Function: rgSCHCmnUpdVars
6939 * Purpose: Per-TTI update of the scheduler variables.
6941 * @param[in] RgSchCellCb *cell
6946 PUBLIC Void rgSCHCmnUpdVars
6951 PUBLIC Void rgSCHCmnUpdVars(cell)
6955 CmLteTimingInfo timeInfo;
6956 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
6959 TRC2(rgSCHCmnUpdVars);
6961 idx = (cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.slot);
6962 cellUl->idx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
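   /* The absolute subframe count (sfn * subframes-per-frame + slot) is
    * folded into a circular index of RG_SCH_CMN_UL_NUM_SF entries; the
    * scheduling, msg3, reception-request and HARQ-feedback indices computed
    * below are fixed offsets from this base, taken modulo the same window. */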
6964 printf("idx %d cellUl->idx %d RGSCH_NUM_SUB_FRAMES_5G %d time(%d %d) \n",idx,cellUl->idx ,RGSCH_NUM_SUB_FRAMES_5G,cell->crntTime.sfn,cell->crntTime.slot);
6966 /* Need to schedule for after SCHED_DELTA */
6967 /* UL allocation has been advanced by 1 subframe
6968 * so that we do not wrap around and send feedback
6969 * before the data is even received by the PHY */
6970 /* Introduced timing delta for UL control */
6971 idx = (cellUl->idx + TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA);
6972 cellUl->schdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6974 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
6975 TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA)
6976 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
6978 /* ccpu00127193 filling schdTime for logging and enhancement purpose*/
6979 cellUl->schdTime = timeInfo;
6981 /* msg3 scheduling two subframes after general scheduling */
6982 idx = (cellUl->idx + RG_SCH_CMN_DL_DELTA + RGSCH_RARSP_MSG3_DELTA);
6983 cellUl->msg3SchdIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6985 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,
6986 RG_SCH_CMN_DL_DELTA+ RGSCH_RARSP_MSG3_DELTA)
6987 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
6989 idx = (cellUl->idx + TFU_RECPREQ_DLDELTA);
6991 cellUl->rcpReqIdx = ((idx) % (RG_SCH_CMN_UL_NUM_SF));
6993 /* Downlink HARQ feedback comes some time after data reception / HARQ failure */
6994 /* Since feedback happens prior to scheduling being called, we add 1 to */
6995 /* take care of getting the correct subframe for feedback */
6996 idx = (cellUl->idx - TFU_CRCIND_ULDELTA + RG_SCH_CMN_UL_NUM_SF);
6998 printf("Finally setting cellUl->hqFdbkIdx[0] = %d TFU_CRCIND_ULDELTA %d RG_SCH_CMN_UL_NUM_SF %d\n",idx,TFU_CRCIND_ULDELTA,RG_SCH_CMN_UL_NUM_SF);
7000 cellUl->hqFdbkIdx[0] = (idx % (RG_SCH_CMN_UL_NUM_SF));
7002 idx = ((cellUl->schdIdx) % (RG_SCH_CMN_UL_NUM_SF));
7004 cellUl->reTxIdx[0] = (U8) idx;
7006 printf("cellUl->hqFdbkIdx[0] %d cellUl->reTxIdx[0] %d \n",cellUl->hqFdbkIdx[0], cellUl->reTxIdx[0] );
7008 /* RACHO: update cmn sched specific RACH variables,
7009 * mainly the prachMaskIndex */
7010 rgSCHCmnUpdRachParam(cell);
7019 * @brief To get uplink subframe index associated with current PHICH
7024 * Function: rgSCHCmnGetPhichUlSfIdx
7025 * Purpose: Gets uplink subframe index associated with current PHICH
7026 * transmission based on SFN and subframe no
7028 * @param[in] CmLteTimingInfo *timeInfo
7029 * @param[in] RgSchCellCb *cell
7034 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx
7036 CmLteTimingInfo *timeInfo,
7040 PUBLIC U8 rgSCHCmnGetPhichUlSfIdx(timeInfo, cell)
7041 CmLteTimingInfo *timeInfo;
7045 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7047 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7053 TRC2(rgSCHCmnGetPhichUlSfIdx);
7055 dlsf = rgSCHUtlSubFrmGet(cell, *timeInfo);
7057 if(dlsf->phichOffInfo.sfnOffset == RGSCH_INVALID_INFO)
7059 return (RGSCH_INVALID_INFO);
7061 subframe = dlsf->phichOffInfo.subframe;
7063 sfn = (RGSCH_MAX_SFN + timeInfo->sfn -
7064 dlsf->phichOffInfo.sfnOffset) % RGSCH_MAX_SFN;
7066 /* ccpu00130980: numUlSf(U16) parameter added to avoid integer
7067 * wrap case such that idx will be proper*/
7068 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7069 numUlSf = ((numUlSf * sfn) + rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subframe]) - 1;
7070 idx = numUlSf % (cellUl->numUlSubfrms);
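   /* Sketch of the index math above: the table lookup with
    * RGSCH_NUM_SUB_FRAMES-1 gives the UL subframes per radio frame, so
    * numUlSf counts every UL subframe from SFN 0 up to the referenced
    * subframe (zero-based via the -1); the final modulo selects the matching
    * entry in the cell's circular UL subframe array. */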
7076 * @brief To get uplink subframe index.
7081 * Function: rgSCHCmnGetUlSfIdx
7082 * Purpose: Gets uplink subframe index based on SFN and subframe number.
7084 * @param[in] CmLteTimingInfo *timeInfo
7085 * @param[in] RgSchCellCb *cell
7090 PUBLIC U8 rgSCHCmnGetUlSfIdx
7092 CmLteTimingInfo *timeInfo,
7096 PUBLIC U8 rgSCHCmnGetUlSfIdx(timeInfo, cell)
7097 CmLteTimingInfo *timeInfo;
7101 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
7102 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7106 TRC2(rgSCHCmnGetUlSfIdx);
7108 /* ccpu00130980: numUlSf(U16) parameter added to avoid integer
7109 * wrap case such that idx will be proper*/
7110 numUlSf = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7111 numUlSf = ((numUlSf * timeInfo->sfn) + \
7112 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->subframe]) - 1;
7113 idx = numUlSf % (cellUl->numUlSubfrms);
7121 * @brief To get the uplink HARQ process index.
7126 * Function: rgSCHCmnGetUlHqProcIdx
7127 * Purpose: Gets the uplink HARQ process index based on SFN and subframe number.
7129 * @param[in] CmLteTimingInfo *timeInfo
7130 * @param[in] RgSchCellCb *cell
7135 PUBLIC U8 rgSCHCmnGetUlHqProcIdx
7137 CmLteTimingInfo *timeInfo,
7141 PUBLIC U8 rgSCHCmnGetUlHqProcIdx(timeInfo, cell)
7142 CmLteTimingInfo *timeInfo;
7150 numUlSf = (timeInfo->sfn * RGSCH_NUM_SUB_FRAMES_5G + timeInfo->slot);
7151 procId = numUlSf % RGSCH_NUM_UL_HQ_PROC;
7153 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
7154 /*ccpu00130639 - MOD - To get correct UL HARQ Proc IDs for all UL/DL Configs*/
7156 S8 sfnCycle = cell->tddHqSfnCycle;
7157 U8 numUlHarq = rgSchTddUlNumHarqProcTbl[ulDlCfgIdx]
7159 /* TRACE 5 Changes */
7160 TRC2(rgSCHCmnGetUlHqProcIdx);
7162 /* Calculate the number of UL SF in one SFN */
7163 numUlSfInSfn = RGSCH_NUM_SUB_FRAMES -
7164 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
7166 /* Check for the SFN wrap around case */
7167 if(cell->crntTime.sfn == 1023 && timeInfo->sfn == 0)
7171 else if(cell->crntTime.sfn == 0 && timeInfo->sfn == 1023)
7173 /* sfnCycle decremented by 1 */
7174 sfnCycle = (sfnCycle + numUlHarq-1) % numUlHarq;
7176 /* Calculate the total number of UL sf */
7177 /* -1 is done since uplink sf are counted from 0 */
7178 numUlSf = numUlSfInSfn * (timeInfo->sfn + (sfnCycle*1024)) +
7179 rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][timeInfo->slot] - 1;
7181 procId = numUlSf % numUlHarq;
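   /* sfnCycle tracks 1024-frame SFN wrap-arounds (kept modulo numUlHarq,
    * which is sufficient since only numUlSf % numUlHarq is used) so that the
    * running UL subframe count stays consistent across the wrap; the HARQ
    * process ID is then this count modulo the number of UL HARQ processes
    * for the UL/DL configuration. */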
7187 /* UL_ALLOC_CHANGES */
7188 /***********************************************************
7190 * Func : rgSCHCmnUlFreeAlloc
7192 * Desc : Free an allocation - invokes UHM and releases
7193 * alloc for the scheduler
7194 * Doesn't need the subframe as an argument
7202 **********************************************************/
7204 PUBLIC Void rgSCHCmnUlFreeAlloc
7210 PUBLIC Void rgSCHCmnUlFreeAlloc(cell, alloc)
7212 RgSchUlAlloc *alloc;
7215 RgSchUlHqProcCb *hqProc;
7216 TRC2(rgSCHCmnUlFreeAlloc);
7220 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7221 if ((alloc->hqProc->remTx == 0) &&
7222 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7225 RgSchRaCb *raCb = alloc->raCb;
7226 rgSCHUhmFreeProc(alloc->hqProc, cell);
7227 rgSCHUtlUlAllocRelease(alloc);
7228 rgSCHRamDelRaCb(cell, raCb, TRUE);
7233 hqProc = alloc->hqProc;
7234 rgSCHUtlUlAllocRelease(alloc);
7235 rgSCHUhmFreeProc(hqProc, cell);
7240 /***********************************************************
7242 * Func : rgSCHCmnUlFreeAllocation
7244 * Desc : Free an allocation - invokes UHM and releases
7245 * alloc for the scheduler
7253 **********************************************************/
7255 PUBLIC Void rgSCHCmnUlFreeAllocation
7262 PUBLIC Void rgSCHCmnUlFreeAllocation(cell, sf, alloc)
7265 RgSchUlAlloc *alloc;
7268 RgSchUlHqProcCb *hqProc;
7270 TRC2(rgSCHCmnUlFreeAllocation);
7274 /* Fix : Release RNTI upon MSG3 max TX failure for non-HO UEs */
7275 if ((alloc->hqProc->remTx == 0) &&
7276 (alloc->hqProc->rcvdCrcInd == FALSE) &&
7279 RgSchRaCb *raCb = alloc->raCb;
7280 rgSCHUhmFreeProc(alloc->hqProc, cell);
7281 rgSCHUtlUlAllocRls(sf, alloc);
7282 rgSCHRamDelRaCb(cell, raCb, TRUE);
7287 hqProc = alloc->hqProc;
7288 rgSCHUhmFreeProc(hqProc, cell);
7290 /* re-setting the PRB count while freeing the allocations */
7293 rgSCHUtlUlAllocRls(sf, alloc);
7299 * @brief This function implements PDCCH allocation for an UE
7300 * in the currently running subframe.
7304 * Function: rgSCHCmnPdcchAllocCrntSf
7305 * Purpose: This function determines current DL subframe
7306 * and UE DL CQI to call the actual pdcch allocator
7308 * Note that this function is called only
7309 * when PDCCH request needs to be made during
7310 * uplink scheduling.
7312 * Invoked by: Scheduler
7314 * @param[in] RgSchCellCb *cell
7315 * @param[in] RgSchUeCb *ue
7316 * @return RgSchPdcch *
7317 * -# NULLP when unsuccessful
7320 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf
7326 PUBLIC RgSchPdcch *rgSCHCmnPdcchAllocCrntSf(cell, ue)
7331 CmLteTimingInfo frm = cell->crntTime;
7332 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7334 RgSchPdcch *pdcch = NULLP;
7336 TRC2(rgSCHCmnPdcchAllocCrntSf);
7337 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7338 sf = rgSCHUtlSubFrmGet(cell, frm);
7341 if (ue->allocCmnUlPdcch)
7343 pdcch = rgSCHCmnCmnPdcchAlloc(cell, sf);
7344 /* Since CRNTI Scrambled */
7347 pdcch->dciNumOfBits = ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
7353 //pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, y, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_0, FALSE);
7354 pdcch = rgSCHCmnPdcchAlloc(cell, ue, sf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_A1, FALSE);
7359 /***********************************************************
7361 * Func : rgSCHCmnUlAllocFillNdmrs
7363 * Desc : Determines and fills N_dmrs for a UE uplink
7368 * Notes: N_dmrs determination is straightforward, so
7369 * it is configured per subband
7373 **********************************************************/
7375 PUBLIC Void rgSCHCmnUlAllocFillNdmrs
7377 RgSchCmnUlCell *cellUl,
7381 PUBLIC Void rgSCHCmnUlAllocFillNdmrs(cellUl, alloc)
7382 RgSchCmnUlCell *cellUl;
7383 RgSchUlAlloc *alloc;
7386 TRC2(rgSCHCmnUlAllocFillNdmrs);
7387 alloc->grnt.nDmrs = cellUl->dmrsArr[alloc->sbStart];
7391 /***********************************************************
7393 * Func : rgSCHCmnUlAllocLnkHqProc
7395 * Desc : Links a new allocation for an UE with the
7396 * appropriate HARQ process of the UE.
7404 **********************************************************/
7406 PUBLIC Void rgSCHCmnUlAllocLnkHqProc
7409 RgSchUlAlloc *alloc,
7410 RgSchUlHqProcCb *proc,
7414 PUBLIC Void rgSCHCmnUlAllocLnkHqProc(ue, alloc, proc, isRetx)
7416 RgSchUlAlloc *alloc;
7417 RgSchUlHqProcCb *proc;
7421 TRC2(rgSCHCmnUlAllocLnkHqProc);
7425 rgSCHCmnUlAdapRetx(alloc, proc);
7429 #ifdef LTE_L2_MEAS /* L2_COUNTERS */
7432 rgSCHUhmNewTx(proc, (((RgUeUlHqCb*)proc->hqEnt)->maxHqRetx), alloc);
7438 * @brief This function releases a PDCCH in the subframe that is
7439 * currently being allocated for.
7443 * Function: rgSCHCmnPdcchRlsCrntSf
7444 * Purpose: This function determines current DL subframe
7445 * which is considered for PDCCH allocation,
7446 * and then calls the actual function that
7447 * releases a PDCCH in a specific subframe.
7448 * Note that this function is called only
7449 * when PDCCH release needs to be made during
7450 * uplink scheduling.
7452 * Invoked by: Scheduler
7454 * @param[in] RgSchCellCb *cell
7455 * @param[in] RgSchPdcch *pdcch
7459 PUBLIC Void rgSCHCmnPdcchRlsCrntSf
7465 PUBLIC Void rgSCHCmnPdcchRlsCrntSf(cell, pdcch)
7470 CmLteTimingInfo frm = cell->crntTime;
7473 TRC2(rgSCHCmnPdcchRlsCrntSf);
7475 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
7476 sf = rgSCHUtlSubFrmGet(cell, frm);
7477 rgSCHUtlPdcchPut(cell, &sf->pdcchInfo, pdcch);
7480 /***********************************************************
7482 * Func : rgSCHCmnUlFillPdcchWithAlloc
7484 * Desc : Fills a PDCCH with format 0 information.
7492 **********************************************************/
7494 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc
7497 RgSchUlAlloc *alloc,
7501 PUBLIC Void rgSCHCmnUlFillPdcchWithAlloc(pdcch, alloc, ue)
7503 RgSchUlAlloc *alloc;
7508 TRC2(rgSCHCmnUlFillPdcchWithAlloc);
7511 pdcch->rnti = alloc->rnti;
7512 //pdcch->dci.dciFormat = TFU_DCI_FORMAT_A2;
7513 pdcch->dci.dciFormat = alloc->grnt.dciFrmt;
7515 //Currently hardcoding values here.
7516 //printf("Filling 5GTF UL DCI for rnti %d \n",alloc->rnti);
7517 switch(pdcch->dci.dciFormat)
7519 case TFU_DCI_FORMAT_A1:
7521 pdcch->dci.u.formatA1Info.formatType = 0;
7522 pdcch->dci.u.formatA1Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7523 pdcch->dci.u.formatA1Info.xPUSCH_TxTiming = 0;
7524 pdcch->dci.u.formatA1Info.RBAssign = alloc->grnt.rbAssign;
7525 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7526 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7527 pdcch->dci.u.formatA1Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7528 pdcch->dci.u.formatA1Info.CSI_BSI_BRI_Req = 0;
7529 pdcch->dci.u.formatA1Info.CSIRS_BRRS_TxTiming = 0;
7530 pdcch->dci.u.formatA1Info.CSIRS_BRRS_SymbIdx = 0;
7531 pdcch->dci.u.formatA1Info.CSIRS_BRRS_ProcInd = 0;
7532 pdcch->dci.u.formatA1Info.numBSI_Reports = 0;
7533 pdcch->dci.u.formatA1Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7534 pdcch->dci.u.formatA1Info.beamSwitch = 0;
7535 pdcch->dci.u.formatA1Info.SRS_Config = 0;
7536 pdcch->dci.u.formatA1Info.SRS_Symbol = 0;
7537 pdcch->dci.u.formatA1Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7538 pdcch->dci.u.formatA1Info.SCID = alloc->grnt.SCID;
7539 pdcch->dci.u.formatA1Info.PMI = alloc->grnt.PMI;
7540 pdcch->dci.u.formatA1Info.UL_PCRS = 0;
7541 pdcch->dci.u.formatA1Info.tpcCmd = alloc->grnt.tpc;
7544 case TFU_DCI_FORMAT_A2:
7546 pdcch->dci.u.formatA2Info.formatType = 1;
7547 pdcch->dci.u.formatA2Info.xPUSCHRange = alloc->grnt.xPUSCHRange;
7548 pdcch->dci.u.formatA2Info.xPUSCH_TxTiming = 0;
7549 pdcch->dci.u.formatA2Info.RBAssign = alloc->grnt.rbAssign;
7550 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.hqProcId = alloc->grnt.hqProcId;
7551 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.mcs = alloc->grnt.iMcsCrnt;
7552 pdcch->dci.u.formatA2Info.u.rbAssignA1Val324.ndi = alloc->hqProc->ndi;
7553 pdcch->dci.u.formatA2Info.CSI_BSI_BRI_Req = 0;
7554 pdcch->dci.u.formatA2Info.CSIRS_BRRS_TxTiming = 0;
7555 pdcch->dci.u.formatA2Info.CSIRS_BRRS_SymbIdx = 0;
7556 pdcch->dci.u.formatA2Info.CSIRS_BRRS_ProcInd = 0;
7557 pdcch->dci.u.formatA2Info.numBSI_Reports = 0;
7558 pdcch->dci.u.formatA2Info.uciOnxPUSCH = alloc->grnt.uciOnxPUSCH;
7559 pdcch->dci.u.formatA2Info.beamSwitch = 0;
7560 pdcch->dci.u.formatA2Info.SRS_Config = 0;
7561 pdcch->dci.u.formatA2Info.SRS_Symbol = 0;
7562 pdcch->dci.u.formatA2Info.REMapIdx_DMRS_PCRS_numLayers = 0;
7563 pdcch->dci.u.formatA2Info.SCID = alloc->grnt.SCID;
7564 pdcch->dci.u.formatA2Info.PMI = alloc->grnt.PMI;
7565 pdcch->dci.u.formatA2Info.UL_PCRS = 0;
7566 pdcch->dci.u.formatA2Info.tpcCmd = alloc->grnt.tpc;
7570 RLOG1(L_ERROR," 5GTF_ERROR UL Allocator's incorrect "
7571 "dciFormat Fill RNTI:%d",alloc->rnti);
7579 /***********************************************************
7581 * Func : rgSCHCmnUlAllocFillTpc
7583 * Desc : Determines and fills TPC for an UE allocation.
7591 **********************************************************/
7593 PUBLIC Void rgSCHCmnUlAllocFillTpc
7600 PUBLIC Void rgSCHCmnUlAllocFillTpc(cell, ue, alloc)
7603 RgSchUlAlloc *alloc;
7606 TRC2(rgSCHCmnUlAllocFillTpc);
7607 alloc->grnt.tpc = rgSCHPwrPuschTpcForUe(cell, ue);
7612 /***********************************************************
7614 * Func : rgSCHCmnAddUeToRefreshQ
7616 * Desc : Adds a UE to refresh queue, so that the UE is
7617 * periodically triggered to refresh its GBR and
7626 **********************************************************/
7628 PRIVATE Void rgSCHCmnAddUeToRefreshQ
7635 PRIVATE Void rgSCHCmnAddUeToRefreshQ(cell, ue, wait)
7641 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
7643 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
7645 TRC2(rgSCHCmnAddUeToRefreshQ);
7648 cmMemset((U8 *)&arg, 0, sizeof(arg));
7649 arg.tqCp = &sched->tmrTqCp;
7650 arg.tq = sched->tmrTq;
7651 arg.timers = &ueSchd->tmr;
7655 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
7662 * @brief Perform UE reset procedure.
7666 * Function : rgSCHCmnUlUeReset
7668 * This function performs BSR resetting and
7669 * triggers the UL-specific scheduler
7670 * to perform the UE reset procedure.
7672 * @param[in] RgSchCellCb *cell
7673 * @param[in] RgSchUeCb *ue
7677 PRIVATE Void rgSCHCmnUlUeReset
7683 PRIVATE Void rgSCHCmnUlUeReset(cell, ue)
7688 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7689 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7691 RgSchCmnLcg *lcgCmn;
7693 RgSchCmnAllocRecord *allRcd;
7694 TRC2(rgSCHCmnUlUeReset);
7696 ue->ul.minReqBytes = 0;
7697 ue->ul.totalBsr = 0;
7699 ue->ul.nonGbrLcgBs = 0;
7700 ue->ul.effAmbr = ue->ul.cfgdAmbr;
7702 node = ueUl->ulAllocLst.first;
7705 allRcd = (RgSchCmnAllocRecord *)node->node;
7709 for(lcgCnt = 0; lcgCnt < RGSCH_MAX_LCG_PER_UE; lcgCnt++)
7711 lcgCmn = RG_SCH_CMN_GET_UL_LCG(&ue->ul.lcgArr[lcgCnt]);
7713 lcgCmn->reportedBs = 0;
7714 lcgCmn->effGbr = lcgCmn->cfgdGbr;
7715 lcgCmn->effDeltaMbr = lcgCmn->deltaMbr;
7717 rgSCHCmnUlUeDelAllocs(cell, ue);
7719 ue->isSrGrant = FALSE;
7721 cellSchd->apisUl->rgSCHUlUeReset(cell, ue);
7723 /* Stack Crash problem for TRACE5 changes. Added the return below */
7729 * @brief RESET UL CQI and DL CQI&RI to conservative values
7730 * for a reestablishing UE.
7734 * Function : rgSCHCmnResetRiCqi
7736 * RESET UL CQI and DL CQI&RI to conservative values
7737 * for a reestablishing UE
7739 * @param[in] RgSchCellCb *cell
7740 * @param[in] RgSchUeCb *ue
7744 PRIVATE Void rgSCHCmnResetRiCqi
7750 PRIVATE Void rgSCHCmnResetRiCqi(cell, ue)
7755 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7756 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
7757 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7758 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
7760 TRC2(rgSCHCmnResetRiCqi);
7762 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
7763 cell->isCpUlExtend);
7765 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
7766 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
7767 ueDl->mimoInfo.ri = 1;
7768 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
7769 (ue->mimoInfo.txMode == RGR_UE_TM_6))
7771 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
7773 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
7775 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
7778 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
7780 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
7784 /* Request for an early Aper CQI in case of reest */
7785 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
7786 if(acqiCb && acqiCb->aCqiCfg.pres)
7788 acqiCb->aCqiTrigWt = 0;
7796 * @brief Perform UE reset procedure.
7800 * Function : rgSCHCmnDlUeReset
7802 * This function performs BO resetting and
7803 * triggers the DL-specific scheduler
7804 * to perform the UE reset procedure.
7806 * @param[in] RgSchCellCb *cell
7807 * @param[in] RgSchUeCb *ue
7811 PRIVATE Void rgSCHCmnDlUeReset
7817 PRIVATE Void rgSCHCmnDlUeReset(cell, ue)
7822 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7823 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
7824 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
7826 TRC2(rgSCHCmnDlUeReset);
7828 if (ueDl->rachInfo.poLnk.node != NULLP)
7830 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
7833 /* Fix: syed Remove from TA List if this UE is there.
7834 * If TA Timer is running. Stop it */
7835 if (ue->dlTaLnk.node)
7837 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
7838 ue->dlTaLnk.node = (PTR)NULLP;
7840 else if (ue->taTmr.tmrEvnt != TMR_NONE)
7842 rgSCHTmrStopTmr(cell, ue->taTmr.tmrEvnt, ue);
7845 cellSchd->apisDl->rgSCHDlUeReset(cell, ue);
7849 rgSCHSCellDlUeReset(cell,ue);
7855 * @brief Perform UE reset procedure.
7859 * Function : rgSCHCmnUeReset
7861 * This function triggers the specific scheduler
7862 * to perform the UE reset procedure.
7864 * @param[in] RgSchCellCb *cell
7865 * @param[in] RgSchUeCb *ue
7871 PUBLIC Void rgSCHCmnUeReset
7877 PUBLIC Void rgSCHCmnUeReset(cell, ue)
7884 RgInfResetHqEnt hqEntRstInfo;
7886 TRC2(rgSCHCmnUeReset);
7887 /* RACHO: remove UE from pdcch, handover and rapId assoc Qs */
7888 rgSCHCmnDelRachInfo(cell, ue);
7890 rgSCHPwrUeReset(cell, ue);
7892 rgSCHCmnUlUeReset(cell, ue);
7893 rgSCHCmnDlUeReset(cell, ue);
7896 /* Making allocCmnUlPdcch TRUE to allocate DCI0/1A from Common search space.
7897 As because multiple cells are added hence 2 bits CqiReq is there
7898 This flag will be set to FALSE once we will get Scell READY */
7899 ue->allocCmnUlPdcch = TRUE;
7902 /* Fix : syed RESET UL CQI and DL CQI&RI to conservative values
7903 * for a reestablishing UE */
7904 /*Reset Cqi Config for all the configured cells*/
7905 for (idx = 0;idx < CM_LTE_MAX_CELLS; idx++)
7907 if (ue->cellInfo[idx] != NULLP)
7909 rgSCHCmnResetRiCqi(ue->cellInfo[idx]->cell, ue);
7912 /*After Reset Trigger APCQI for Pcell*/
7913 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
7914 if(pCellInfo->acqiCb.aCqiCfg.pres)
7916 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
7919 /* sending HqEnt reset to MAC */
7920 hqEntRstInfo.cellId = cell->cellId;
7921 hqEntRstInfo.crnti = ue->ueId;
7923 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
7924 RgSchMacRstHqEnt(&pst,&hqEntRstInfo);
7930 * @brief UE out of MeasGap or AckNackReptn.
7934 * Function : rgSCHCmnActvtUlUe
7936 * This function triggers the specific scheduler
7937 * to start considering the UE for scheduling.
7939 * @param[in] RgSchCellCb *cell
7940 * @param[in] RgSchUeCb *ue
7946 PUBLIC Void rgSCHCmnActvtUlUe
7952 PUBLIC Void rgSCHCmnActvtUlUe(cell, ue)
7957 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7958 TRC2(rgSCHCmnActvtUlUe);
7960 /* : take care of this in UL retransmission */
7961 cellSchd->apisUl->rgSCHUlActvtUe(cell, ue);
7966 * @brief UE out of MeasGap or AckNackReptn.
7970 * Function : rgSCHCmnActvtDlUe
7972 * This function triggers the specific scheduler
7973 * to start considering the UE for scheduling.
7975 * @param[in] RgSchCellCb *cell
7976 * @param[in] RgSchUeCb *ue
7982 PUBLIC Void rgSCHCmnActvtDlUe
7988 PUBLIC Void rgSCHCmnActvtDlUe(cell, ue)
7993 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
7994 TRC2(rgSCHCmnActvtDlUe);
7996 cellSchd->apisDl->rgSCHDlActvtUe(cell, ue);
8001 * @brief This API is invoked to indicate a CRC indication to the scheduler.
8005 * Function : rgSCHCmnHdlUlTransInd
8006 * This API is invoked to indicate a CRC indication to the scheduler.
8008 * @param[in] RgSchCellCb *cell
8009 * @param[in] RgSchUeCb *ue
8010 * @param[in] CmLteTimingInfo timingInfo
8015 PUBLIC Void rgSCHCmnHdlUlTransInd
8019 CmLteTimingInfo timingInfo
8022 PUBLIC Void rgSCHCmnHdlUlTransInd(cell, ue, timingInfo)
8025 CmLteTimingInfo timingInfo;
8028 TRC2(rgSCHCmnHdlUlTransInd);
8030 /* Update the latest UL data/signal transmission time */
8031 RGSCHCPYTIMEINFO(timingInfo, ue->ul.ulTransTime);
8032 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
8034 /* Some UL Transmission from this UE.
8035 * Activate this UE if it was inactive */
8036 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8037 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
8045 * @brief Compute the minimum Rank based on Codebook subset
8046 * restriction configuration for 4 Tx Ports and Tx Mode 4.
8050 * Function : rgSCHCmnComp4TxMode4
8052 * Depending on BitMap set at CBSR during Configuration
8053 * - return the least possible Rank
8056 * @param[in] U32 *pmiBitMap
8057 * @return RgSchCmnRank
8060 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4
8065 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode4(pmiBitMap)
8069 U32 bitMap0, bitMap1;
8070 TRC2(rgSCHCmnComp4TxMode4);
8071 bitMap0 = pmiBitMap[0];
8072 bitMap1 = pmiBitMap[1];
8073 if((bitMap1) & 0xFFFF)
8075 return (RG_SCH_CMN_RANK_1);
8077 else if((bitMap1>>16) & 0xFFFF)
8079 return (RG_SCH_CMN_RANK_2);
8081 else if((bitMap0) & 0xFFFF)
8083 return (RG_SCH_CMN_RANK_3);
8085 else if((bitMap0>>16) & 0xFFFF)
8087 return (RG_SCH_CMN_RANK_4);
8091 return (RG_SCH_CMN_RANK_1);
8097 * @brief Compute the minimum Rank based on Codebook subset
8098 * restriction configuration for 2 Tx Ports and Tx Mode 4.
8102 * Function : rgSCHCmnComp2TxMode4
8104 * Depending on the bitmap set in the codebook subset restriction (CBSR)
8105 * configuration, return the least possible rank.
8108 * @param[in] U32 *pmiBitMap
8109 * @return RgSchCmnRank
8112 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4
8117 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode4(pmiBitMap)
8122 TRC2(rgSCHCmnComp2TxMode4);
8123 bitMap0 = pmiBitMap[0];
8124 if((bitMap0>>26)& 0x0F)
8126 return (RG_SCH_CMN_RANK_1);
8128 else if((bitMap0>>30) & 3)
8130 return (RG_SCH_CMN_RANK_2);
8134 return (RG_SCH_CMN_RANK_1);
8139 * @brief Compute the minimum Rank based on Codebook subset
8140 * restriction configuration for 4 Tx Ports and Tx Mode 3.
8144 * Function : rgSCHCmnComp4TxMode3
8146 * Depending on the bitmap set in the codebook subset restriction (CBSR)
8147 * configuration, return the least possible rank.
8150 * @param[in] U32 *pmiBitMap
8151 * @return RgSchCmnRank
8154 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3
8159 PRIVATE RgSchCmnRank rgSCHCmnComp4TxMode3(pmiBitMap)
8164 TRC2(rgSCHCmnComp4TxMode3);
8165 bitMap0 = pmiBitMap[0];
8166 if((bitMap0>>28)& 1)
8168 return (RG_SCH_CMN_RANK_1);
8170 else if((bitMap0>>29) &1)
8172 return (RG_SCH_CMN_RANK_2);
8174 else if((bitMap0>>30) &1)
8176 return (RG_SCH_CMN_RANK_3);
8178 else if((bitMap0>>31) &1)
8180 return (RG_SCH_CMN_RANK_4);
8184 return (RG_SCH_CMN_RANK_1);
8189 * @brief Compute the minimum Rank based on Codebook subset
8190 * restriction configuration for 2 Tx Ports and Tx Mode 3.
8194 * Function : rgSCHCmnComp2TxMode3
8196 * Depending on the bitmap set in the codebook subset restriction (CBSR)
8197 * configuration, return the least possible rank.
8200 * @param[in] U32 *pmiBitMap
8201 * @return RgSchCmnRank
8204 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3
8209 PRIVATE RgSchCmnRank rgSCHCmnComp2TxMode3(pmiBitMap)
8214 TRC2(rgSCHCmnComp2TxMode3);
8215 bitMap0 = pmiBitMap[0];
8216 if((bitMap0>>30)& 1)
8218 return (RG_SCH_CMN_RANK_1);
8220 else if((bitMap0>>31) &1)
8222 return (RG_SCH_CMN_RANK_2);
8226 return (RG_SCH_CMN_RANK_1);
8231 * @brief Compute the minimum Rank based on Codebook subset
8232 * restriction configuration.
8236 * Function : rgSCHCmnComputeRank
8238 * Depending on the number of Tx ports and the transmission mode,
8239 * return the least possible rank.
8242 * @param[in] RgrTxMode txMode
8243 * @param[in] U32 *pmiBitMap
8244 * @param[in] U8 numTxPorts
8245 * @return RgSchCmnRank
8248 PRIVATE RgSchCmnRank rgSCHCmnComputeRank
8255 PRIVATE RgSchCmnRank rgSCHCmnComputeRank(txMode, pmiBitMap, numTxPorts)
8261 TRC2(rgSCHCmnComputeRank);
8263 if (numTxPorts ==2 && txMode == RGR_UE_TM_3)
8265 return (rgSCHCmnComp2TxMode3(pmiBitMap));
8267 else if (numTxPorts ==4 && txMode == RGR_UE_TM_3)
8269 return (rgSCHCmnComp4TxMode3(pmiBitMap));
8271 else if (numTxPorts ==2 && txMode == RGR_UE_TM_4)
8273 return (rgSCHCmnComp2TxMode4(pmiBitMap));
8275 else if (numTxPorts ==4 && txMode == RGR_UE_TM_4)
8277 return (rgSCHCmnComp4TxMode4(pmiBitMap));
8281 return (RG_SCH_CMN_RANK_1);
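/* Editorial sketch (assumption, not part of the build): how the CBSR bitmap is
 * mapped to a minimum rank by the helpers above. For 2 Tx ports in TM3 only
 * the two most significant bits of pmiBitMap[0] are inspected; if only the
 * rank-2 bit (bit 31) is allowed, RANK_2 is reported:
 *
 *   U32 cbsrBitMap[2] = {0x80000000, 0};
 *   RgSchCmnRank rank = rgSCHCmnComputeRank(RGR_UE_TM_3, cbsrBitMap, 2);
 *   // rank == RG_SCH_CMN_RANK_2, via rgSCHCmnComp2TxMode3()
 */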
8288 * @brief Harq Entity Deinitialization for CMN SCH.
8292 * Function : rgSCHCmnDlDeInitHqEnt
8294 * Harq Entity Deinitialization for CMN SCH
8296 * @param[in] RgSchCellCb *cell
8297 * @param[in] RgSchDlHqEnt *hqE
8300 /*KWORK_FIX:Changed function return type to void */
8302 PUBLIC Void rgSCHCmnDlDeInitHqEnt
8308 PUBLIC Void rgSCHCmnDlDeInitHqEnt(cell, hqE)
8313 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8314 RgSchDlHqProcCb *hqP;
8318 TRC2(rgSCHCmnDlDeInitHqEnt);
8320 ret = cellSchd->apisDl->rgSCHDlUeHqEntDeInit(cell, hqE);
8321 /* Free only if the HARQ procs were created */
8326 for(cnt = 0; cnt < hqE->numHqPrcs; cnt++)
8328 hqP = &hqE->procs[cnt];
8329 if ((RG_SCH_CMN_GET_DL_HQP(hqP)))
8331 rgSCHUtlFreeSBuf(cell->instIdx,
8332 (Data**)(&(hqP->sch)), (sizeof(RgSchCmnDlHqProc)));
8336 rgSCHLaaDeInitDlHqProcCb (cell, hqE);
8343 * @brief Harq Entity initialization for CMN SCH.
8347 * Function : rgSCHCmnDlInitHqEnt
8349 * Harq Entity initialization for CMN SCH
8351 * @param[in] RgSchCellCb *cell
8352 * @param[in] RgSchUeCb *ue
8358 PUBLIC S16 rgSCHCmnDlInitHqEnt
8364 PUBLIC S16 rgSCHCmnDlInitHqEnt(cell, hqEnt)
8366 RgSchDlHqEnt *hqEnt;
8370 RgSchDlHqProcCb *hqP;
8373 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8374 TRC2(rgSCHCmnDlInitHqEnt);
8376 for(cnt = 0; cnt < hqEnt->numHqPrcs; cnt++)
8378 hqP = &hqEnt->procs[cnt];
8379 if (rgSCHUtlAllocSBuf(cell->instIdx,
8380 (Data**)&(hqP->sch), (sizeof(RgSchCmnDlHqProc))) != ROK)
8386 if((cell->emtcEnable) &&(hqEnt->ue->isEmtcUe))
8388 if(ROK != cellSchd->apisEmtcDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8397 if(ROK != cellSchd->apisDl->rgSCHDlUeHqEntInit(cell, hqEnt))
8404 } /* rgSCHCmnDlInitHqEnt */
8407 * @brief This function computes the refresh-period distribution offset.
8411 * Function: rgSCHCmnGetRefreshDist
8412 * Purpose: This function computes the refresh-period distribution offset.
8413 * This is required to spread the refresh of the configured UEs
8414 * across different consecutive subframes.
8416 * Invoked by: rgSCHCmnGetRefreshPer
8418 * @param[in] RgSchCellCb *cell
8419 * @param[in] RgSchUeCb *ue
8424 PRIVATE U8 rgSCHCmnGetRefreshDist
8430 PRIVATE U8 rgSCHCmnGetRefreshDist(cell, ue)
8437 Inst inst = cell->instIdx;
8439 TRC2(rgSCHCmnGetRefreshDist);
8441 for(refOffst = 0; refOffst < RGSCH_MAX_REFRESH_OFFSET; refOffst++)
8443 if(cell->refreshUeCnt[refOffst] < RGSCH_MAX_REFRESH_GRPSZ)
8445 cell->refreshUeCnt[refOffst]++;
8446 ue->refreshOffset = refOffst;
8447 /* printf("UE[%d] refresh offset[%d]. Cell refresh ue count[%d].\n", ue->ueId, refOffst, cell->refreshUeCnt[refOffst]); */
8452 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Allocation of refresh distribution failed\n"));
8453 /* We should not enter here normally; in case of failure, allocate from the last offset */
8454 cell->refreshUeCnt[refOffst-1]++;
8455 ue->refreshOffset = refOffst-1;
8457 return (refOffst-1);
8460 * @brief This function computes initial Refresh Wait Period.
8464 * Function: rgSCHCmnGetRefreshPer
8465 * Purpose: This function computes the initial refresh wait period.
8466 * This is required to align the refresh of multiple UEs
8467 * around the same time.
8469 * Invoked by: rgSCHCmnRgrUeCfg, rgSCHCmnRgrUeRecfg
8471 * @param[in] RgSchCellCb *cell
8472 * @param[in] RgSchUeCb *ue
8473 * @param[in] U32 *waitPer
8478 PRIVATE Void rgSCHCmnGetRefreshPer
8485 PRIVATE Void rgSCHCmnGetRefreshPer(cell, ue, waitPer)
8494 TRC2(rgSCHCmnGetRefreshPer);
8496 refreshPer = RG_SCH_CMN_REFRESH_TIME * RG_SCH_CMN_REFRESH_TIMERES;
8497 crntSubFrm = cell->crntTime.sfn * RGSCH_NUM_SUB_FRAMES_5G + cell->crntTime.slot;
8498 /* Fix: syed align multiple UEs to refresh at same time */
8499 *waitPer = refreshPer - (crntSubFrm % refreshPer);
8500 *waitPer = RGSCH_CEIL(*waitPer, RG_SCH_CMN_REFRESH_TIMERES);
8501 *waitPer = *waitPer + rgSCHCmnGetRefreshDist(cell, ue);
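/* Editorial worked example (assumed constants, not from the original source):
 * with RG_SCH_CMN_REFRESH_TIME = 100 and RG_SCH_CMN_REFRESH_TIMERES = 10 the
 * refresh period is 1000 subframes. If the current time corresponds to
 * subframe 12345, then:
 *   waitPer = 1000 - (12345 % 1000)            = 655 subframes
 *   waitPer = RGSCH_CEIL(655, 10)              = 66 timer units
 *   waitPer += rgSCHCmnGetRefreshDist(cell,ue)   (per-UE offset, 0..RGSCH_MAX_REFRESH_OFFSET-1)
 * so UEs configured at the same instant still refresh in different subframes. */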
8509 * @brief UE initialisation for scheduler.
8513 * Function : rgSCHCmnRgrSCellUeCfg
8515 * This function initialises UE-specific scheduler
8516 * information for the SCell:
8517 * 0. Perform basic validations
8518 * 1. Allocate common sched UE cntrl blk
8519 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
8521 * 4. Perform DLFS cfg
8523 * @param[in] RgSchCellCb *cell
8524 * @param[in] RgSchUeCb *ue
8525 * @param[out] RgSchErrInfo *err
8531 PUBLIC S16 rgSCHCmnRgrSCellUeCfg
8535 RgrUeSecCellCfg *sCellInfoCfg,
8539 PUBLIC S16 rgSCHCmnRgrSCellUeCfg(sCell, ue, sCellInfoCfg, err)
8542 RgrUeSecCellCfg *sCellInfoCfg;
8549 RgSchCmnAllocRecord *allRcd;
8550 RgSchDlRbAlloc *allocInfo;
8551 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8553 RgSchCmnUlUe *ueUlPcell;
8554 RgSchCmnUe *pCellUeSchCmn;
8555 RgSchCmnUe *ueSchCmn;
8557 RgSchCmnDlUe *pCellUeDl;
8559 Inst inst = ue->cell->instIdx;
8561 U32 idx = (U8)((sCell->cellId - rgSchCb[sCell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8562 TRC2(rgSCHCmnRgrSCellUeCfg);
8564 pCellUeSchCmn = RG_SCH_CMN_GET_UE(ue,ue->cell);
8565 pCellUeDl = &pCellUeSchCmn->dl;
8567 /* 1. Allocate Common sched control block */
8568 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8569 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8571 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Memory allocation FAILED\n"));
8572 err->errCause = RGSCHERR_SCH_CFG;
8575 ueSchCmn = RG_SCH_CMN_GET_UE(ue,sCell);
8577 /* 2. Perform the UE's downlink configuration */
8578 ueDl = &ueSchCmn->dl;
8581 ueDl->mimoInfo = pCellUeDl->mimoInfo;
8583 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) ||
8584 (ue->mimoInfo.txMode == RGR_UE_TM_6))
8586 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_NO_PMI);
8588 if (ue->mimoInfo.txMode == RGR_UE_TM_3)
8590 RG_SCH_CMN_SET_FORCE_TD(ue, sCell, RG_SCH_CMN_TD_RI_1);
8592 RGSCH_ARRAY_BOUND_CHECK(sCell->instIdx, rgUeCatTbl, pCellUeSchCmn->cmn.ueCat);
8593 ueDl->maxTbBits = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlTbBits;
8596 ri = RGSCH_MIN(ri, sCell->numTxAntPorts);
8597 if(((CM_LTE_UE_CAT_6 == pCellUeSchCmn->cmn.ueCat )
8598 ||(CM_LTE_UE_CAT_7 == pCellUeSchCmn->cmn.ueCat))
8601 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[1];
8605 ueDl->maxTbSz = rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxDlBits[0];
8608 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
8610 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8611 rgSchTddDlNumHarqProcTbl[sCell->ulDlCfgIdx]);
8613 ueDl->maxSbSz = (rgUeCatTbl[pCellUeSchCmn->cmn.ueCat].maxSftChBits/
8614 RGSCH_NUM_DL_HQ_PROC);
8617 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, ue->isEmtcUe);
8619 rgSCHCmnDlSetUeAllocLmt(sCell, ueDl, FALSE);
8623 /* ambrCfgd config moved to ueCb.dl, as it is not needed on a per-cell basis */
8625 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, sCell);
8626 allocInfo->rnti = ue->ueId;
8628 /* Initializing the lastCfi value to current cfi value */
8629 ueDl->lastCfi = cellSchd->dl.currCfi;
8631 if ((cellSchd->apisDl->rgSCHRgrSCellDlUeCfg(sCell, ue, err)) != ROK)
8633 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "Spec Sched DL UE CFG FAILED\n"));
8637 /* TODO: enhance for DLFS RB Allocation for SCELLs in future dev */
8639 /* DLFS UE Config */
8640 if (cellSchd->dl.isDlFreqSel)
8642 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeCfg(sCell, ue, sCellInfoCfg, err)) != ROK)
8644 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS UE config FAILED\n"));
8649 /* TODO: Do UL SCELL CFG during UL CA dev */
8651 ueUl = RG_SCH_CMN_GET_UL_UE(ue, sCell);
8653 /* TODO_ULCA: SRS for SCELL needs to be handled in the below function call */
8654 rgSCHCmnUpdUeUlCqiInfo(sCell, ue, ueUl, ueSchCmn, cellSchd,
8655 sCell->isCpUlExtend);
8657 ret = rgSCHUhmHqEntInit(sCell, ue);
8660 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL UHM HARQ Ent Init "
8661 "Failed for CRNTI:%d", ue->ueId);
8665 ueUlPcell = RG_SCH_CMN_GET_UL_UE(ue, ue->cell);
8666 /* Initialize uplink HARQ related information for UE */
8667 ueUl->hqEnt.maxHqRetx = ueUlPcell->hqEnt.maxHqRetx;
8668 cmLListInit(&ueUl->hqEnt.free);
8669 cmLListInit(&ueUl->hqEnt.inUse);
8670 for(i=0; i < ueUl->hqEnt.numHqPrcs; i++)
8672 ueUl->hqEnt.hqProcCb[i].hqEnt = (void*)(&ueUl->hqEnt);
8673 ueUl->hqEnt.hqProcCb[i].procId = i;
8674 ueUl->hqEnt.hqProcCb[i].ulSfIdx = RGSCH_INVALID_INFO;
8675 ueUl->hqEnt.hqProcCb[i].alloc = NULLP;
8677 /* ccpu00139513- Initializing SPS flags*/
8678 ueUl->hqEnt.hqProcCb[i].isSpsActvnHqP = FALSE;
8679 ueUl->hqEnt.hqProcCb[i].isSpsOccnHqP = FALSE;
8681 cmLListAdd2Tail(&ueUl->hqEnt.free, &ueUl->hqEnt.hqProcCb[i].lnk);
8682 ueUl->hqEnt.hqProcCb[i].lnk.node = (PTR)&ueUl->hqEnt.hqProcCb[i];
8685 /* Allocate UL BSR allocation tracking List */
8686 cmLListInit(&ueUl->ulAllocLst);
8688 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
8690 if((rgSCHUtlAllocSBuf(sCell->instIdx,
8691 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord)) != ROK))
8693 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId,"SCELL Memory allocation FAILED"
8694 "for CRNTI:%d",ue->ueId);
8695 err->errCause = RGSCHERR_SCH_CFG;
8698 allRcd->allocTime = sCell->crntTime;
8699 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
8700 allRcd->lnk.node = (PTR)allRcd;
8703 /* After initialising UL part, do power related init */
8704 ret = rgSCHPwrUeSCellCfg(sCell, ue, sCellInfoCfg);
8707 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Could not do "
8708 "power config for UE CRNTI:%d",ue->ueId);
8713 if(TRUE == ue->isEmtcUe)
8715 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8717 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8718 "for CRNTI:%d",ue->ueId);
8725 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(sCell, ue, NULL, err)) != ROK)
8727 RLOG_ARG1(L_ERROR,DBG_CELLID,sCell->cellId, "Spec Sched UL UE CFG FAILED"
8728 "for CRNTI:%d",ue->ueId);
8733 ue->ul.isUlCaEnabled = TRUE;
8737 } /* rgSCHCmnRgrSCellUeCfg */
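/* Editorial sketch (assumption, not from the original source): the SCell UL
 * HARQ procs initialised above follow a free/inUse list discipline. Taking a
 * proc for a fresh SCell UL grant would look roughly like:
 *
 *   CmLList *lnk = ueUl->hqEnt.free.first;
 *   if (lnk != NULLP)
 *   {
 *      RgSchUlHqProcCb *proc = (RgSchUlHqProcCb *)lnk->node;
 *      cmLListDelFrm(&ueUl->hqEnt.free, lnk);
 *      cmLListAdd2Tail(&ueUl->hqEnt.inUse, lnk);
 *      // proc->alloc is then pointed at the new allocation
 *   }
 */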
8741 * @brief UE SCell deletion for scheduler.
8745 * Function : rgSCHCmnRgrSCellUeDel
8747 * This function deletes UE-specific scheduler
8748 * information for the SCell.
8750 * @param[in] RgSchCellCb *cell
8751 * @param[in] RgSchUeCb *ue
8757 PUBLIC S16 rgSCHCmnRgrSCellUeDel
8759 RgSchUeCellInfo *sCellInfo,
8763 PUBLIC S16 rgSCHCmnRgrSCellUeDel(sCellInfo, ue)
8764 RgSchUeCellInfo *sCellInfo;
8768 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(ue->cell);
8769 Inst inst = ue->cell->instIdx;
8771 TRC2(rgSCHCmnRgrSCellUeDel);
8773 cellSchd->apisDl->rgSCHRgrSCellDlUeDel(sCellInfo, ue);
8776 rgSCHCmnUlUeDelAllocs(sCellInfo->cell, ue);
8779 if(TRUE == ue->isEmtcUe)
8781 cellSchd->apisEmtcUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8786 cellSchd->apisUl->rgSCHFreeUlUe(sCellInfo->cell, ue);
8789 /* DLFS UE Config */
8790 if (cellSchd->dl.isDlFreqSel)
8792 if ((cellSchd->apisDlfs->rgSCHDlfsSCellUeDel(sCellInfo->cell, ue)) != ROK)
8794 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "DLFS Scell del FAILED\n"));
8799 rgSCHUtlFreeSBuf(sCellInfo->cell->instIdx,
8800 (Data**)(&(sCellInfo->sch)), (sizeof(RgSchCmnUe)));
8804 } /* rgSCHCmnRgrSCellUeDel */
8810 * @brief Handles 5gtf configuration for a UE
8814 * Function : rgSCHCmn5gtfUeCfg
8820 * @param[in] RgSchCellCb *cell
8821 * @param[in] RgSchUeCb *ue
8822 * @param[in] RgrUeCfg *cfg
8828 PUBLIC S16 rgSCHCmn5gtfUeCfg
8835 PUBLIC S16 rgSCHCmn5gtfUeCfg(cell, ue, cfg)
8841 TRC2(rgSCHCmn5gtfUeCfg);
8843 RgSchUeGrp *ue5gtfGrp;
8844 ue->ue5gtfCb.grpId = cfg->ue5gtfCfg.grpId;
8845 ue->ue5gtfCb.BeamId = cfg->ue5gtfCfg.BeamId;
8846 ue->ue5gtfCb.numCC = cfg->ue5gtfCfg.numCC;
8847 ue->ue5gtfCb.mcs = cfg->ue5gtfCfg.mcs;
8848 ue->ue5gtfCb.maxPrb = cfg->ue5gtfCfg.maxPrb;
8850 ue->ue5gtfCb.cqiRiPer = 100;
8851 /* 5gtf TODO: CQIs to start from (10,0)*/
8852 ue->ue5gtfCb.nxtCqiRiOccn.sfn = 10;
8853 ue->ue5gtfCb.nxtCqiRiOccn.slot = 0;
8854 ue->ue5gtfCb.rank = 1;
8856 printf("\nschd cfg at mac,%u,%u,%u,%u,%u\n",ue->ue5gtfCb.grpId,ue->ue5gtfCb.BeamId,ue->ue5gtfCb.numCC,
8857 ue->ue5gtfCb.mcs,ue->ue5gtfCb.maxPrb);
8859 ue5gtfGrp = &(cell->cell5gtfCb.ueGrp5gConf[ue->ue5gtfCb.BeamId]);
8861 /* TODO_5GTF: Currently handling 1 group only. Need to update when multi group
8862 scheduling comes into picture */
8863 if(ue5gtfGrp->beamBitMask & (1 << ue->ue5gtfCb.BeamId))
8865 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8866 "5GTF_ERROR Invalid beam id CRNTI:%d",cfg->crnti);
8869 ue5gtfGrp->beamBitMask |= (1 << ue->ue5gtfCb.BeamId);
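/* Editorial worked note (illustrative): with BeamId = 3 the statement above
 * sets bit (1 << 3) = 0x8 in beamBitMask; a later UE configured with the same
 * BeamId hits the beamBitMask check earlier in this function and is rejected,
 * i.e. the current single-group handling admits at most one UE per beam. */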
8876 * @brief UE initialisation for scheduler.
8880 * Function : rgSCHCmnRgrUeCfg
8882 * This function initialises UE-specific scheduler
8884 * 0. Perform basic validations
8885 * 1. Allocate common sched UE cntrl blk
8886 * 2. Perform DL cfg (allocate Hq Procs Cmn sched cntrl blks)
8888 * 4. Perform DLFS cfg
8890 * @param[in] RgSchCellCb *cell
8891 * @param[in] RgSchUeCb *ue
8892 * @param[in] RgrUeCfg *ueCfg
8893 * @param[out] RgSchErrInfo *err
8899 PUBLIC S16 rgSCHCmnRgrUeCfg
8907 PUBLIC S16 rgSCHCmnRgrUeCfg(cell, ue, ueCfg, err)
8914 RgSchDlRbAlloc *allocInfo;
8916 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
8917 RgSchCmnUe *ueSchCmn;
8921 RgSchCmnAllocRecord *allRcd;
8923 U32 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId)&(CM_LTE_MAX_CELLS-1));
8924 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
8925 TRC2(rgSCHCmnRgrUeCfg);
8928 /* 1. Allocate Common sched control block */
8929 if((rgSCHUtlAllocSBuf(cell->instIdx,
8930 (Data**)&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch)), (sizeof(RgSchCmnUe))) != ROK))
8932 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
8933 "Memory allocation FAILED for CRNTI:%d",ueCfg->crnti);
8934 err->errCause = RGSCHERR_SCH_CFG;
8937 ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
8938 ue->dl.ueDlCqiCfg = ueCfg->ueDlCqiCfg;
8939 pCellInfo->acqiCb.aCqiCfg = ueCfg->ueDlCqiCfg.aprdCqiCfg;
8940 if(ueCfg->ueCatEnum > 0 )
8942 /*KWORK_FIX removed NULL chk for ueSchCmn*/
8943 ueSchCmn->cmn.ueCat = ueCfg->ueCatEnum - 1;
8947 ueSchCmn->cmn.ueCat = 0; /* Assuming enum values correctly set */
8949 cmInitTimers(&ueSchCmn->cmn.tmr, 1);
8951 /* 2. Perform the UE's downlink configuration */
8952 ueDl = &ueSchCmn->dl;
8953 /* RACHO : store the rapId assigned for HandOver UE.
8954 * Append UE to handover list of cmnCell */
8955 if (ueCfg->dedPreambleId.pres == PRSNT_NODEF)
8957 rgSCHCmnDelDedPreamble(cell, ueCfg->dedPreambleId.val);
8958 ueDl->rachInfo.hoRapId = ueCfg->dedPreambleId.val;
8959 cmLListAdd2Tail(&cellSchd->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
8960 ueDl->rachInfo.hoLnk.node = (PTR)ue;
8963 rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd);
8965 if (ueCfg->txMode.pres == TRUE)
8967 if ((ueCfg->txMode.txModeEnum == RGR_UE_TM_4) ||
8968 (ueCfg->txMode.txModeEnum == RGR_UE_TM_6))
8970 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
8972 if (ueCfg->txMode.txModeEnum == RGR_UE_TM_3)
8974 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
8977 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
8978 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
8981 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
8982 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
8983 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
8986 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
8990 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
8993 /* Fix : syed Assign hqEnt to UE only if msg4 is done */
8995 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
8996 rgSchTddDlNumHarqProcTbl[cell->ulDlCfgIdx]);
8998 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
8999 RGSCH_NUM_DL_HQ_PROC);
9002 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, ue->isEmtcUe);
9004 rgSCHCmnDlSetUeAllocLmt(cell, ueDl, FALSE);
9006 /* If neither the DL AMBR nor the UL AMBR is configured, fail the configuration
9008 if((ueCfg->ueQosCfg.dlAmbr == 0) && (ueCfg->ueQosCfg.ueBr == 0))
9010 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"UL Ambr and DL Ambr are"
9011 "configured as 0 for CRNTI:%d",ueCfg->crnti);
9012 err->errCause = RGSCHERR_SCH_CFG;
9016 ue->dl.ambrCfgd = (ueCfg->ueQosCfg.dlAmbr * RG_SCH_CMN_REFRESH_TIME)/100;
9018 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
9019 allocInfo->rnti = ue->ueId;
9021 /* Initializing the lastCfi value to current cfi value */
9022 ueDl->lastCfi = cellSchd->dl.currCfi;
9024 if(cell->emtcEnable && ue->isEmtcUe)
9026 if ((cellSchd->apisEmtcDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9028 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9029 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9037 if ((cellSchd->apisDl->rgSCHRgrDlUeCfg(cell, ue, ueCfg, err)) != ROK)
9039 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9040 "Spec Sched DL UE CFG FAILED for CRNTI:%d",ueCfg->crnti);
9047 /* 3. Initialize ul part */
9048 ueUl = &ueSchCmn->ul;
9050 rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd,
9051 cell->isCpUlExtend);
9053 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9054 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
9056 ue->ul.cfgdAmbr = (ueCfg->ueQosCfg.ueBr * RG_SCH_CMN_REFRESH_TIME)/100;
9057 ue->ul.effAmbr = ue->ul.cfgdAmbr;
9058 RGSCHCPYTIMEINFO(cell->crntTime, ue->ul.ulTransTime);
9060 /* Allocate UL BSR allocation tracking List */
9061 cmLListInit(&ueUl->ulAllocLst);
9063 for (cnt = 0; cnt < RG_SCH_CMN_MAX_ALLOC_TRACK; cnt++)
9065 if((rgSCHUtlAllocSBuf(cell->instIdx,
9066 (Data**)&(allRcd),sizeof(RgSchCmnAllocRecord)) != ROK))
9068 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation FAILED"
9069 "for CRNTI:%d",ueCfg->crnti);
9070 err->errCause = RGSCHERR_SCH_CFG;
9073 allRcd->allocTime = cell->crntTime;
9074 cmLListAdd2Tail(&ueUl->ulAllocLst, &allRcd->lnk);
9075 allRcd->lnk.node = (PTR)allRcd;
9077 /* Allocate common sch cntrl blocks for LCGs */
9078 for (cnt=0; cnt<RGSCH_MAX_LCG_PER_UE; cnt++)
9080 ret = rgSCHUtlAllocSBuf(cell->instIdx,
9081 (Data**)&(ue->ul.lcgArr[cnt].sch), (sizeof(RgSchCmnLcg)));
9084 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9085 "SCH struct alloc failed for CRNTI:%d",ueCfg->crnti);
9086 err->errCause = RGSCHERR_SCH_CFG;
9090 /* After initialising UL part, do power related init */
9091 ret = rgSCHPwrUeCfg(cell, ue, ueCfg);
9094 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9095 "power config for UE CRNTI:%d",ueCfg->crnti);
9099 ret = rgSCHCmnSpsUeCfg(cell, ue, ueCfg, err);
9102 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not do "
9103 "SPS config for CRNTI:%d",ueCfg->crnti);
9106 #endif /* LTEMAC_SPS */
9109 if(TRUE == ue->isEmtcUe)
9111 if ((cellSchd->apisEmtcUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9113 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9114 "for CRNTI:%d",ueCfg->crnti);
9121 if ((cellSchd->apisUl->rgSCHRgrUlUeCfg(cell, ue, ueCfg, err)) != ROK)
9123 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Spec Sched UL UE CFG FAILED"
9124 "for CRNTI:%d",ueCfg->crnti);
9129 /* DLFS UE Config */
9130 if (cellSchd->dl.isDlFreqSel)
9132 if ((cellSchd->apisDlfs->rgSCHDlfsUeCfg(cell, ue, ueCfg, err)) != ROK)
9134 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "DLFS UE config FAILED"
9135 "for CRNTI:%d",ueCfg->crnti);
9140 /* Fix: syed align multiple UEs to refresh at same time */
9141 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9142 /* Start UE Qos Refresh Timer */
9143 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
9145 rgSCHCmn5gtfUeCfg(cell, ue, ueCfg);
9149 } /* rgSCHCmnRgrUeCfg */
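/* Editorial worked example (units assumed, not from the original source):
 * ambrCfgd/cfgdAmbr above are derived as (AMBR * RG_SCH_CMN_REFRESH_TIME)/100.
 * Assuming the application supplies the AMBR in bytes per second and
 * RG_SCH_CMN_REFRESH_TIME is the QoS refresh period in units of 10 ms, e.g.
 * dlAmbr = 500000 with RG_SCH_CMN_REFRESH_TIME = 20 (i.e. 200 ms) gives
 *   ambrCfgd = 500000 * 20 / 100 = 100000 bytes per refresh interval,
 * which the periodic refresh logic replenishes on every refresh expiry. */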
9152 * @brief UE TX mode reconfiguration handler.
9156 * Function : rgSCHCmnDlHdlTxModeRecfg
9158 * This function updates UE-specific scheduler
9159 * information upon UE reconfiguration.
9161 * @param[in] RgSchUeCb *ue
9162 * @param[in] RgrUeRecfg *ueRecfg
9167 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9171 RgrUeRecfg *ueRecfg,
9175 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, numTxPorts)
9178 RgrUeRecfg *ueRecfg;
9183 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg
9190 PRIVATE Void rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg)
9193 RgrUeRecfg *ueRecfg;
9197 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9198 TRC2(rgSCHCmnDlHdlTxModeRecfg);
9200 if (ueRecfg->txMode.pres != PRSNT_NODEF)
9204 /* ccpu00140894- Starting Timer for TxMode Transition Completion*/
9205 ue->txModeTransCmplt =FALSE;
9206 rgSCHTmrStartTmr (ue->cell, ue, RG_SCH_TMR_TXMODE_TRNSTN, RG_SCH_TXMODE_TRANS_TIMER);
9207 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_CMPLT)
9209 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell,
9210 RG_SCH_CMN_TD_TXMODE_RECFG);
9211 /* MS_WORKAROUND for ccpu00123186 MIMO Fix Start: need to set FORCE TD bitmap based on TX mode */
9212 ueDl->mimoInfo.ri = 1;
9213 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9214 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9216 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9218 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9220 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
9222 /* MIMO Fix End: need to set FORCE TD bitmap based on TX mode */
9225 if (ueRecfg->txMode.tmTrnstnState == RGR_TXMODE_RECFG_START)
9227 /* start afresh forceTD masking */
9228 RG_SCH_CMN_INIT_FORCE_TD(ue, cell, 0);
9229 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_TXMODE_RECFG);
9230 /* Initialize MIMO-related parameters of the UE */
9233 if(ueRecfg->txMode.pres)
9235 if((ueRecfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9236 (ueRecfg->txMode.txModeEnum ==RGR_UE_TM_4))
9238 if(ueRecfg->ueCodeBookRstRecfg.pres)
9241 rgSCHCmnComputeRank(ueRecfg->txMode.txModeEnum,
9242 ueRecfg->ueCodeBookRstRecfg.pmiBitMap, numTxPorts);
9246 ueDl->mimoInfo.ri = 1;
9251 ueDl->mimoInfo.ri = 1;
9256 ueDl->mimoInfo.ri = 1;
9259 ueDl->mimoInfo.ri = 1;
9260 #endif /* TFU_UPGRADE */
9261 if ((ueRecfg->txMode.txModeEnum == RGR_UE_TM_4) ||
9262 (ueRecfg->txMode.txModeEnum == RGR_UE_TM_6))
9264 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
9266 if (ueRecfg->txMode.txModeEnum == RGR_UE_TM_3)
9268 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
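/* Editorial sketch (assumption, not from the original source): a TX mode
 * change is driven through two RGR reconfiguration calls, e.g. from the RRM:
 *
 *   ueRecfg.ueRecfgTypes |= RGR_UE_TXMODE_RECFG;
 *   ueRecfg.txMode.pres = PRSNT_NODEF;
 *   ueRecfg.txMode.tmTrnstnState = RGR_TXMODE_RECFG_START;  // freeze to TD, RI = 1
 *   rgSCHCmnRgrUeRecfg(cell, ue, &ueRecfg, &err);
 *   // ... once the RRC reconfiguration of the UE completes ...
 *   ueRecfg.txMode.tmTrnstnState = RGR_TXMODE_RECFG_CMPLT;  // re-derive forceTD/RI
 *   rgSCHCmnRgrUeRecfg(cell, ue, &ueRecfg, &err);
 */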
9273 /***********************************************************
9275 * Func : rgSCHCmnUpdUeMimoInfo
9277 * Desc : Updates the UE's DL MIMO information (rank and per-codeword CQI)
9285 **********************************************************/
9287 PRIVATE Void rgSCHCmnUpdUeMimoInfo
9292 RgSchCmnCell *cellSchd
9295 PRIVATE Void rgSCHCmnUpdUeMimoInfo(ueCfg, ueDl, cell, cellSchd)
9299 RgSchCmnCell *cellSchd;
9302 TRC2(rgSCHCmnUpdUeMimoInfo)
9304 if(ueCfg->txMode.pres)
9306 if((ueCfg->txMode.txModeEnum ==RGR_UE_TM_3) ||
9307 (ueCfg->txMode.txModeEnum ==RGR_UE_TM_4))
9309 if(ueCfg->ueCodeBookRstCfg.pres)
9312 rgSCHCmnComputeRank(ueCfg->txMode.txModeEnum,
9313 ueCfg->ueCodeBookRstCfg.pmiBitMap, cell->numTxAntPorts);
9317 ueDl->mimoInfo.ri = 1;
9322 ueDl->mimoInfo.ri = 1;
9327 ueDl->mimoInfo.ri = 1;
9331 ueDl->mimoInfo.ri = 1;
9332 #endif /*TFU_UPGRADE */
9333 ueDl->mimoInfo.cwInfo[0].cqi = cellSchd->dl.ccchCqi;
9334 ueDl->mimoInfo.cwInfo[1].cqi = cellSchd->dl.ccchCqi;
9338 /***********************************************************
9340 * Func : rgSCHCmnUpdUeUlCqiInfo
9342 * Desc : Updates the UE's UL CQI information
9350 **********************************************************/
9352 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo
9357 RgSchCmnUe *ueSchCmn,
9358 RgSchCmnCell *cellSchd,
9362 PRIVATE Void rgSCHCmnUpdUeUlCqiInfo(cell, ue, ueUl, ueSchCmn, cellSchd, isEcp)
9366 RgSchCmnUe *ueSchCmn;
9367 RgSchCmnCell *cellSchd;
9372 TRC2(rgSCHCmnUpdUeUlCqiInfo)
9375 if(ue->srsCb.srsCfg.type == RGR_SCH_SRS_SETUP)
9377 if(ue->ul.ulTxAntSel.pres)
9379 ueUl->crntUlCqi[ue->srsCb.selectedAnt] = cellSchd->ul.dfltUlCqi;
9380 ueUl->validUlCqi = ueUl->crntUlCqi[ue->srsCb.selectedAnt];
9384 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9385 ueUl->validUlCqi = ueUl->crntUlCqi[0];
9387 ue->validTxAnt = ue->srsCb.selectedAnt;
9391 ueUl->validUlCqi = cellSchd->ul.dfltUlCqi;
9395 ueUl->ulLaCb.cqiBasediTbs = rgSchCmnUlCqiToTbsTbl[isEcp]
9396 [ueUl->validUlCqi] * 100;
9397 ueUl->ulLaCb.deltaiTbs = 0;
9401 ueUl->crntUlCqi[0] = cellSchd->ul.dfltUlCqi;
9402 #endif /*TFU_UPGRADE */
9403 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgUeCatTbl, ueSchCmn->cmn.ueCat);
9404 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9406 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9410 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
9415 /***********************************************************
9417 * Func : rgSCHCmnUpdUeCatCfg
9419 * Desc : Updates UL and DL UE information derived from the UE category
9427 **********************************************************/
9429 PRIVATE Void rgSCHCmnUpdUeCatCfg
9435 PRIVATE Void rgSCHCmnUpdUeCatCfg(ue, cell)
9440 RgSchDlHqEnt *hqE = NULLP;
9441 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9442 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
9443 RgSchCmnUe *ueSchCmn = RG_SCH_CMN_GET_UE(ue,cell);
9444 RgSchCmnCell *cellSchd = RG_SCH_CMN_GET_CELL(cell);
9446 TRC2(rgSCHCmnUpdUeCatCfg)
9448 ueDl->maxTbBits = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlTbBits;
9450 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
9453 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
9454 if(((CM_LTE_UE_CAT_6 == ueSchCmn->cmn.ueCat )
9455 ||(CM_LTE_UE_CAT_7 == ueSchCmn->cmn.ueCat))
9456 && (RG_SCH_MAX_TX_LYRS_4 == ri))
9458 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[1];
9462 ueDl->maxTbSz = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxDlBits[0];
9465 ueDl->maxSbSz = (rgUeCatTbl[ueSchCmn->cmn.ueCat].maxSftChBits/
9467 if (rgUeCatTbl[ueSchCmn->cmn.ueCat].ul64qamSup == FALSE)
9469 ueUl->maxUlCqi = cellSchd->ul.max16qamCqi;
9473 ueUl->maxUlCqi = RG_SCH_CMN_UL_NUM_CQI - 1;
9475 ue->ul.maxBytesPerUePerTti = rgUeCatTbl[ueSchCmn->cmn.ueCat].maxUlBits * \
9476 RG_SCH_CMN_MAX_BITS_RATIO / (RG_SCH_CMN_UL_COM_DENOM*8);
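/* Editorial worked example (assumed numbers, not from the original source):
 * maxBytesPerUePerTti = maxUlBits * RG_SCH_CMN_MAX_BITS_RATIO /
 *                       (RG_SCH_CMN_UL_COM_DENOM * 8).
 * Assuming a category whose maxUlBits = 51024 and a ratio/denominator pair
 * where RG_SCH_CMN_MAX_BITS_RATIO equals RG_SCH_CMN_UL_COM_DENOM (factor 1),
 * the cap is 51024 / 8 = 6378 bytes per TTI for that UE category. */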
9481 * @brief UE reconfiguration for scheduler.
9485 * Function : rgSCHCmnRgrUeRecfg
9487 * This function updates UE-specific scheduler
9488 * information upon UE reconfiguration.
9490 * @param[in] RgSchCellCb *cell
9491 * @param[in] RgSchUeCb *ue
9492 * @param[in] RgrUeRecfg *ueRecfg
9493 * @param[out] RgSchErrInfo *err
9499 PUBLIC S16 rgSCHCmnRgrUeRecfg
9503 RgrUeRecfg *ueRecfg,
9507 PUBLIC S16 rgSCHCmnRgrUeRecfg(cell, ue, ueRecfg, err)
9510 RgrUeRecfg *ueRecfg;
9514 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9517 TRC2(rgSCHCmnRgrUeRecfg);
9518 /* Basic validations */
9519 if (ueRecfg->ueRecfgTypes & RGR_UE_TXMODE_RECFG)
9522 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg, cell->numTxAntPorts);
9524 rgSCHCmnDlHdlTxModeRecfg(cell, ue, ueRecfg);
9525 #endif /* TFU_UPGRADE */
9527 if(ueRecfg->ueRecfgTypes & RGR_UE_CSG_PARAM_RECFG)
9529 ue->csgMmbrSta = ueRecfg->csgMmbrSta;
9531 /* Changes for UE Category reconfiguration feature */
9532 if(ueRecfg->ueRecfgTypes & RGR_UE_UECAT_RECFG)
9534 rgSCHCmnUpdUeCatCfg(ue, cell);
9536 if (ueRecfg->ueRecfgTypes & RGR_UE_APRD_DLCQI_RECFG)
9538 RgSchUeCellInfo *pCellInfo = RG_SCH_CMN_GET_PCELL_INFO(ue);
9539 pCellInfo->acqiCb.aCqiCfg = ueRecfg->aprdDlCqiRecfg;
9542 if (ueRecfg->ueRecfgTypes & RGR_UE_PRD_DLCQI_RECFG)
9544 if ((ueRecfg->prdDlCqiRecfg.pres == TRUE)
9545 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD10)
9546 && (ueRecfg->prdDlCqiRecfg.prdModeEnum != RGR_PRD_CQI_MOD20))
9548 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unsupported periodic CQI "
9549 "reporting mode %d for old CRNIT:%d",
9550 (int)ueRecfg->prdDlCqiRecfg.prdModeEnum,ueRecfg->oldCrnti);
9551 err->errCause = RGSCHERR_SCH_CFG;
9554 ue->dl.ueDlCqiCfg.prdCqiCfg = ueRecfg->prdDlCqiRecfg;
9558 if (ueRecfg->ueRecfgTypes & RGR_UE_ULPWR_RECFG)
9560 if (rgSCHPwrUeRecfg(cell, ue, ueRecfg) != ROK)
9562 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9563 "Power Reconfiguration Failed for OLD CRNTI:%d",ueRecfg->oldCrnti);
9568 if (ueRecfg->ueRecfgTypes & RGR_UE_QOS_RECFG)
9570 /* Uplink Sched related Initialization */
9571 if ((ueRecfg->ueQosRecfg.dlAmbr == 0) && (ueRecfg->ueQosRecfg.ueBr == 0))
9573 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Ul Ambr and DL Ambr "
9574 "configured as 0 for OLD CRNTI:%d",ueRecfg->oldCrnti);
9575 err->errCause = RGSCHERR_SCH_CFG;
9578 ue->ul.cfgdAmbr = (ueRecfg->ueQosRecfg.ueBr * \
9579 RG_SCH_CMN_REFRESH_TIME)/100;
9580 /* Downlink Sched related Initialization */
9581 ue->dl.ambrCfgd = (ueRecfg->ueQosRecfg.dlAmbr * \
9582 RG_SCH_CMN_REFRESH_TIME)/100;
9583 /* Fix: syed Update the effAmbr and effUeBR fields w.r.t the
9584 * new QOS configuration */
9585 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9586 /* Fix: syed align multiple UEs to refresh at same time */
9587 rgSCHCmnGetRefreshPer(cell, ue, &waitPer);
9588 rgSCHCmnApplyUeRefresh(cell, ue);
9589 rgSCHCmnAddUeToRefreshQ(cell, ue, waitPer);
9592 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9594 if ((cellSchCmn->apisEmtcUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9596 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9597 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9600 if ((cellSchCmn->apisEmtcDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9602 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9603 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9610 if ((cellSchCmn->apisUl->rgSCHRgrUlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9612 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9613 "Spec Sched UL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9616 if ((cellSchCmn->apisDl->rgSCHRgrDlUeRecfg(cell, ue, ueRecfg, err)) != ROK)
9618 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9619 "Spec Sched DL UE ReCFG FAILED for CRNTI:%d",ue->ueId);
9623 /* DLFS UE Config */
9624 if (cellSchCmn->dl.isDlFreqSel)
9626 if ((cellSchCmn->apisDlfs->rgSCHDlfsUeRecfg(cell, ue, \
9627 ueRecfg, err)) != ROK)
9629 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9630 "DLFS UE re-config FAILED for CRNTI:%d",ue->ueId);
9636 /* Invoke re-configuration on SPS module */
9637 if (rgSCHCmnSpsUeRecfg(cell, ue, ueRecfg, err) != ROK)
9639 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
9640 "DL SPS ReCFG FAILED for UE CRNTI:%d", ue->ueId);
9646 } /* rgSCHCmnRgrUeRecfg*/
9648 /***********************************************************
9650 * Func : rgSCHCmnUlUeDelAllocs
9652 * Desc : Deletion of all UE allocations.
9660 **********************************************************/
9662 PRIVATE Void rgSCHCmnUlUeDelAllocs
9668 PRIVATE Void rgSCHCmnUlUeDelAllocs(cell, ue)
9673 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
9674 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
9677 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
9679 TRC2(rgSCHCmnUlUeDelAllocs);
9681 for (i = 0; i < ueUl->hqEnt.numHqPrcs; ++i)
9683 RgSchUlHqProcCb *proc = rgSCHUhmGetUlHqProc(cell, ue, i);
9686 /* proc can't be NULL here */
9694 /* Insure fix: avoid reading dangling memory; set crntAlloc to NULLP */
9696 if(proc->alloc == ulSpsUe->ulSpsSchdInfo.crntAlloc)
9698 ulSpsUe->ulSpsSchdInfo.crntAlloc = NULLP;
9699 ulSpsUe->ulSpsSchdInfo.crntAllocSf = NULLP;
9703 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9704 proc->alloc,ue->isEmtcUe);
9706 rgSCHCmnUlFreeAllocation(cell, &cellUl->ulSfArr[proc->ulSfIdx],
9709 /* PHY probably needn't be intimated since
9710 * whatever intimation it needs happens at the last minute
9713 /* Fix: syed Adaptive Msg3 Retx crash. Remove the harqProc
9714 * from adaptive retx List. */
9715 if (proc->reTxLnk.node)
9718 //TODO_SID: Need to take care
9719 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
9720 proc->reTxLnk.node = (PTR)NULLP;
9728 /***********************************************************
9730 * Func : rgSCHCmnDelUeFrmRefreshQ
9732 * Desc : Removes a UE from the refresh queue, so that the UE is
9733 * no longer periodically triggered to refresh its GBR and
9742 **********************************************************/
9744 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ
9750 PRIVATE Void rgSCHCmnDelUeFrmRefreshQ(cell, ue)
9755 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
9757 RgSchCmnUeInfo *ueSchd = RG_SCH_CMN_GET_CMN_UE(ue);
9759 TRC2(rgSCHCmnDelUeFrmRefreshQ);
9761 #ifdef RGL_SPECIFIC_CHANGES
9762 if(ue->refreshOffset < RGSCH_MAX_REFRESH_GRPSZ)
9764 if(cell->refreshUeCnt[ue->refreshOffset])
9766 cell->refreshUeCnt[ue->refreshOffset]--;
9772 cmMemset((U8 *)&arg, 0, sizeof(arg));
9773 arg.tqCp = &sched->tmrTqCp;
9774 arg.tq = sched->tmrTq;
9775 arg.timers = &ueSchd->tmr;
9779 arg.evnt = RG_SCH_CMN_EVNT_UE_REFRESH;
9785 /***********************************************************
9787 * Func : rgSCHCmnUeCcchSduDel
9789 * Desc : Clear CCCH SDU scheduling context.
9797 **********************************************************/
9799 PRIVATE Void rgSCHCmnUeCcchSduDel
9805 PRIVATE Void rgSCHCmnUeCcchSduDel(cell, ueCb)
9810 RgSchDlHqEnt *hqE = NULLP;
9811 RgSchDlHqProcCb *ccchSduHqP = NULLP;
9812 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
9814 TRC2(rgSCHCmnUeCcchSduDel);
9816 hqE = RG_SCH_CMN_GET_UE_HQE(ueCb, cell);
9821 ccchSduHqP = hqE->ccchSduProc;
9822 if(ueCb->ccchSduLnk.node != NULLP)
9824 /* Remove the ccchSduProc if it is in the Tx list */
9825 cmLListDelFrm(&(cell->ccchSduUeLst), &(ueCb->ccchSduLnk));
9826 ueCb->ccchSduLnk.node = NULLP;
9828 else if(ccchSduHqP != NULLP)
9830 /* Fix for crash due to stale pdcch. Release ccch pdcch*/
9831 if(ccchSduHqP->pdcch)
9833 cmLListDelFrm(&ccchSduHqP->subFrm->pdcchInfo.pdcchs,
9834 &ccchSduHqP->pdcch->lnk);
9835 cmLListAdd2Tail(&cell->pdcchLst, &ccchSduHqP->pdcch->lnk);
9836 ccchSduHqP->pdcch = NULLP;
9838 if(ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node != NULLP)
9840 /* Remove the ccchSduProc if it is in the retx list */
9841 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
9842 &ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk);
9843 /* ccchSduHqP->tbInfo[0].ccchSchdInfo.retxLnk.node = NULLP; */
9844 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
9846 else if ((ccchSduHqP->subFrm != NULLP) &&
9847 (ccchSduHqP->hqPSfLnk.node != NULLP))
9849 rgSCHUtlDlHqPTbRmvFrmTx(ccchSduHqP->subFrm,
9850 ccchSduHqP, 0, FALSE);
9851 rgSCHDhmRlsHqpTb(ccchSduHqP, 0, TRUE);
9861 * @brief UE deletion for scheduler.
9865 * Function : rgSCHCmnUeDel
9867 * This function deletes all scheduler information
9868 * pertaining to a UE.
9870 * @param[in] RgSchCellCb *cell
9871 * @param[in] RgSchUeCb *ue
9875 PUBLIC Void rgSCHCmnUeDel
9881 PUBLIC Void rgSCHCmnUeDel(cell, ue)
9886 RgSchDlHqEnt *hqE = NULLP;
9887 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
9889 RgSchCmnAllocRecord *allRcd;
9891 RgSchCmnCell *cellSchCmn = RG_SCH_CMN_GET_CELL(cell);
9893 TRC2(rgSCHCmnUeDel);
9895 if (RG_SCH_CMN_GET_UE(ue,cell) == NULLP)
9897 /* Common scheduler config has not happened yet */
9900 hqE = RG_SCH_CMN_GET_UE_HQE(ue, cell);
9903 /* UE Free can be triggered before MSG4 done when dlHqE is not updated */
9907 rgSCHEmtcCmnUeCcchSduDel(cell, ue);
9912 rgSCHCmnUeCcchSduDel(cell, ue);
9915 rgSCHCmnDelUeFrmRefreshQ(cell, ue);
9917 rgSCHCmnUlUeDelAllocs(cell, ue);
9919 rgSCHCmnDelRachInfo(cell, ue);
9922 if(TRUE == ue->isEmtcUe)
9924 cellSchCmn->apisEmtcUl->rgSCHFreeUlUe(cell, ue);
9929 cellSchCmn->apisUl->rgSCHFreeUlUe(cell, ue);
9934 for(idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
9936 if(ue->cellInfo[idx] != NULLP)
9938 rgSCHSCellDelUeSCell(cell,ue,idx);
9945 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
9947 cellSchCmn->apisEmtcDl->rgSCHFreeDlUe(cell, ue);
9952 cellSchCmn->apisDl->rgSCHFreeDlUe(cell, ue);
9954 rgSCHPwrUeDel(cell, ue);
9957 rgSCHCmnSpsUeDel(cell, ue);
9958 #endif /* LTEMAC_SPS*/
9961 rgSchCmnDlSfHqDel(ue, cell);
9963 /* DLFS UE delete */
9964 if (cellSchCmn->dl.isDlFreqSel)
9966 cellSchCmn->apisDlfs->rgSCHDlfsUeDel(cell, ue);
9968 node = ueUl->ulAllocLst.first;
9970 /* ccpu00117052 - MOD - Passing double pointer in all the places of
9971 rgSCHUtlFreeSBuf function call for proper NULLP assignment*/
9974 allRcd = (RgSchCmnAllocRecord *)node->node;
9976 cmLListDelFrm(&ueUl->ulAllocLst, &allRcd->lnk);
9977 rgSCHUtlFreeSBuf(cell->instIdx,
9978 (Data**)(&allRcd), (sizeof(RgSchCmnAllocRecord)));
9981 for(cnt = 0; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
9983 if (ue->ul.lcgArr[cnt].sch != NULLP)
9985 rgSCHUtlFreeSBuf(cell->instIdx,
9986 (Data**)(&(ue->ul.lcgArr[cnt].sch)), (sizeof(RgSchCmnLcg)));
9990 /* Fix : syed Moved hqEnt deinit to rgSCHCmnDlDeInitHqEnt */
9991 idx = (U8)((cell->cellId - rgSchCb[cell->instIdx].genCfg.startCellId) & (CM_LTE_MAX_CELLS - 1));
9992 rgSCHUtlFreeSBuf(cell->instIdx,
9993 (Data**)(&(((ue->cellInfo[ue->cellIdToCellIdxMap[idx]])->sch))), (sizeof(RgSchCmnUe)));
9995 } /* rgSCHCmnUeDel */
9999 * @brief This function handles the common code rate configurations
10000 * done as part of RgrCellCfg/RgrCellRecfg.
10004 * Function: rgSCHCmnDlCnsdrCmnRt
10005 * Purpose: This function handles the common code rate configurations
10006 * done as part of RgrCellCfg/RgrCellRecfg.
10008 * Invoked by: Scheduler
10010 * @param[in] RgSchCellCb *cell
10011 * @param[in] RgrDlCmnCodeRateCfg *dlCmnCodeRate
10016 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt
10019 RgrDlCmnCodeRateCfg *dlCmnCodeRate
10022 PRIVATE S16 rgSCHCmnDlCnsdrCmnRt(cell, dlCmnCodeRate)
10024 RgrDlCmnCodeRateCfg *dlCmnCodeRate;
10027 RgSchCmnCell *cellDl = RG_SCH_CMN_GET_CELL(cell);
10034 TRC2(rgSCHCmnDlCnsdrCmnRt);
10036 /* The code rate is expressed as bits per 1024 physical-layer bits; since the
10037 * modulation order is 2 (QPSK), this equals bits per 512 REs */
10038 if (dlCmnCodeRate->bcchPchRaCodeRate != 0)
10040 bitsPerRb = ((dlCmnCodeRate->bcchPchRaCodeRate * 2) *
10041 cellDl->dl.noResPerRb[3])/1024;
10045 bitsPerRb = ((RG_SCH_CMN_DEF_BCCHPCCH_CODERATE * 2) *
10046 cellDl->dl.noResPerRb[3])/1024;
10048 /* Store bitsPerRb in cellDl->dl to use later to determine
10049 * Number of RBs for UEs with SI-RNTI, P-RNTI and RA-RNTI */
10050 cellDl->dl.bitsPerRb = bitsPerRb;
10051 /* ccpu00115595 end*/
10052 /* calculate the ITbs for 2 RBs. Initialize ITbs to MAX value */
10055 bitsPer2Rb = bitsPerRb * rbNum;
10056 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer2Rb))
10059 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs2Rbs = 0) :
10060 (cellDl->dl.cmnChITbs.iTbs2Rbs = i-1);
10062 /* calculate the ITbs for 3 RBs. Initialize ITbs to MAX value */
10065 bitsPer3Rb = bitsPerRb * rbNum;
10066 while ((i < 9) && (rgTbSzTbl[0][i][rbNum - 1] <= bitsPer3Rb))
10069 (i <= 1)? (cellDl->dl.cmnChITbs.iTbs3Rbs = 0) :
10070 (cellDl->dl.cmnChITbs.iTbs3Rbs = i-1);
10073 pdcchBits = 1 + /* Flag for format0/format1a differentiation */
10074 1 + /* Localized/distributed VRB assignment flag */
10077 3 + /* Harq process Id */
10079 4 + /* Harq process Id */
10080 2 + /* UL Index or DAI */
10082 1 + /* New Data Indicator */
10085 1 + rgSCHUtlLog32bitNbase2((cell->bwCfg.dlTotalBw * \
10086 (cell->bwCfg.dlTotalBw + 1))/2);
10087 /* Resource block assignment ceil[log2(bw(bw+1)/2)] : \
10088 Since VRB is local */
10089 /* For TDD consider DAI */
10091 /* Convert the pdcchBits to actual pdcchBits required for transmission */
10092 if (dlCmnCodeRate->pdcchCodeRate != 0)
10094 pdcchBits = (pdcchBits * 1024)/dlCmnCodeRate->pdcchCodeRate;
10095 if (pdcchBits <= 288) /* 288 : Num of pdcch bits for aggrLvl=4 */
10097 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
10099 else /* 576 : Num of pdcch bits for aggrLvl=8 */
10101 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL8;
10106 cellDl->dl.cmnChAggrLvl = CM_LTE_AGGR_LVL4;
10108 if (dlCmnCodeRate->ccchCqi == 0)
10114 cellDl->dl.ccchCqi = dlCmnCodeRate->ccchCqi;
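/* Editorial worked example (assumed numbers, not from the original source):
 * with bcchPchRaCodeRate = 300 (bits per 1024 channel bits) and
 * noResPerRb[3] = 120 REs, the common-channel capacity per RB is
 *   bitsPerRb = (300 * 2 * 120) / 1024 ~= 70 bits.
 * For the PDCCH aggregation level, a ~40-bit DCI at pdcchCodeRate = 150 needs
 *   (40 * 1024) / 150 = 273 coded bits <= 288, so CM_LTE_AGGR_LVL4 is chosen;
 * anything above 288 coded bits falls back to CM_LTE_AGGR_LVL8. */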
10121 * @brief This function handles the configuration of cell for the first
10122 * time by the scheduler.
10126 * Function: rgSCHCmnDlRgrCellCfg
10127 * Purpose: Configuration received is stored into the data structures
10128 * Also, update the scheduler with the number of frames of
10129 * RACH preamble transmission.
10131 * Invoked by: BO and Scheduler
10133 * @param[in] RgSchCellCb* cell
10134 * @param[in] RgrCellCfg* cfg
10139 PRIVATE S16 rgSCHCmnDlRgrCellCfg
10146 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10152 RgSchCmnCell *cellSch;
10157 U8 maxDlSubfrms = cell->numDlSubfrms;
10158 U8 splSubfrmIdx = cfg->spclSfCfgIdx;
10161 RgSchTddSubfrmInfo subfrmInfo = rgSchTddMaxUlSubfrmTbl[cell->ulDlCfgIdx];
10172 TRC2(rgSCHCmnDlRgrCellCfg);
10175 cellSch = RG_SCH_CMN_GET_CELL(cell);
10176 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->\
10177 rachCfg.preambleFormat];
10178 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10179 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10181 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10182 3 TTI (MAX L1+L2 processing delay at the UE) */
10183 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10184 rgSchCmnHarqRtt[cell->ulDlCfgIdx] + 3;
10185 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10186 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10187 if (cfg->maxUePerDlSf == 0)
10189 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10191 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10197 if (cell->bwCfg.dlTotalBw <= 10)
10207 /* DwPTS Scheduling Changes Start */
10208 cellSch->dl.splSfCfg = splSubfrmIdx;
10210 if (cfg->isCpDlExtend == TRUE)
10212 if((0 == splSubfrmIdx) || (4 == splSubfrmIdx) ||
10213 (7 == splSubfrmIdx) || (8 == splSubfrmIdx)
10216 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10220 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10225 /* Refer to 36.213 Section 7.1.7 */
10226 if((0 == splSubfrmIdx) || (5 == splSubfrmIdx))
10228 cell->splSubfrmCfg.isDlDataAllowed = FALSE;
10232 cell->splSubfrmCfg.isDlDataAllowed = TRUE;
10235 /* DwPTS Scheduling Changes End */
10237 splSfCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10238 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
10240 for (sfCount = 0; sfCount < maxDlSubfrms; sfCount++)
10242 sf = cell->subFrms[sfCount];
10243 /* Check whether sfCount matches the first special subframe (at index 0)
10244 * or any subsequent special subframe */
10245 if(subfrmInfo.switchPoints == 1)
10247 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10248 RG_SCH_CMN_10_MS_PRD, &subfrmInfo);
10252 isSplfrm = rgSCHCmnIsSplSubfrm(swPtCnt, sfCount,
10253 RG_SCH_CMN_5_MS_PRD, &subfrmInfo);
10255 if(isSplfrm == TRUE)
10258 /* DwPTS Scheduling Changes Start */
10259 if (cell->splSubfrmCfg.isDlDataAllowed == TRUE)
10261 sf->sfType = RG_SCH_SPL_SF_DATA;
10265 sf->sfType = RG_SCH_SPL_SF_NO_DATA;
10267 /* DwPTS Scheduling Changes End */
10271 /* DwPTS Scheduling Changes Start */
10272 if (sf->sfNum != 0)
10274 sf->sfType = RG_SCH_DL_SF;
10278 sf->sfType = RG_SCH_DL_SF_0;
10280 /* DwPTS Scheduling Changes End */
10283 /* Calculate the number of CCEs per subframe in the cell */
10284 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][sf->sfNum];
10285 if(cell->dynCfiCb.isDynCfiEnb == TRUE)
10287 /* If the dynamic CFI feature is enabled, the default CFI
10288 * value of 1 is used */
10289 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][1];
10293 if (sf->sfType == RG_SCH_SPL_SF_DATA)
10295 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
10299 sf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi)];
10304 /* Initialize the RACH response scheduling related information */
10305 if(rgSCHCmnDlRachInfoInit(cell) != ROK)
10310 /* Allocate PRACH preamble list */
10311 rgSCHCmnDlCreateRachPrmLst(cell);
10313 /* Initialize PHICH offset information */
10314 rgSCHCmnDlPhichOffsetInit(cell);
10316 /* Update the size of HARQ ACK/NACK feedback table */
10317 /* The array size is increased by 2 to have enough free indices, where other
10318 * indices are busy waiting for HARQ feedback */
10319 cell->ackNackFdbkArrSize = rgSchTddANFdbkMapTbl[cell->ulDlCfgIdx] + 2;
10321 /* Initialize expected HARQ ACK/NACK feedback time */
10322 rgSCHCmnDlANFdbkInit(cell);
10324 /* Initialize UL association set index */
10325 if(cell->ulDlCfgIdx != 0)
10327 rgSCHCmnDlKdashUlAscInit(cell);
10330 if (cfg->isCpDlExtend == TRUE)
10332 cp = RG_SCH_CMN_EXT_CP;
10334 cell->splSubfrmCfg.dwPts =
10335 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlDwPts;
10337 if ( cell->splSubfrmCfg.dwPts == 0 )
10339 cell->isDwPtsCnted = FALSE;
10343 cell->isDwPtsCnted = TRUE;
10346 if(cfg->isCpUlExtend == TRUE)
10348 cell->splSubfrmCfg.upPts =
10349 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlExtUpPts;
10353 cell->splSubfrmCfg.upPts =
10354 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].extDlNorUpPts;
10359 cp = RG_SCH_CMN_NOR_CP;
10361 cell->splSubfrmCfg.dwPts =
10362 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlDwPts;
10363 cell->isDwPtsCnted = TRUE;
10365 if(cfg->isCpUlExtend == TRUE)
10367 cell->splSubfrmCfg.upPts =
10368 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlExtUpPts;
10372 cell->splSubfrmCfg.upPts =
10373 rgSchTddSplSubfrmInfoTbl[splSubfrmIdx].norDlNorUpPts;
10377 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10378 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++,cfiIdx++)
10380 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
10381 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10382 [cell->numTxAntPorts]][cfiIdx];
10383 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10384 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10385 [cell->numTxAntPorts]][cfiIdx];
10388 /* Initializing the values of CFI parameters */
10389 if(cell->dynCfiCb.isDynCfiEnb)
10391 /* If DCFI is enabled, current CFI value will start from 1 */
10392 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10396 /* If DCFI is disabled, current CFI value is set as default max allowed CFI value */
10397 cellSch->dl.currCfi = RGSCH_MIN(cell->dynCfiCb.maxCfi, cellSch->cfiCfg.cfi);
10398 cellSch->dl.newCfi = cellSch->dl.currCfi;
10401 /* Include CRS REs while calculating Efficiency
10402 * The number of Resource Elements occupied by CRS depends on Number of
10403 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10404 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10405 * details of the same. Please note that PDCCH overlap symbols are not
10406 * considered in the CRS RE deduction */
10407 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10409 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10410 - numPdcchSym) *RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
10413 /* DwPTS Scheduling Changes Start */
10414 antPortIdx = (cell->numTxAntPorts == 1)? 0:
10415 ((cell->numTxAntPorts == 2)? 1: 2);
10417 if (cp == RG_SCH_CMN_NOR_CP)
10419 splSfIdx = (splSubfrmIdx == 4)? 1: 0;
10423 splSfIdx = (splSubfrmIdx == 3)? 1: 0;
10426 numCrs = rgSchCmnDwptsCrs[splSfIdx][antPortIdx];
10428 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI-1; cfi++)
10430 /* If CFI is 2 and Ant Port is 4, don't consider the sym 1 CRS REs */
10431 if (antPortIdx == 2 && cfi == 2)
10435 cellSch->dl.numReDwPts[cfi] = ((cell->splSubfrmCfg.dwPts - cfi)*
10436 RB_SCH_CMN_NUM_SCS_PER_RB) - numCrs;
10438 /* DwPTS Scheduling Changes End */
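/* Editorial worked example (assumed numbers, not from the original source):
 * numReDwPts[cfi] = ((dwPts - cfi) * RB_SCH_CMN_NUM_SCS_PER_RB) - numCrs.
 * With a DwPTS of 11 OFDM symbols, cfi = 1, 12 subcarriers per RB and
 * numCrs = 16 CRS REs, this leaves (11 - 1) * 12 - 16 = 104 REs per RB
 * usable for PDSCH in a special subframe carrying data. */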
10440 if (cfg->maxDlBwPerUe == 0)
10442 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10446 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10448 if (cfg->maxDlRetxBw == 0)
10450 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10454 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10456 /* Fix: MUE_PERTTI_DL*/
10457 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10458 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10459 if (cfg->maxUePerDlSf == 0)
10461 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10463 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10464 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10465 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10467 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10468 "Invalid configuration !: "
10469 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10470 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10474 else if (!cfg->maxCcchPerDlSf)
10476 /* ccpu00143032: maxCcchPerDlSf 0 means it is not configured by the application,
10477 * hence it is set to maxUePerDlSf. If maxCcchPerDlSf is 0, the scheduler
10478 * doesn't count CCCH allocations towards the MaxUePerTti cap. More than
10479 * 4 UEs then get scheduled and SCH expects >16 HARQ PDUs in a TTI, which causes
10480 * an FLE crash in PHY, as PHY has a limit of 16 max */
10481 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10485 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
10487 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10492 /*ccpu00118273 - ADD - start */
10493 cmLListInit(&cellSch->dl.msg4RetxLst);
10495 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10498 #ifdef RG_PHASE2_SCHED
10499 if (cellSch->apisDlfs == NULLP) /* DLFS specific initialization */
10501 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10503 if (cfg->dlfsCfg.isDlFreqSel)
10505 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10511 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10514 /* Power related configuration */
10515 ret = rgSCHPwrCellCfg(cell, cfg);
10521 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10522 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10523 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10524 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10525 cellSch->dl.msg4pAVal = cfg->msg4pAVal;
10528 #else /* LTE_TDD */
10530 * @brief This function handles the configuration of cell for the first
10531 * time by the scheduler.
10535 * Function: rgSCHCmnDlRgrCellCfg
10536 * Purpose: Configuration received is stored into the data structures
10537 * Also, update the scheduler with the number of frames of
10538 * RACH preamble transmission.
10540 * Invoked by: BO and Scheduler
10542 * @param[in] RgSchCellCb* cell
10543 * @param[in] RgrCellCfg* cfg
10544 * @param[in] RgSchErrInfo* err
10549 PRIVATE S16 rgSCHCmnDlRgrCellCfg
10556 PRIVATE S16 rgSCHCmnDlRgrCellCfg(cell, cfg, err)
10563 RgSchCmnCell *cellSch;
10570 TRC2(rgSCHCmnDlRgrCellCfg);
10572 cellSch = RG_SCH_CMN_GET_CELL(cell);
10574 /* Initialize the parameters with the ones received in the */
10575 /* configuration. */
10577 /* Added matrix 'rgRaPrmblToRaFrmTbl' for computation of RA
10578 * sub-frames from preamble format */
10579 cellSch->dl.numRaSubFrms = rgRaPrmblToRaFrmTbl[cell->rachCfg.preambleFormat];
10581 /*[ccpu00138532]-ADD-fill the Msg4 Harq data */
10582 cell->dlHqCfg.maxMsg4HqTx = cfg->dlHqCfg.maxMsg4HqTx;
10584 /* Msg4 Tx Delay = (HARQ_RTT * MAX_MSG4_HARQ_RETX) +
10585 3 TTI (MAX L1+L2 processing delay at the UE) */
10586 cellSch->dl.msg4TxDelay = (cfg->dlHqCfg.maxMsg4HqTx-1) *
10587 rgSchCmnHarqRtt[7] + 3;
10589 if (cell->bwCfg.dlTotalBw <= 10)
10600 if (cell->isCpDlExtend == TRUE)
10602 cp = RG_SCH_CMN_EXT_CP;
10607 cp = RG_SCH_CMN_NOR_CP;
10611 /* Initializing the cqiToEffTbl and cqiToTbsTbl for every CFI value */
10612 for(cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, cfiIdx++)
10614 cellSch->dl.cqiToTbsTbl[0][cfi] = rgSchCmnCqiToTbs[0][cp][cfiIdx];
10616 cellSch->dl.emtcCqiToTbsTbl[0][cfi] = rgSchEmtcCmnCqiToTbs[0][cp][cfiIdx];
10618 cellSch->dl.cqiToEffTbl[0][cfi] = rgSchCmnEffTbl[0][cp][rgSchCmnAntIdx\
10619 [cell->numTxAntPorts]][cfiIdx];
10620 cellSch->dl.cqiToTbsTbl[1][cfi] = rgSchCmnCqiToTbs[1][cp][cfiIdx];
10622 cellSch->dl.emtcCqiToTbsTbl[1][cfi] = rgSchEmtcCmnCqiToTbs[1][cp][cfiIdx];
10624 cellSch->dl.cqiToEffTbl[1][cfi] = rgSchCmnEffTbl[1][cp][rgSchCmnAntIdx\
10625 [cell->numTxAntPorts]][cfiIdx];
10628 /* Initializing the values of CFI parameters */
10629 if(cell->dynCfiCb.isDynCfiEnb)
10631 /* If DCFI is enabled, current CFI value will start from 1 */
10632 cellSch->dl.currCfi = cellSch->dl.newCfi = 1;
10636 /* If DCFI is disabled, current CFI value is set as default CFI value */
10637 cellSch->dl.currCfi = cellSch->cfiCfg.cfi;
10638 cellSch->dl.newCfi = cellSch->dl.currCfi;
10641 /* Include CRS REs while calculating Efficiency
10642 * The number of Resource Elements occupied by CRS depends on Number of
10643 * Antenna Ports. Please refer to Section 6.10.1 of 3GPP TS 36.211 V8.8.0.
10644 * Also, please refer to Figures 6.10.1.2-1 and 6.10.1.2-2 for diagrammatic
10645 * details of the same. Please note that PDCCH overlap symbols are not
10646 * considered in the CRS RE deduction */
10647 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++, numPdcchSym++)
10649 cellSch->dl.noResPerRb[cfi] = (((noSymPerSlot * RG_SCH_CMN_NUM_SLOTS_PER_SF)
10650 - numPdcchSym) * RB_SCH_CMN_NUM_SCS_PER_RB) - rgSchCmnNumResForCrs[cell->numTxAntPorts];
10653 if (cfg->maxDlBwPerUe == 0)
10655 cellSch->dl.maxDlBwPerUe = RG_SCH_CMN_MAX_DL_BW_PERUE;
10659 cellSch->dl.maxDlBwPerUe = cfg->maxDlBwPerUe;
10661 if (cfg->maxDlRetxBw == 0)
10663 cellSch->dl.maxDlRetxBw = RG_SCH_CMN_MAX_DL_RETX_BW;
10667 cellSch->dl.maxDlRetxBw = cfg->maxDlRetxBw;
10670 /* Fix: MUE_PERTTI_DL*/
10671 cellSch->dl.maxUePerDlSf = cfg->maxUePerDlSf;
10672 cellSch->dl.maxUeNewTxPerTti = cfg->maxDlUeNewTxPerTti;
10673 if (cfg->maxUePerDlSf == 0)
10675 cellSch->dl.maxUePerDlSf = RG_SCH_CMN_MAX_UE_PER_DL_SF;
10677 /* Fix: MUE_PERTTI_DL syed validating Cell Configuration */
10678 if (cellSch->dl.maxUePerDlSf < cellSch->dl.maxUeNewTxPerTti)
10680 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
10681 "FAILED MaxUePerDlSf(%u) < MaxDlUeNewTxPerTti(%u)",
10682 cellSch->dl.maxUePerDlSf,
10683 cellSch->dl.maxUeNewTxPerTti);
10686 /*[ccpu00138609]-ADD- Configure the Max CCCH Counter */
10687 if (cfg->maxCcchPerDlSf > cfg->maxUePerDlSf)
10689 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid configuration !: "
10690 "maxCcchPerDlSf %u > maxUePerDlSf %u",
10691 cfg->maxCcchPerDlSf, cfg->maxUePerDlSf );
10695 else if (!cfg->maxCcchPerDlSf)
10697 /* ccpu00143032: maxCcchPerDlSf 0 means it is not configured by the application,
10698 * hence it is set to maxUePerDlSf. If maxCcchPerDlSf is 0, the scheduler
10699 * doesn't count CCCH allocations towards the MaxUePerTti cap. More than
10700 * 4 UEs then get scheduled and SCH expects >16 HARQ PDUs in a TTI, which causes
10701 * an FLE crash in PHY, as PHY has a limit of 16 max */
10702 cellSch->dl.maxCcchPerDlSf = cfg->maxUePerDlSf;
10706 cellSch->dl.maxCcchPerDlSf = cfg->maxCcchPerDlSf;
10710 if (rgSCHCmnDlCnsdrCmnRt(cell, &cfg->dlCmnCodeRate) != ROK)
10714 cmLListInit(&cellSch->dl.msg4RetxLst);
10716 cmLListInit(&cellSch->dl.ccchSduRetxLst);
10719 #ifdef RG_PHASE2_SCHED
10720 if (cellSch->apisDlfs == NULLP) /* DFLS specific initialization */
10722 cellSch->apisDlfs = &rgSchDlfsSchdTbl[cfg->dlfsSchdType];
10724 if (cfg->dlfsCfg.isDlFreqSel)
10726 ret = cellSch->apisDlfs->rgSCHDlfsCellCfg(cell, cfg, err);
10732 cellSch->dl.isDlFreqSel = cfg->dlfsCfg.isDlFreqSel;
10735 /* Power related configuration */
10736 ret = rgSCHPwrCellCfg(cell, cfg);
10742 cellSch->dl.bcchTxPwrOffset = cfg->bcchTxPwrOffset;
10743 cellSch->dl.pcchTxPwrOffset = cfg->pcchTxPwrOffset;
10744 cellSch->dl.rarTxPwrOffset = cfg->rarTxPwrOffset;
10745 cellSch->dl.phichTxPwrOffset = cfg->phichTxPwrOffset;
10746 RG_SCH_RESET_HCSG_DL_PRB_CNTR(&cellSch->dl);
10749 #endif /* LTE_TDD */
10751 /***********************************************************
10753 * Func : rgSCHCmnUlCalcReqRbCeil
10755 * Desc : Calculate RBs required to satisfy 'bytes' for a given CQI.
10757 * Returns number of RBs such that the requirement
10758 * is necessarily satisfied (does a 'ceiling' of the division).
10761 * Ret : Required RBs (U8)
10767 **********************************************************/
10769 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil
10773 RgSchCmnUlCell *cellUl
10776 PUBLIC U8 rgSCHCmnUlCalcReqRbCeil(bytes, cqi, cellUl)
10779 RgSchCmnUlCell *cellUl;
10782 U32 numRe = RGSCH_CEIL((bytes * 8) * 1024, rgSchCmnUlCqiTbl[cqi].eff);
10783 TRC2(rgSCHCmnUlCalcReqRbCeil);
10784 return ((U8)RGSCH_CEIL(numRe, RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl)));
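/* Worked example (hypothetical numbers): for bytes = 56 and a CQI whose
 * rgSchCmnUlCqiTbl entry carries eff = 2560 (i.e. 2.5 bits per RE scaled by
 * 1024), numRe = ceil(56 * 8 * 1024 / 2560) = 180 REs. If
 * RG_SCH_CMN_UL_NUM_RE_PER_RB() yields 144 REs per RB, the result is
 * ceil(180 / 144) = 2 RBs, so the returned grant always covers 'bytes'. */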
10787 /***********************************************************
10789 * Func : rgSCHCmnPrecompMsg3Vars
10791 * Desc : Precomputes the following for msg3 allocation:
10792 * 1. numSb and Imcs for msg size A
10793 * 2. numSb and Imcs otherwise
10797 * Notes: The corresponding vars in the cellUl struct are filled
10802 **********************************************************/
10804 PRIVATE S16 rgSCHCmnPrecompMsg3Vars
10806 RgSchCmnUlCell *cellUl,
10813 PRIVATE S16 rgSCHCmnPrecompMsg3Vars(cellUl, ccchCqi, msgSzA, sbSize, isEcp)
10814 RgSchCmnUlCell *cellUl;
10826 U16 msg3GrntSz = 0;
10828 TRC2(rgSCHCmnPrecompMsg3Vars);
10830 if (ccchCqi > cellUl->max16qamCqi)
10832 ccchCqi = cellUl->max16qamCqi;
10834 /* #ifndef RG_SCH_CMN_EXP_CP_SUP For ECP Pick the index 1 */
10836 ccchTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10837 ccchMcs = rgSCHCmnUlGetIMcsFrmITbs(ccchTbs, CM_LTE_UE_CAT_1);
10839 /* MCS should fit in 4 bits in RAR */
10845 /* Limit the ccchMcs to 15 as it
10846 * can be inferred from 36.213, section 6.2 that msg3 imcs
10848 * Since the UE doesn't exist right now, we use CAT_1 for the UE
10850 while((ccchMcs = (rgSCHCmnUlGetIMcsFrmITbs(
10851 rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi],CM_LTE_UE_CAT_1))
10853 RG_SCH_CMN_MAX_MSG3_IMCS)
10858 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ccchCqi];
10860 if (msgSzA < RGSCH_MIN_MSG3_GRNT_SZ)
10864 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(msgSzA, ccchCqi, cellUl), sbSize);
10866 numRb = numSb * sbSize;
10867 msg3GrntSz = 8 * msgSzA;
10869 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10872 numRb = numSb * sbSize;
10874 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10878 /* Reversed (corrected) the assignment for preamble-GrpA.
10879 * Refer to TS 36.321, section 5.1.2 */
10880 cellUl->ra.prmblBNumSb = numSb;
10881 cellUl->ra.prmblBIMcs = ccchMcs;
10882 numSb = RGSCH_CEIL(rgSCHCmnUlCalcReqRbCeil(RGSCH_MIN_MSG3_GRNT_SZ, \
10886 numRb = numSb * sbSize;
10887 msg3GrntSz = 8 * RGSCH_MIN_MSG3_GRNT_SZ;
10888 while( (rgTbSzTbl[0][iTbs][numRb - 1]) < msg3GrntSz)
10891 numRb = numSb * sbSize;
10893 while (rgSchCmnMult235Tbl[numSb].match != numSb)
10897 /* Reversed (corrected) the assignment for preamble-GrpA.
10898 * Refer to TS 36.321, section 5.1.2 */
10899 cellUl->ra.prmblANumSb = numSb;
10900 cellUl->ra.prmblAIMcs = ccchMcs;
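/* Illustrative sketch (not part of this module): the while loops above
 * adjust numSb until rgSchCmnMult235Tbl reports it as a matching entry,
 * i.e. a value of the form 2^a * 3^b * 5^c (the PUSCH allocation size
 * constraint from 36.211). A minimal standalone equivalent, using a
 * hypothetical helper name and the project's U8/U16 types, could look
 * like this: */
static U8 rgSCHCmnExampleRoundUp235(U8 n)
{
   U16 v;
   if (n == 0)
   {
      return (0);
   }
   for (v = n; v <= 255; v++)
   {
      U16 rem = v;
      while ((rem % 2) == 0) rem /= 2;
      while ((rem % 3) == 0) rem /= 3;
      while ((rem % 5) == 0) rem /= 5;
      if (rem == 1)
      {
         return ((U8)v); /* smallest 2^a * 3^b * 5^c value >= n */
      }
   }
   return (n); /* no such value fits in U8; fall back to the input */
}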
10904 PUBLIC U32 gPrntPucchDet=0;
10907 /***********************************************************
10909 * Func : rgSCHCmnUlCalcAvailBw
10911 * Desc : Calculates bandwidth available for PUSCH scheduling.
10913 * Ret : S16 (ROK/RFAILED)
10919 **********************************************************/
10921 PRIVATE S16 rgSCHCmnUlCalcAvailBw
10924 RgrCellCfg *cellCfg,
10930 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
10932 RgrCellCfg *cellCfg;
10939 U8 ulBw = cell->bwCfg.ulTotalBw;
10940 U8 n2Rb = cell->pucchCfg.resourceSize;
10941 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
10942 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
10943 U8 n1Cs = cell->pucchCfg.cyclicShift;
10950 U8 exclRb; /* RBs to exclude */
10953 /* To avoid PUCCH and PUSCH collision issue */
10957 /* Maximum value of M as per Table 10.1-1 */
10958 U8 M[RGSCH_MAX_TDD_UL_DL_CFG] = {1, 2, 4, 3, 4, 9, 1};
10960 TRC2(rgSCHCmnUlCalcAvailBw);
10962 if (cell->isCpUlExtend)
10967 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
10969 /* Considering the max no. of CCEs for PUSCH BW calculation
10970 * based on min mi value */
10971 if (cell->ulDlCfgIdx == 0 || cell->ulDlCfgIdx == 6)
10980 totalCce = cell->dynCfiCb.cfi2NCceTbl[mi][cfi];
10982 P = rgSCHCmnGetPValFrmCCE(cell, totalCce-1);
10983 n1PlusOne = cell->rgSchTddNpValTbl[P + 1];
10984 n1Max = (M[cell->ulDlCfgIdx] - 1)*n1PlusOne + (totalCce-1) + n1Pucch;
10986 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
10988 n1RbPart = (c*n1Cs)/pucchDeltaShft;
10989 n1Rb = (n1Max - n1RbPart)/ n1PerRb;
10990 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
10992 /* get the total Number of RB's to be excluded for PUSCH */
10994 if(n1Pucch < n1RbPart)
11000 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
11002 puschRbStart = exclRb/2 + 1;
11004 /* Num of PUCCH RBs = puschRbStart*2 */
11005 if (puschRbStart * 2 >= ulBw)
11007 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11011 *rbStartRef = puschRbStart;
11012 *bwAvailRef = ulBw - puschRbStart * 2;
11014 if(cell->pucchCfg.maxPucchRb !=0 &&
11015 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11017 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11024 /***********************************************************
11026 * Func : rgSCHCmnUlCalcAvailBw
11028 * Desc : Calculates bandwidth available for PUSCH scheduling.
11030 * Ret : S16 (ROK/RFAILED)
11036 **********************************************************/
11038 PRIVATE S16 rgSCHCmnUlCalcAvailBw
11041 RgrCellCfg *cellCfg,
11047 PRIVATE S16 rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, rbStartRef, bwAvailRef)
11049 RgrCellCfg *cellCfg;
11056 U8 ulBw = cell->bwCfg.ulTotalBw;
11057 U8 n2Rb = cell->pucchCfg.resourceSize;
11058 U8 pucchDeltaShft = cell->pucchCfg.deltaShift;
11059 U16 n1Pucch = cell->pucchCfg.n1PucchAn;
11060 U8 n1Cs = cell->pucchCfg.cyclicShift;
11066 U8 exclRb; /* RBs to exclude */
11070 U16 numOfN3PucchRb;
11071 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11074 TRC2(rgSCHCmnUlCalcAvailBw);
11076 if (cell->isCpUlExtend)
11081 n1PerRb = c * 12 / pucchDeltaShft; /* 12/18/36 */
11083 totalCce = cell->dynCfiCb.cfi2NCceTbl[0][cfi];
11085 n1Max = n1Pucch + totalCce-1;
11087 /* ccpu00129978- MOD- excluding RBs based on formula in section 5.4.3 in
11089 n1RbPart = (c*n1Cs)/pucchDeltaShft;
11090 n1Rb = (U8)((n1Max - n1RbPart) / n1PerRb);
11091 mixedRb = RGSCH_CEIL(n1Cs, 8); /* same as 'mixedRb = n1Cs ? 1 : 0' */
11093 /* get the total Number of RB's to be excluded for PUSCH */
11095 if(n1Pucch < n1RbPart)
11101 exclRb = n2Rb + mixedRb + n1Rb; /* RBs to exclude */
11103 /*Support for PUCCH Format 3*/
11105 if (cell->isPucchFormat3Sptd)
11107 numOfN3PucchRb = RGSCH_CEIL(cellSch->dl.maxUePerDlSf,5);
11108 exclRb = exclRb + numOfN3PucchRb;
11111 puschRbStart = exclRb/2 + 1;
11115 #ifndef ALIGN_64BIT
11116 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%ld:%d:%d:%d:%d:%d]\n",
11117 cell->crntTime.sfn, cell->crntTime.slot, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11119 printf("CA_DBG:: puschRbStart:n1Rb:mixedRb:n1PerRb:totalCce:n1Max:n1RbPart:n2Rb::[%d:%d] [%d:%d:%d:%d:%d:%d:%d:%d]\n",
11120 cell->crntTime.sfn, cell->crntTime.slot, puschRbStart, n1Rb, mixedRb,n1PerRb, totalCce, n1Max, n1RbPart, n2Rb);
11124 if (puschRbStart*2 >= ulBw)
11126 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"No bw available for PUSCH");
11130 *rbStartRef = puschRbStart;
11131 *bwAvailRef = ulBw - puschRbStart * 2;
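/* Worked example (all figures hypothetical): ulBw = 50 RBs, n2Rb = 2,
 * pucchDeltaShft = 2 and normal CP (c = 3) give n1PerRb = 3 * 12 / 2 = 18.
 * With n1Cs = 0, n1RbPart = 0 and mixedRb = 0; if n1Max works out to 57,
 * n1Rb = 57 / 18 = 3, so exclRb = 2 + 0 + 3 = 5 and
 * puschRbStart = 5 / 2 + 1 = 3. PUSCH is then left with 50 - 2 * 3 = 44 RBs,
 * with 3 RBs reserved for PUCCH at each band edge. */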
11133 if(cell->pucchCfg.maxPucchRb !=0 &&
11134 (puschRbStart * 2 > cell->pucchCfg.maxPucchRb))
11136 cell->dynCfiCb.maxCfi = RGSCH_MIN(cfi-1, cell->dynCfiCb.maxCfi);
11145 /***********************************************************
11147 * Func : rgSCHCmnUlCellInit
11149 * Desc : Uplink scheduler initialisation for cell.
11157 **********************************************************/
11159 PRIVATE S16 rgSCHCmnUlCellInit
11162 RgrCellCfg *cellCfg
11165 PRIVATE S16 rgSCHCmnUlCellInit(cell, cellCfg)
11167 RgrCellCfg *cellCfg;
11171 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11172 U8 maxUePerUlSf = cellCfg->maxUePerUlSf;
11174 /* Added configuration for maximum number of MSG3s */
11175 U8 maxMsg3PerUlSf = cellCfg->maxMsg3PerUlSf;
11177 U8 maxUlBwPerUe = cellCfg->maxUlBwPerUe;
11178 U8 sbSize = cellCfg->puschSubBand.size;
11186 U16 ulDlCfgIdx = cell->ulDlCfgIdx;
11187 /* [ccpu00127294]-MOD-Change the max Ul subfrms size in TDD */
11188 U8 maxSubfrms = 2 * rgSchTddNumUlSf[ulDlCfgIdx];
11189 U8 ulToDlMap[12] = {0}; /* maximum 6 Subframes in UL * 2 */
11190 U8 maxUlsubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
11191 [RGSCH_NUM_SUB_FRAMES-1];
11195 U8 maxSubfrms = RG_SCH_CMN_UL_NUM_SF;
11201 #if (defined(LTE_L2_MEAS) )
11202 Inst inst = cell->instIdx;
11203 #endif /* #if (defined(LTE_L2_MEAS) || defined(DEBUGP) */
11204 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
11206 TRC2(rgSCHCmnUlCellInit);
11208 cellUl->maxUeNewTxPerTti = cellCfg->maxUlUeNewTxPerTti;
11209 if (maxUePerUlSf == 0)
11211 maxUePerUlSf = RG_SCH_CMN_MAX_UE_PER_UL_SF;
11214 if (maxMsg3PerUlSf == 0)
11216 maxMsg3PerUlSf = RG_SCH_CMN_MAX_MSG3_PER_UL_SF;
11218 /* Fix for a problem seen while sending raRsp
11219 * when maxMsg3PerUlSf is greater than
11220 * RGSCH_MAX_RNTI_PER_RARNTI
11222 if(maxMsg3PerUlSf > RGSCH_MAX_RNTI_PER_RARNTI)
11224 maxMsg3PerUlSf = RGSCH_MAX_RNTI_PER_RARNTI;
11227 if(maxMsg3PerUlSf > maxUePerUlSf)
11229 maxMsg3PerUlSf = maxUePerUlSf;
11232 /*cellUl->maxAllocPerUlSf = maxUePerUlSf + maxMsg3PerUlSf;*/
11233 /*Max MSG3 should be a subset of Max UEs*/
11234 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11235 cellUl->maxMsg3PerUlSf = maxMsg3PerUlSf;
11237 cellUl->maxAllocPerUlSf = maxUePerUlSf;
11239 /* Fix: MUE_PERTTI_UL syed validating Cell Configuration */
11240 if (cellUl->maxAllocPerUlSf < cellUl->maxUeNewTxPerTti)
11242 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
11243 "FAILED: MaxUePerUlSf(%u) < MaxUlUeNewTxPerTti(%u)",
11244 cellUl->maxAllocPerUlSf,
11245 cellUl->maxUeNewTxPerTti);
11251 for(idx = 0; idx < RGSCH_SF_ALLOC_SIZE; idx++)
11253 for(idx = 0; idx < RGSCH_NUM_SUB_FRAMES; idx++)
11257 ret = rgSCHUtlAllocSBuf(inst, (Data **)&(cell->sfAllocArr[idx].
11258 ulUeInfo.ulAllocInfo), (cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc)));
11261 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"Memory allocation failed ");
11266 if (maxUlBwPerUe == 0)
11268 /* ccpu00139362- Setting to configured UL BW instead of MAX BW(100)*/
11269 maxUlBwPerUe = cell->bwCfg.ulTotalBw;
11271 cellUl->maxUlBwPerUe = maxUlBwPerUe;
11273 /* FOR RG_SCH_CMN_EXT_CP_SUP */
11274 if (!cellCfg->isCpUlExtend)
11276 cellUl->ulNumRePerRb = 12 * (14 - RGSCH_UL_SYM_DMRS_SRS);
11280 cellUl->ulNumRePerRb = 12 * (12 - RGSCH_UL_SYM_DMRS_SRS);
11283 if (sbSize != rgSchCmnMult235Tbl[sbSize].match)
11285 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"Invalid subband size %d", sbSize);
11288 //Setting the subband size to 4 which is size of VRBG in 5GTF
11290 sbSize = MAX_5GTF_VRBG_SIZE;
11293 maxSbPerUe = maxUlBwPerUe / sbSize;
11294 if (maxSbPerUe == 0)
11296 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnUlCellInit(): "
11297 "maxUlBwPerUe/sbSize is zero");
11300 cellUl->maxSbPerUe = rgSchCmnMult235Tbl[maxSbPerUe].prvMatch;
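/* Example (hypothetical numbers): maxUlBwPerUe = 88 RBs and sbSize = 4 give
 * maxSbPerUe = 22, which is not of the form 2^a * 3^b * 5^c; prvMatch rounds
 * it down to 20, keeping the per-UE sub-band limit on a 2/3/5-composed value
 * as required for PUSCH allocation sizes. */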
11302 /* CQI related updations */
11303 if ((!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->ulCmnCodeRate.ccchCqi))
11304 || (!RG_SCH_CMN_UL_IS_CQI_VALID(cellCfg->trgUlCqi.trgCqi)))
11306 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnUlCellInit(): "
11310 cellUl->dfltUlCqi = cellCfg->ulCmnCodeRate.ccchCqi;
11312 /* Changed the logic to determine maxUlCqi.
11313 * For a 16qam UE, maxUlCqi is the CQI Index at which
11314 * efficiency is as close as possible to RG_SCH_MAX_CODE_RATE_16QAM
11315 * Refer to 36.213-8.6.1 */
11316 for (i = RG_SCH_CMN_UL_NUM_CQI - 1;i > 0; --i)
11318 RLOG_ARG2(L_INFO,DBG_CELLID,cell->cellId,
11321 rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i]);
11322 #ifdef MAC_SCH_STATS
11323 /* ccpu00128489 ADD Update mcs in hqFailStats here instead of at CRC
11324 * since CQI to MCS mapping does not change. The only exception is for
11325 * ITBS = 19 where the MCS can be 20 or 21 based on the UE cat. We
11326 * choose 20, instead of 21, ie UE_CAT_3 */
11327 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11328 RG_SCH_CMN_UL_TBS_TO_MCS(iTbs, hqFailStats.ulCqiStat[i - 1].mcs);
11331 for (i = RG_SCH_CMN_UL_NUM_CQI - 1; i != 0; --i)
11333 /* Fix for ccpu00123912*/
11334 iTbs = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][i];
11335 if (iTbs <= RGSCH_UL_16QAM_MAX_ITBS) /* corresponds to 16QAM */
11337 RLOG_ARG1(L_INFO,DBG_CELLID,cell->cellId,
11338 "16 QAM CQI %u", i);
11339 cellUl->max16qamCqi = i;
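/* In other words, scanning downwards from the highest CQI, the first index
 * whose mapped iTbs stays within RGSCH_UL_16QAM_MAX_ITBS is recorded as
 * max16qamCqi: the best CQI that can be served without going beyond 16QAM. */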
11345 /* Precompute useful values for RA msg3 */
11346 ret = rgSCHCmnPrecompEmtcMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11347 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11354 /* Precompute useful values for RA msg3 */
11355 ret = rgSCHCmnPrecompMsg3Vars(cellUl, cellCfg->ulCmnCodeRate.ccchCqi,
11356 cell->rachCfg.msgSizeGrpA, sbSize, cell->isCpUlExtend);
11362 cellUl->sbSize = sbSize;
11365 cellUl->numUlSubfrms = maxSubfrms;
11367 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->ulSfArr,
11368 cellUl->numUlSubfrms * sizeof(RgSchUlSf));
11372 cellUl->numUlSubfrms = 0;
11376 /* store the DL subframe corresponding to the PUSCH offset
11377 * in their respective UL subframe */
11378 for(i=0; i < RGSCH_NUM_SUB_FRAMES; i++)
11380 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][i] != 0)
11382 subfrm = (i + rgSchTddPuschTxKTbl[ulDlCfgIdx][i]) % \
11383 RGSCH_NUM_SUB_FRAMES;
11384 subfrm = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][subfrm]-1;
11385 dlIdx = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][i]-1;
11386 RGSCH_ARRAY_BOUND_CHECK( cell->instIdx, ulToDlMap, subfrm);
11387 ulToDlMap[subfrm] = dlIdx;
11390 /* Copy the information in the remaining UL subframes based
11391 * on number of HARQ processes */
11392 for(i=maxUlsubfrms; i < maxSubfrms; i++)
11394 subfrm = i-maxUlsubfrms;
11395 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, i);
11396 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, ulToDlMap, subfrm)
11397 ulToDlMap[i] = ulToDlMap[subfrm];
11401 for (cfi = 1; cfi < RG_SCH_CMN_MAX_CFI; cfi++)
11404 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11406 ret = rgSCHCmnUlCalcAvailBw(cell, cellCfg, cfi, &rbStart, &bwAvail);
11415 cell->ulAvailBw = bwAvail;
11418 numSb = bwAvail/sbSize;
11420 cell->dynCfiCb.bwInfo[cfi].startRb = rbStart;
11421 cell->dynCfiCb.bwInfo[cfi].numSb = numSb;
11424 if(0 == cell->dynCfiCb.maxCfi)
11426 RLOG_ARG3(L_ERROR,DBG_CELLID,cell->cellId,
11427 "Incorrect Default CFI(%u), maxCfi(%u), maxPucchRb(%d)",
11428 cellSch->cfiCfg.cfi, cell->dynCfiCb.maxCfi,
11429 cell->pucchCfg.maxPucchRb);
11435 cellUl->dmrsArrSize = cell->dynCfiCb.bwInfo[1].numSb;
11436 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cellUl->dmrsArr,
11437 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11442 for (i = 0; i < cellUl->dmrsArrSize; ++i)
11444 cellUl->dmrsArr[i] = cellCfg->puschSubBand.dmrs[i];
11447 /* Init subframes */
11448 for (i = 0; i < maxSubfrms; ++i)
11450 ret = rgSCHUtlUlSfInit(cell, &cellUl->ulSfArr[i], i,
11451 cellUl->maxAllocPerUlSf);
11454 for (; i != 0; --i)
11456 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[i-1]);
11458 /* ccpu00117052 - MOD - Passing double pointer
11459 for proper NULLP assignment*/
11460 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)(&(cellUl->dmrsArr)),
11461 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11463 /* ccpu00117052 - MOD - Passing double pointer
11464 for proper NULLP assignment*/
11465 rgSCHUtlFreeSBuf(cell->instIdx,
11466 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11471 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cellUl);
11476 * @brief Scheduler processing on cell configuration.
11480 * Function : rgSCHCmnRgrCellCfg
11482 * This function does requisite initialisation
11483 * and setup for scheduler1 when a cell is configured.
11486 * @param[in] RgSchCellCb *cell
11487 * @param[in] RgrCellCfg *cellCfg
11488 * @param[out] RgSchErrInfo *err
11494 PUBLIC S16 rgSCHCmnRgrCellCfg
11497 RgrCellCfg *cellCfg,
11501 PUBLIC S16 rgSCHCmnRgrCellCfg(cell, cellCfg, err)
11503 RgrCellCfg *cellCfg;
11508 RgSchCmnCell *cellSch;
11509 TRC2(rgSCHCmnRgrCellCfg);
11511 /* As part of RGR cell configuration, validate the CRGCellCfg
11512 * There is no trigger for crgCellCfg from SC1 */
11513 /* Removed failure check for Extended CP */
11515 if (((ret = rgSCHUtlAllocSBuf(cell->instIdx,
11516 (Data**)&(cell->sc.sch), (sizeof(RgSchCmnCell)))) != ROK))
11518 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
11519 "Memory allocation FAILED");
11520 err->errCause = RGSCHERR_SCH_CFG;
11523 cellSch = (RgSchCmnCell *)(cell->sc.sch);
11524 cellSch->cfiCfg = cellCfg->cfiCfg;
11525 cellSch->trgUlCqi.trgCqi = cellCfg->trgUlCqi.trgCqi;
11526 /* Initialize the scheduler refresh timer queues */
11527 cellSch->tmrTqCp.nxtEnt = 0;
11528 cellSch->tmrTqCp.tmrLen = RG_SCH_CMN_NUM_REFRESH_Q;
11530 /* RACHO Intialize the RACH ded Preamble Information */
11531 rgSCHCmnCfgRachDedPrm(cell);
11533 /* Initialize 'Np' value for each 'p' used for
11534 * HARQ ACK/NACK reception */
11535 rgSCHCmnDlNpValInit(cell);
11538 /* Initialize 'Np' value for each 'p' used for
11539 * HARQ ACK/NACK reception */
11541 rgSCHCmnDlNpValInit(cell);
11544 /* Now perform uplink related initializations */
11545 ret = rgSCHCmnUlCellInit(cell, cellCfg);
11548 /* There is no downlink deinit to be performed */
11549 err->errCause = RGSCHERR_SCH_CFG;
11552 ret = rgSCHCmnDlRgrCellCfg(cell, cellCfg, err);
11555 err->errCause = RGSCHERR_SCH_CFG;
11558 /* DL scheduler has no initializations to make */
11559 /* As of now DL scheduler always returns ROK */
11561 rgSCHCmnGetDciFrmtSizes(cell);
11562 rgSCHCmnGetCqiDciFrmt2AggrLvl(cell);
11564 rgSCHCmnGetEmtcDciFrmtSizes(cell);
11565 rgSCHCmnGetCqiEmtcDciFrmt2AggrLvl(cell);
11566 #endif /* EMTC_ENABLE */
11569 if(TRUE == cellCfg->emtcEnable)
11571 cellSch->apisEmtcUl = &rgSchEmtcUlSchdTbl[0];
11572 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
11579 cellSch->apisUl = &rgSchUlSchdTbl[RG_SCH_CMN_GET_UL_SCHED_TYPE(cell)];
11580 ret = cellSch->apisUl->rgSCHRgrUlCellCfg(cell, cellCfg, err);
11586 if(TRUE == cellCfg->emtcEnable)
11588 cellSch->apisEmtcDl = &rgSchEmtcDlSchdTbl[0];
11589 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11596 cellSch->apisDl = &rgSchDlSchdTbl[RG_SCH_CMN_GET_DL_SCHED_TYPE(cell)];
11598 /* Perform SPS specific initialization for the cell */
11599 ret = rgSCHCmnSpsCellCfg(cell, cellCfg, err);
11605 ret = cellSch->apisDl->rgSCHRgrDlCellCfg(cell, cellCfg, err);
11610 rgSCHCmnInitVars(cell);
11613 } /* rgSCHCmnRgrCellCfg*/
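/* At a glance, rgSCHCmnRgrCellCfg() above brings a cell up in this order:
 * allocate the common scheduler context, seed the refresh timer queue,
 * initialise the dedicated-preamble (RACHO) data and the 'Np' values used
 * for HARQ ACK/NACK reception, run the UL cell init (PUCCH exclusion, msg3
 * precomputation, per-CFI bandwidth), run the DL cell config, precompute
 * DCI format sizes and CQI-to-aggregation-level mappings, hand the cell to
 * the configured UL/DL (and EMTC/SPS) scheduler modules, and finally reset
 * the scheduler's running variables. */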
11617 * @brief This function handles the reconfiguration of cell.
11621 * Function: rgSCHCmnRgrCellRecfg
11622 * Purpose: Update the reconfiguration parameters.
11624 * Invoked by: Scheduler
11626 * @param[in] RgSchCellCb* cell
11631 PUBLIC S16 rgSCHCmnRgrCellRecfg
11634 RgrCellRecfg *recfg,
11638 PUBLIC S16 rgSCHCmnRgrCellRecfg(cell, recfg, err)
11640 RgrCellRecfg *recfg;
11645 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11646 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11648 TRC2(rgSCHCmnRgrCellRecfg);
11650 if (recfg->recfgTypes & RGR_CELL_UL_CMNRATE_RECFG)
11652 U8 oldCqi = cellUl->dfltUlCqi;
11653 if (!RG_SCH_CMN_UL_IS_CQI_VALID(recfg->ulCmnCodeRate.ccchCqi))
11655 err->errCause = RGSCHERR_SCH_CFG;
11656 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnRgrCellRecfg(): "
11660 cellUl->dfltUlCqi = recfg->ulCmnCodeRate.ccchCqi;
11661 ret = rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11662 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
11665 cellUl->dfltUlCqi = oldCqi;
11666 rgSCHCmnPrecompMsg3Vars(cellUl, recfg->ulCmnCodeRate.ccchCqi,
11667 cell->rachCfg.msgSizeGrpA, cellUl->sbSize, cell->isCpUlExtend);
11672 if (recfg->recfgTypes & RGR_CELL_DL_CMNRATE_RECFG)
11674 if (rgSCHCmnDlCnsdrCmnRt(cell, &recfg->dlCmnCodeRate) != ROK)
11676 err->errCause = RGSCHERR_SCH_CFG;
11682 if(TRUE == cell->emtcEnable)
11684 /* Invoke UL sched for cell Recfg */
11685 ret = cellSch->apisEmtcUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11691 /* Invoke DL sched for cell Recfg */
11692 ret = cellSch->apisEmtcDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
11701 /* Invoke UL sched for cell Recfg */
11702 ret = cellSch->apisUl->rgSCHRgrUlCellRecfg(cell, recfg, err);
11708 /* Invoke DL sched for cell Recfg */
11709 ret = cellSch->apisDl->rgSCHRgrDlCellRecfg(cell, recfg, err);
11716 if (recfg->recfgTypes & RGR_CELL_DLFS_RECFG)
11718 ret = cellSch->apisDlfs->rgSCHDlfsCellRecfg(cell, recfg, err);
11723 cellSch->dl.isDlFreqSel = recfg->dlfsRecfg.isDlFreqSel;
11726 if (recfg->recfgTypes & RGR_CELL_PWR_RECFG)
11728 ret = rgSCHPwrCellRecfg(cell, recfg);
11738 /***********************************************************
11740 * Func : rgSCHCmnUlCellDeinit
11742 * Desc : Uplink scheduler de-initialisation for cell.
11750 **********************************************************/
11752 PRIVATE Void rgSCHCmnUlCellDeinit
11757 PRIVATE Void rgSCHCmnUlCellDeinit(cell)
11761 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
11764 U8 maxSubfrms = cellUl->numUlSubfrms;
11767 CmLList *lnk = NULLP;
11768 RgSchL2MeasCb *measCb;
11770 TRC2(rgSCHCmnUlCellDeinit);
11773 for(ulSfIdx = 0; ulSfIdx < RGSCH_SF_ALLOC_SIZE; ulSfIdx++)
11775 for(ulSfIdx = 0; ulSfIdx < RGSCH_NUM_SUB_FRAMES; ulSfIdx++)
11778 if(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo != NULLP)
11780 /* ccpu00117052 - MOD - Passing double pointer
11781 for proper NULLP assignment*/
11782 rgSCHUtlFreeSBuf(cell->instIdx,
11783 (Data **)(&(cell->sfAllocArr[ulSfIdx].ulUeInfo.ulAllocInfo)),
11784 cellUl->maxAllocPerUlSf * sizeof(RgInfUeUlAlloc));
11786 /* ccpu00117052 - DEL - removed explicit NULLP assignment
11787 as it is done in above utility function */
11790 /* Free the memory allocated to measCb */
11791 lnk = cell->l2mList.first;
11792 while(lnk != NULLP)
11794 measCb = (RgSchL2MeasCb *)lnk->node;
11795 cmLListDelFrm(&cell->l2mList, lnk);
11797 /* ccpu00117052 - MOD - Passing double pointer
11798 for proper NULLP assignment*/
11799 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&measCb,\
11800 sizeof(RgSchL2MeasCb));
11803 if (cellUl->dmrsArr != NULLP)
11805 /* ccpu00117052 - MOD - Passing double pointer
11806 for proper NULLP assignment*/
11807 rgSCHUtlFreeSBuf(cell->instIdx,(Data **)(&(cellUl->dmrsArr)),
11808 cellUl->dmrsArrSize * sizeof(*cellUl->dmrsArr));
11810 /* De-init subframes */
11812 for (ulSfIdx = 0; ulSfIdx < maxSubfrms; ++ulSfIdx)
11814 for (ulSfIdx = 0; ulSfIdx < RG_SCH_CMN_UL_NUM_SF; ++ulSfIdx)
11817 rgSCHUtlUlSfDeinit(cell, &cellUl->ulSfArr[ulSfIdx]);
11821 if (cellUl->ulSfArr != NULLP)
11823 /* ccpu00117052 - MOD - Passing double pointer
11824 for proper NULLP assignment*/
11825 rgSCHUtlFreeSBuf(cell->instIdx,
11826 (Data **)(&(cellUl->ulSfArr)), maxSubfrms * sizeof(RgSchUlSf));
11834 * @brief Scheduler processing for cell delete.
11838 * Function : rgSCHCmnCellDel
11840 * This function de-initialises and frees memory
11841 * taken up by scheduler1 for the entire cell.
11843 * @param[in] RgSchCellCb *cell
11847 PUBLIC Void rgSCHCmnCellDel
11852 PUBLIC Void rgSCHCmnCellDel(cell)
11856 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
11857 TRC2(rgSCHCmnCellDel);
11862 if (cellSch == NULLP)
11866 /* Perform the deinit for the UL scheduler */
11867 rgSCHCmnUlCellDeinit(cell);
11869 if(TRUE == cell->emtcEnable)
11871 if (cellSch->apisEmtcUl)
11873 cellSch->apisEmtcUl->rgSCHFreeUlCell(cell);
11877 if (cellSch->apisUl)
11879 /* api pointer checks added (here and below in
11880 * this function). pl check. - antriksh */
11881 cellSch->apisUl->rgSCHFreeUlCell(cell);
11884 /* Perform the deinit for the DL scheduler */
11885 cmLListInit(&cellSch->dl.taLst);
11886 if (cellSch->apisDl)
11888 cellSch->apisDl->rgSCHFreeDlCell(cell);
11891 if (cellSch->apisEmtcDl)
11893 rgSCHEmtcInitTaLst(&cellSch->dl);
11895 cellSch->apisEmtcDl->rgSCHFreeDlCell(cell);
11899 /* DLFS de-initialization */
11900 if (cellSch->dl.isDlFreqSel && cellSch->apisDlfs)
11902 cellSch->apisDlfs->rgSCHDlfsCellDel(cell);
11905 rgSCHPwrCellDel(cell);
11907 rgSCHCmnSpsCellDel(cell);
11910 /* ccpu00117052 - MOD - Passing double pointer
11911 for proper NULLP assignment*/
11912 rgSCHUtlFreeSBuf(cell->instIdx,
11913 (Data**)(&(cell->sc.sch)), (sizeof(RgSchCmnCell)));
11915 } /* rgSCHCmnCellDel */
11919 * @brief This function validates QOS parameters for DL.
11923 * Function: rgSCHCmnValidateDlQos
11924 * Purpose: This function validates QOS parameters for DL.
11926 * Invoked by: Scheduler
11928 * @param[in] CrgLchQosCfg *dlQos
11933 PRIVATE S16 rgSCHCmnValidateDlQos
11935 RgrLchQosCfg *dlQos
11938 PRIVATE S16 rgSCHCmnValidateDlQos(dlQos)
11939 RgrLchQosCfg *dlQos;
11942 U8 qci = dlQos->qci;
11944 TRC2(rgSCHCmnValidateDlQos);
11946 if ( qci < RG_SCH_CMN_MIN_QCI || qci > RG_SCH_CMN_MAX_QCI )
11951 if ((qci >= RG_SCH_CMN_GBR_QCI_START) &&
11952 (qci <= RG_SCH_CMN_GBR_QCI_END))
11954 if ((dlQos->mbr == 0) || (dlQos->mbr < dlQos->gbr))
11963 * @brief Scheduler invocation on logical channel addition.
11967 * Function : rgSCHCmnRgrLchCfg
11969 * This function does the required processing when a new
11970 * (dedicated) logical channel is added. Assumes lcg
11971 * pointer in ulLc is set.
11973 * @param[in] RgSchCellCb *cell
11974 * @param[in] RgSchUeCb *ue
11975 * @param[in] RgSchDlLcCb *dlLc
11976 * @param[int] RgrLchCfg *lcCfg
11977 * @param[out] RgSchErrInfo *err
11983 PUBLIC S16 rgSCHCmnRgrLchCfg
11992 PUBLIC S16 rgSCHCmnRgrLchCfg(cell, ue, dlLc, lcCfg, err)
12002 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12004 TRC2(rgSCHCmnRgrLchCfg);
12006 ret = rgSCHUtlAllocSBuf(cell->instIdx,
12007 (Data**)&((dlLc)->sch), (sizeof(RgSchCmnDlSvc)));
12010 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRgrLchCfg(): "
12011 "SCH struct alloc failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12012 err->errCause = RGSCHERR_SCH_CFG;
12015 if(lcCfg->lcType != CM_LTE_LCH_DCCH)
12017 ret = rgSCHCmnValidateDlQos(&lcCfg->dlInfo.dlQos);
12020 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"rgSchCmnCrgLcCfg(): "
12021 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12022 err->errCause = RGSCHERR_SCH_CFG;
12025 /* Perform DL service activation in the scheduler */
12026 ((RgSchCmnDlSvc *)(dlLc->sch))->qci = lcCfg->dlInfo.dlQos.qci;
12027 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = rgSchCmnDlQciPrio[lcCfg->dlInfo.dlQos.qci - 1];
12028 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcCfg->dlInfo.dlQos.gbr * \
12029 RG_SCH_CMN_REFRESH_TIME)/100;
12030 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcCfg->dlInfo.dlQos.mbr * \
12031 RG_SCH_CMN_REFRESH_TIME)/100;
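/* Example (hypothetical figures): with RG_SCH_CMN_REFRESH_TIME = 20 and a
 * configured dlQos.gbr of 1000, the per-refresh-period budget stored here is
 * (1000 * 20) / 100 = 200. The same scaling is applied to mbr here and to the
 * uplink LCG gbr/mbr in rgSCHCmnRgrLcgCfg()/rgSCHCmnRgrLcgRecfg() below. */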
12035 /*assigning highest priority to DCCH */
12036 ((RgSchCmnDlSvc *)(dlLc->sch))->prio=RG_SCH_CMN_DCCH_PRIO;
12039 dlLc->lcType=lcCfg->lcType;
12042 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12044 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcCfg(cell, ue,dlLc ,lcCfg, err);
12053 ret = cellSch->apisDl->rgSCHRgrDlLcCfg(cell, ue, dlLc, lcCfg, err);
12061 if(TRUE == ue->isEmtcUe)
12063 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12072 ret = cellSch->apisUl->rgSCHRgrUlLcCfg(cell, ue, lcCfg, err);
12082 rgSCHSCellDlLcCfg(cell, ue, dlLc);
12088 if(lcCfg->dlInfo.dlSpsCfg.isSpsEnabled)
12090 /* Invoke SPS module if SPS is enabled for the service */
12091 ret = rgSCHCmnSpsDlLcCfg(cell, ue, dlLc, lcCfg, err);
12094 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "rgSchCmnRgrLchCfg(): "
12095 "SPS configuration failed for DL LC for CRNTI:%d LCID:%d",ue->ueId,lcCfg->lcId);
12096 err->errCause = RGSCHERR_SCH_CFG;
12106 * @brief Scheduler invocation on logical channel reconfiguration.
12110 * Function : rgSCHCmnRgrLchRecfg
12112 * This function does the required processing when an existing
12113 * (dedicated) logical channel is reconfigured. Assumes lcg
12114 * pointer in ulLc is set to the old value.
12115 * Independent of whether new LCG is meant to be configured,
12116 * the new LCG scheduler information is accessed and possibly modified.
12118 * @param[in] RgSchCellCb *cell
12119 * @param[in] RgSchUeCb *ue
12120 * @param[in] RgSchDlLcCb *dlLc
12121 * @param[int] RgrLchRecfg *lcRecfg
12122 * @param[out] RgSchErrInfo *err
12128 PUBLIC S16 rgSCHCmnRgrLchRecfg
12133 RgrLchRecfg *lcRecfg,
12137 PUBLIC S16 rgSCHCmnRgrLchRecfg(cell, ue, dlLc, lcRecfg, err)
12141 RgrLchRecfg *lcRecfg;
12146 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12148 TRC2(rgSCHCmnRgrLchRecfg)
12150 if(dlLc->lcType != CM_LTE_LCH_DCCH)
12152 ret = rgSCHCmnValidateDlQos(&lcRecfg->dlRecfg.dlQos);
12156 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
12157 "DlQos validation failed for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12158 err->errCause = RGSCHERR_SCH_CFG;
12161 if (((RgSchCmnDlSvc *)(dlLc->sch))->qci != lcRecfg->dlRecfg.dlQos.qci)
12163 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Qci, hence lc Priority change "
12164 "not supported for CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12165 err->errCause = RGSCHERR_SCH_CFG;
12168 ((RgSchCmnDlSvc *)(dlLc->sch))->gbr = (lcRecfg->dlRecfg.dlQos.gbr * \
12169 RG_SCH_CMN_REFRESH_TIME)/100;
12170 ((RgSchCmnDlSvc *)(dlLc->sch))->mbr = (lcRecfg->dlRecfg.dlQos.mbr * \
12171 RG_SCH_CMN_REFRESH_TIME)/100;
12175 /*assigning highest priority to DCCH */
12176 ((RgSchCmnDlSvc *)(dlLc->sch))->prio = RG_SCH_CMN_DCCH_PRIO;
12180 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12182 ret = cellSch->apisEmtcDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12187 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
12196 ret = cellSch->apisDl->rgSCHRgrDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12201 ret = cellSch->apisUl->rgSCHRgrUlLcRecfg(cell, ue, lcRecfg, err);
12209 if (lcRecfg->recfgTypes & RGR_DL_LC_SPS_RECFG)
12211 /* Invoke SPS module if SPS is enabled for the service */
12212 if(lcRecfg->dlRecfg.dlSpsRecfg.isSpsEnabled)
12214 ret = rgSCHCmnSpsDlLcRecfg(cell, ue, dlLc, lcRecfg, err);
12217 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"SPS re-configuration not "
12218 "supported for dlLC Ignore this CRNTI:%d LCID:%d",ue->ueId,lcRecfg->lcId);
12229 * @brief Scheduler invocation on logical channel group configuration.
12233 * Function : rgSCHCmnRgrLcgCfg
12235 * This function does the required processing when a new
12236 * (dedicated) logical channel group is configured. Assumes lcg
12237 * pointer in ulLc is set.
12239 * @param[in] RgSchCellCb *cell,
12240 * @param[in] RgSchUeCb *ue,
12241 * @param[in] RgSchLcgCb *lcg,
12242 * @param[in] RgrLcgCfg *lcgCfg,
12243 * @param[out] RgSchErrInfo *err
12249 PUBLIC S16 rgSCHCmnRgrLcgCfg
12258 PUBLIC S16 rgSCHCmnRgrLcgCfg(cell, ue, lcg, lcgCfg, err)
12267 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12268 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCfg->ulInfo.lcgId].sch));
12270 TRC2(rgSCHCmnRgrLcgCfg);
12272 ulLcg->cfgdGbr = (lcgCfg->ulInfo.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12273 ulLcg->effGbr = ulLcg->cfgdGbr;
12274 ulLcg->deltaMbr = ((lcgCfg->ulInfo.mbr - lcgCfg->ulInfo.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12275 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12278 if(TRUE == ue->isEmtcUe)
12280 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12289 ret = cellSch->apisUl->rgSCHRgrUlLcgCfg(cell, ue, lcg, lcgCfg, err);
12295 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12297 /* Indicate MAC that this LCG is GBR LCG */
12298 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcgCfg->ulInfo.lcgId, TRUE);
12304 * @brief Scheduler invocation on logical channel group reconfiguration.
12308 * Function : rgSCHCmnRgrLcgRecfg
12310 * This function does the required processing when an existing
12311 * (dedicated) logical channel group is reconfigured. Assumes lcg
12312 * pointer in ulLc is set.
12314 * @param[in] RgSchCellCb *cell,
12315 * @param[in] RgSchUeCb *ue,
12316 * @param[in] RgSchLcgCb *lcg,
12317 * @param[in] RgrLcgRecfg *reCfg,
12318 * @param[out] RgSchErrInfo *err
12324 PUBLIC S16 rgSCHCmnRgrLcgRecfg
12329 RgrLcgRecfg *reCfg,
12333 PUBLIC S16 rgSCHCmnRgrLcgRecfg(cell, ue, lcg, reCfg, err)
12337 RgrLcgRecfg *reCfg;
12342 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12343 RgSchCmnLcg *ulLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[reCfg->ulRecfg.lcgId].sch));
12345 TRC2(rgSCHCmnRgrLcgRecfg);
12347 ulLcg->cfgdGbr = (reCfg->ulRecfg.gbr * RG_SCH_CMN_REFRESH_TIME)/100;
12348 ulLcg->effGbr = ulLcg->cfgdGbr;
12349 ulLcg->deltaMbr = ((reCfg->ulRecfg.mbr - reCfg->ulRecfg.gbr) * RG_SCH_CMN_REFRESH_TIME)/100;
12350 ulLcg->effDeltaMbr = ulLcg->deltaMbr;
12353 if(TRUE == ue->isEmtcUe)
12355 ret = cellSch->apisEmtcUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12364 ret = cellSch->apisUl->rgSCHRgrUlLcgRecfg(cell, ue, lcg, reCfg, err);
12370 if (RGSCH_IS_GBR_BEARER(ulLcg->cfgdGbr))
12372 /* Indicate MAC that this LCG is GBR LCG */
12373 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, TRUE);
12377 /* In case of RAB modification */
12378 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, reCfg->ulRecfg.lcgId, FALSE);
12383 /***********************************************************
12385 * Func : rgSCHCmnRgrLchDel
12387 * Desc : Scheduler handling for a (dedicated)
12388 * uplink logical channel being deleted.
12395 **********************************************************/
12397 PUBLIC S16 rgSCHCmnRgrLchDel
12405 PUBLIC S16 rgSCHCmnRgrLchDel(cell, ue, lcId, lcgId)
12412 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12413 TRC2(rgSCHCmnRgrLchDel);
12415 if(TRUE == ue->isEmtcUe)
12417 cellSch->apisEmtcUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12422 cellSch->apisUl->rgSCHRgrUlLchDel(cell, ue, lcId, lcgId);
12427 /***********************************************************
12429 * Func : rgSCHCmnLcgDel
12431 * Desc : Scheduler handling for a (dedicated)
12432 * uplink logical channel group being deleted.
12440 **********************************************************/
12442 PUBLIC Void rgSCHCmnLcgDel
12449 PUBLIC Void rgSCHCmnLcgDel(cell, ue, lcg)
12455 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12456 RgSchCmnLcg *lcgCmn = RG_SCH_CMN_GET_UL_LCG(lcg);
12457 TRC2(rgSCHCmnLcgDel);
12459 if (lcgCmn == NULLP)
12464 if (RGSCH_IS_GBR_BEARER(lcgCmn->cfgdGbr))
12466 /* Indicate MAC that this LCG is GBR LCG */
12467 rgSCHUtlBuildNSendLcgReg(cell, ue->ueId, lcg->lcgId, FALSE);
12471 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
12473 rgSCHCmnSpsUlLcgDel(cell, ue, lcg);
12475 #endif /* LTEMAC_SPS */
12477 lcgCmn->effGbr = 0;
12478 lcgCmn->reportedBs = 0;
12479 lcgCmn->cfgdGbr = 0;
12480 /* set lcg bs to 0. Deletion of control block happens
12481 * at the time of UE deletion. */
12484 if(TRUE == ue->isEmtcUe)
12486 cellSch->apisEmtcUl->rgSCHFreeUlLcg(cell, ue, lcg);
12491 cellSch->apisUl->rgSCHFreeUlLcg(cell, ue, lcg);
12498 * @brief This function deletes a service from scheduler.
12502 * Function: rgSCHCmnFreeDlLc
12503 * Purpose: This function is made available through a FP for
12504 * making the scheduler aware of a service being deleted from a UE.
12506 * Invoked by: BO and Scheduler
12508 * @param[in] RgSchCellCb* cell
12509 * @param[in] RgSchUeCb* ue
12510 * @param[in] RgSchDlLcCb* svc
12515 PUBLIC Void rgSCHCmnFreeDlLc
12522 PUBLIC Void rgSCHCmnFreeDlLc(cell, ue, svc)
12528 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
12529 TRC2(rgSCHCmnFreeDlLc);
12530 if (svc->sch == NULLP)
12535 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
12537 cellSch->apisEmtcDl->rgSCHFreeDlLc(cell, ue, svc);
12542 cellSch->apisDl->rgSCHFreeDlLc(cell, ue, svc);
12548 rgSCHSCellDlLcDel(cell, ue, svc);
12553 /* If SPS service, invoke SPS module */
12554 if (svc->dlLcSpsCfg.isSpsEnabled)
12556 rgSCHCmnSpsDlLcDel(cell, ue, svc);
12560 /* ccpu00117052 - MOD - Passing double pointer
12561 for proper NULLP assignment*/
12562 rgSCHUtlFreeSBuf(cell->instIdx,
12563 (Data**)(&(svc->sch)), (sizeof(RgSchCmnDlSvc)));
12566 rgSCHLaaDeInitDlLchCb(cell, svc);
12575 * @brief This function Processes the Final Allocations
12576 * made by the RB Allocator against the requested
12577 * CCCH SDURetx Allocations.
12581 * Function: rgSCHCmnDlCcchSduRetxFnlz
12582 * Purpose: This function Processes the Final Allocations
12583 * made by the RB Allocator against the requested
12584 * CCCH Retx Allocations.
12585 * Scans through the scheduled list of ccchSdu retrans,
12586 * fills the corresponding pdcch, adds the hqProc to
12587 * the corresponding SubFrm and removes the hqP from the cell's ccchSduRetxLst.
12590 * Invoked by: Common Scheduler
12592 * @param[in] RgSchCellCb *cell
12593 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12598 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz
12601 RgSchCmnDlRbAllocInfo *allocInfo
12604 PRIVATE Void rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo)
12606 RgSchCmnDlRbAllocInfo *allocInfo;
12610 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12611 RgSchDlRbAlloc *rbAllocInfo;
12612 RgSchDlHqProcCb *hqP;
12614 TRC2(rgSCHCmnDlCcchSduRetxFnlz);
12616 /* Traverse through the Scheduled Retx List */
12617 node = allocInfo->ccchSduAlloc.schdCcchSduRetxLst.first;
12620 hqP = (RgSchDlHqProcCb *)(node->node);
12622 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, cell);
12624 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12626 /* Remove the HqP from cell's ccchSduRetxLst */
12627 cmLListDelFrm(&cmnCellDl->ccchSduRetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12628 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12630 /* Fix: syed dlAllocCb reset should be performed.
12631 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12632 rgSCHCmnDlUeResetTemp(ue, hqP);
12634 /* Fix: syed dlAllocCb reset should be performed.
12635 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12636 node = allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst.first;
12639 hqP = (RgSchDlHqProcCb *)(node->node);
12642 /* reset the UE allocation Information */
12643 rgSCHCmnDlUeResetTemp(ue, hqP);
12649 * @brief This function Processes the Final Allocations
12650 * made by the RB Allocator against the requested
12651 * CCCH Retx Allocations.
12655 * Function: rgSCHCmnDlCcchRetxFnlz
12656 * Purpose: This function Processes the Final Allocations
12657 * made by the RB Allocator against the requested
12658 * CCCH Retx Allocations.
12659 * Scans through the scheduled list of msg4 retrans,
12660 * fills the corresponding pdcch, adds the hqProc to
12661 * the corresponding SubFrm and removes the hqP from the cell's msg4RetxLst.
12664 * Invoked by: Common Scheduler
12666 * @param[in] RgSchCellCb *cell
12667 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12672 PRIVATE Void rgSCHCmnDlCcchRetxFnlz
12675 RgSchCmnDlRbAllocInfo *allocInfo
12678 PRIVATE Void rgSCHCmnDlCcchRetxFnlz(cell, allocInfo)
12680 RgSchCmnDlRbAllocInfo *allocInfo;
12684 RgSchCmnDlCell *cmnCellDl = RG_SCH_CMN_GET_DL_CELL(cell);
12685 RgSchDlRbAlloc *rbAllocInfo;
12686 RgSchDlHqProcCb *hqP;
12688 TRC2(rgSCHCmnDlCcchRetxFnlz);
12690 /* Traverse through the Scheduled Retx List */
12691 node = allocInfo->msg4Alloc.schdMsg4RetxLst.first;
12694 hqP = (RgSchDlHqProcCb *)(node->node);
12695 raCb = hqP->hqE->raCb;
12696 rbAllocInfo = &raCb->rbAllocInfo;
12698 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12700 /* Remove the HqP from cell's msg4RetxLst */
12701 cmLListDelFrm(&cmnCellDl->msg4RetxLst, &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
12702 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
12703 /* Fix: syed dlAllocCb reset should be performed.
12704 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12705 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12706 rgSCHCmnDlHqPResetTemp(hqP);
12708 /* Fix: syed dlAllocCb reset should be performed.
12709 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12710 node = allocInfo->msg4Alloc.nonSchdMsg4RetxLst.first;
12713 hqP = (RgSchDlHqProcCb *)(node->node);
12714 raCb = hqP->hqE->raCb;
12716 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12717 rgSCHCmnDlHqPResetTemp(hqP);
12724 * @brief This function Processes the Final Allocations
12725 * made by the RB Allocator against the requested
12726 * CCCH SDU tx Allocations.
12730 * Function: rgSCHCmnDlCcchSduTxFnlz
12731 * Purpose: This function Processes the Final Allocations
12732 * made by the RB Allocator against the requested
12733 * CCCH tx Allocations.
12734 * Scans through the scheduled list of CCCH SDU trans,
12735 * fills the corresponding pdcch, adds the hqProc to
12736 * the corresponding SubFrm and removes the hqP from
12739 * Invoked by: Common Scheduler
12741 * @param[in] RgSchCellCb *cell
12742 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12747 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz
12750 RgSchCmnDlRbAllocInfo *allocInfo
12753 PRIVATE Void rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo)
12755 RgSchCmnDlRbAllocInfo *allocInfo;
12760 RgSchDlRbAlloc *rbAllocInfo;
12761 RgSchDlHqProcCb *hqP;
12762 RgSchLchAllocInfo lchSchdData;
12763 TRC2(rgSCHCmnDlCcchSduTxFnlz);
12765 /* Traverse through the Scheduled Retx List */
12766 node = allocInfo->ccchSduAlloc.schdCcchSduTxLst.first;
12769 hqP = (RgSchDlHqProcCb *)(node->node);
12770 ueCb = hqP->hqE->ue;
12772 rbAllocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb, cell);
12774 /* fill the pdcch and HqProc */
12775 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12777 /* Remove the raCb from cell's toBeSchdLst */
12778 cmLListDelFrm(&cell->ccchSduUeLst, &ueCb->ccchSduLnk);
12779 ueCb->ccchSduLnk.node = (PTR)NULLP;
12781 /* Fix : Resetting this is required to avoid complications
12782 * in the reestablishment case */
12783 ueCb->dlCcchInfo.bo = 0;
12785 /* Indicate DHM of the CCCH LC scheduling */
12786 hqP->tbInfo[0].contResCe = NOTPRSNT;
12787 lchSchdData.lcId = 0;
12788 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12789 (RGSCH_MSG4_HDRSIZE);
12790 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12792 /* Fix: syed dlAllocCb reset should be performed.
12793 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12794 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12796 /* Fix: syed dlAllocCb reset should be performed.
12797 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12798 node = allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst.first;
12801 hqP = (RgSchDlHqProcCb *)(node->node);
12802 ueCb = hqP->hqE->ue;
12804 /* Release HqProc */
12805 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12806 /*Fix: Removing releasing of TB1 as it will not exist for CCCH SDU and hence caused a crash*/
12807 /*rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12808 /* reset the UE allocation Information */
12809 rgSCHCmnDlUeResetTemp(ueCb, hqP);
12816 * @brief This function Processes the Final Allocations
12817 * made by the RB Allocator against the requested
12818 * CCCH tx Allocations.
12822 * Function: rgSCHCmnDlCcchTxFnlz
12823 * Purpose: This function Processes the Final Allocations
12824 * made by the RB Allocator against the requested
12825 * CCCH tx Allocations.
12826 * Scans through the scheduled list of msg4 trans,
12827 * fills the corresponding pdcch, adds the hqProc to
12828 * the corresponding SubFrm and removes the hqP from
12831 * Invoked by: Common Scheduler
12833 * @param[in] RgSchCellCb *cell
12834 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
12839 PRIVATE Void rgSCHCmnDlCcchTxFnlz
12842 RgSchCmnDlRbAllocInfo *allocInfo
12845 PRIVATE Void rgSCHCmnDlCcchTxFnlz(cell, allocInfo)
12847 RgSchCmnDlRbAllocInfo *allocInfo;
12852 RgSchDlRbAlloc *rbAllocInfo;
12853 RgSchDlHqProcCb *hqP;
12854 RgSchLchAllocInfo lchSchdData;
12855 TRC2(rgSCHCmnDlCcchTxFnlz);
12857 /* Traverse through the Scheduled Retx List */
12858 node = allocInfo->msg4Alloc.schdMsg4TxLst.first;
12861 hqP = (RgSchDlHqProcCb *)(node->node);
12862 raCb = hqP->hqE->raCb;
12864 rbAllocInfo = &raCb->rbAllocInfo;
12866 /* fill the pdcch and HqProc */
12867 rgSCHCmnFillHqPPdcch(cell, rbAllocInfo, hqP);
12868 /* MSG4 Fix Start */
12870 rgSCHRamRmvFrmRaInfoSchdLst(cell, raCb);
12873 /* Indicate DHM of the CCCH LC scheduling */
12874 lchSchdData.lcId = 0;
12875 lchSchdData.schdData = hqP->tbInfo[0].ccchSchdInfo.totBytes -
12876 (RGSCH_MSG4_HDRSIZE + RGSCH_CONT_RESID_SIZE);
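/* Note: for Msg4 both the MAC header and the contention resolution CE are
 * subtracted, so schdData reflects only the CCCH SDU payload handed to DHM;
 * the plain CCCH SDU path above subtracts the header alone. */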
12877 /* Transmitting presence of the cont res CE across the MAC-SCH interface to
12878 * identify CCCH SDU transmissions which need to be done
12880 * along with the contention resolution CE */
12881 hqP->tbInfo[0].contResCe = PRSNT_NODEF;
12882 /* Don't add the lc if only the cont res CE is being transmitted */
12883 if(raCb->dlCcchInfo.bo)
12885 rgSCHDhmAddLcData(cell->instIdx, &lchSchdData, &hqP->tbInfo[0]);
12890 /* Fix: syed dlAllocCb reset should be performed.
12891 * zombie info in dlAllocCb leading to crash rbNum wraparound */
12892 cmMemset((U8 *)&raCb->rbAllocInfo, (U8)0, sizeof(raCb->rbAllocInfo));
12893 rgSCHCmnDlHqPResetTemp(hqP);
12895 node = allocInfo->msg4Alloc.nonSchdMsg4TxLst.first;
12898 hqP = (RgSchDlHqProcCb *)(node->node);
12899 raCb = hqP->hqE->raCb;
12901 rbAllocInfo = &raCb->rbAllocInfo;
12902 /* Release HqProc */
12903 rgSCHDhmRlsHqpTb(hqP, 0, FALSE);
12904 /*Fix: Removing releasing of TB1 as it will not exist for MSG4 and hence caused a crash*/
12905 /* rgSCHDhmRlsHqpTb(hqP, 1, FALSE);*/
12906 /* reset the UE allocation Information */
12907 cmMemset((U8 *)rbAllocInfo, (U8)0, sizeof(*rbAllocInfo));
12908 rgSCHCmnDlHqPResetTemp(hqP);
12915 * @brief This function calculates the BI Index to be sent in the Bi header
12919 * Function: rgSCHCmnGetBiIndex
12920 * Purpose: This function uses the previously calculated BI time value
12921 * and the difference between the last BI sent time and the current time
12922 * to calculate the latest BI index. It also considers how many UEs are
12923 * unserved in this subframe.
12925 * Invoked by: Common Scheduler
12927 * @param[in] RgSchCellCb *cell
12928 * @param[in] U32 ueCount
12933 PUBLIC U8 rgSCHCmnGetBiIndex
12939 PUBLIC U8 rgSCHCmnGetBiIndex(cell, ueCount)
12944 S16 prevVal = 0; /* To Store Intermediate Value */
12945 U16 newBiVal = 0; /* To store Bi Value in millisecond */
12949 TRC2(rgSCHCmnGetBiIndex)
12951 if (cell->biInfo.prevBiTime != 0)
12954 if(cell->emtcEnable == TRUE)
12956 timeDiff =(RGSCH_CALC_SF_DIFF_EMTC(cell->crntTime, cell->biInfo.biTime));
12961 timeDiff =(RGSCH_CALC_SF_DIFF(cell->crntTime, cell->biInfo.biTime));
12964 prevVal = cell->biInfo.prevBiTime - timeDiff;
12970 newBiVal = RG_SCH_CMN_GET_BI_VAL(prevVal,ueCount);
12971 /* To be used next time when BI is calculated */
12973 if(cell->emtcEnable == TRUE)
12975 RGSCHCPYTIMEINFO_EMTC(cell->crntTime, cell->biInfo.biTime)
12980 RGSCHCPYTIMEINFO(cell->crntTime, cell->biInfo.biTime)
12983 /* Search the actual BI index in the Backoff Parameter Values table and
12984 * return that index */
12987 if (rgSchCmnBiTbl[idx] > newBiVal)
12992 }while(idx < RG_SCH_CMN_NUM_BI_VAL-1);
12993 cell->biInfo.prevBiTime = rgSchCmnBiTbl[idx];
12994 /* Of the 16 entries in Table 7.2-1 of 36.321 v8.8.0, 3 are reserved, so 13 are usable */
12995 return (idx); /* If a reserved value is returned, the UE treats it as 960 ms */
12996 } /* rgSCHCmnGetBiIndex */
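/* A minimal standalone sketch (not part of the original code) of the table
 * walk above: advance through the backoff table until an entry exceeds the
 * computed value, never moving past the last usable index. The helper name
 * and the table/parameter types are assumptions made purely for
 * illustration; numEntries is assumed to be at least 1. */
static U8 rgSCHCmnExampleBiLookup(const U16 *biTbl, U8 numEntries, U16 newBiVal)
{
   U8 idx = 0;
   while ((idx < (U8)(numEntries - 1)) && (biTbl[idx] <= newBiVal))
   {
      idx++;
   }
   return (idx); /* index of the first entry greater than newBiVal, if any */
}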
13000 * @brief This function Processes the Final Allocations
13001 * made by the RB Allocator against the requested
13002 * RAR allocations. Assumption: The requested
13003 * allocations are always satisfied completely.
13004 * Hence no roll back.
13008 * Function: rgSCHCmnDlRaRspFnlz
13009 * Purpose: This function Processes the Final Allocations
13010 * made by the RB Allocator against the requested.
13011 * Takes care of PDCCH filling.
13013 * Invoked by: Common Scheduler
13015 * @param[in] RgSchCellCb *cell
13016 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
13021 PRIVATE Void rgSCHCmnDlRaRspFnlz
13024 RgSchCmnDlRbAllocInfo *allocInfo
13027 PRIVATE Void rgSCHCmnDlRaRspFnlz(cell, allocInfo)
13029 RgSchCmnDlRbAllocInfo *allocInfo;
13033 RgSchDlRbAlloc *raRspAlloc;
13034 RgSchDlSf *subFrm = NULLP;
13038 RgSchRaReqInfo *raReq;
13040 RgSchUlAlloc *ulAllocRef=NULLP;
13041 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13042 U8 allocRapidCnt = 0;
13044 U32 msg3SchdIdx = 0;
13045 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
13049 TRC2(rgSCHCmnDlRaRspFnlz);
13051 for (rarCnt=0; rarCnt<RG_SCH_CMN_MAX_CMN_PDCCH; rarCnt++)
13053 raRspAlloc = &allocInfo->raRspAlloc[rarCnt];
13054 /* Having likely condition first for optimization */
13055 if (!raRspAlloc->pdcch)
13061 subFrm = raRspAlloc->dlSf;
13062 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
13063 /* Corrected RACH handling for multiple RAPIDs per RARNTI */
13064 allocRapidCnt = raRspAlloc->numRapids;
13065 while (allocRapidCnt)
13067 raReq = (RgSchRaReqInfo *)(reqLst->first->node);
13068 /* RACHO: If dedicated preamble, then allocate UL Grant
13069 * (consequence of handover/pdcchOrder) and continue */
13070 if (RGSCH_IS_DEDPRM(cell, raReq->raReq.rapId))
13072 rgSCHCmnHdlHoPo(cell, &subFrm->raRsp[rarCnt].contFreeUeLst,
13074 cmLListDelFrm(reqLst, reqLst->first);
13076 /* ccpu00117052 - MOD - Passing double pointer
13077 for proper NULLP assignment*/
13078 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13079 sizeof(RgSchRaReqInfo));
13083 if(cell->overLoadBackOffEnab)
13084 {/* RACH overload control is triggered, skipping this RACH request */
13085 cmLListDelFrm(reqLst, reqLst->first);
13087 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13088 sizeof(RgSchRaReqInfo));
13091 /* Attempt to include each RA request into the RSP */
13092 /* Any failure in the procedure is considered to */
13093 /* affect further allocations in the same TTI. When */
13094 /* a failure happens, we break out and complete */
13095 /* the processing for random access */
13096 if (rgSCHRamCreateRaCb(cell, &raCb, &err) != ROK)
13100 /* Msg3 allocation request to USM */
13101 if (raReq->raReq.rapId < cell->rachCfg.sizeRaPreambleGrpA)
13105 /*ccpu00128820 - MOD - Msg3 alloc double delete issue*/
13106 rgSCHCmnMsg3GrntReq(cell, raCb->tmpCrnti, preamGrpA, \
13107 &(raCb->msg3HqProc), &ulAllocRef, &raCb->msg3HqProcId);
13108 if (ulAllocRef == NULLP)
13110 rgSCHRamDelRaCb(cell, raCb, TRUE);
13113 if (raReq->raReq.cqiPres)
13115 raCb->ccchCqi = raReq->raReq.cqiIdx;
13119 raCb->ccchCqi = cellDl->ccchCqi;
13121 raCb->rapId = raReq->raReq.rapId;
13122 raCb->ta.pres = TRUE;
13123 raCb->ta.val = raReq->raReq.ta;
13124 raCb->msg3Grnt = ulAllocRef->grnt;
13125 /* Populating the tpc value received */
13126 raCb->msg3Grnt.tpc = raReq->raReq.tpc;
13127 /* PHR handling for MSG3 */
13128 ulAllocRef->raCb = raCb;
13130 /* To the crntTime, add the MIN time at which UE will
13131 * actually send MSG3, i.e. DL_DELTA+6 */
13132 raCb->msg3AllocTime = cell->crntTime;
13133 RGSCH_INCR_SUB_FRAME(raCb->msg3AllocTime, RG_SCH_CMN_MIN_MSG3_RECP_INTRVL);
13135 msg3SchdIdx = (cell->crntTime.slot+RG_SCH_CMN_DL_DELTA) %
13136 RGSCH_NUM_SUB_FRAMES;
13137 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
13138 special subframe */
13139 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][msg3SchdIdx] !=
13140 RG_SCH_TDD_UL_SUBFRAME)
13142 RGSCHCMNADDTOCRNTTIME(cell->crntTime,raCb->msg3AllocTime,
13143 RG_SCH_CMN_DL_DELTA)
13144 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][
13145 raCb->msg3AllocTime.slot];
13146 RGSCHCMNADDTOCRNTTIME(raCb->msg3AllocTime, raCb->msg3AllocTime,
13150 cmLListAdd2Tail(&subFrm->raRsp[rarCnt].raRspLst, &raCb->rspLnk);
13151 raCb->rspLnk.node = (PTR)raCb;
13152 cmLListDelFrm(reqLst, reqLst->first);
13154 /* ccpu00117052 - MOD - Passing double pointer
13155 for proper NULLP assignment*/
13156 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&raReq,
13157 sizeof(RgSchRaReqInfo));
13159 /* SR_RACH_STATS : RAR scheduled */
13164 /* Fill subframe data members */
13165 subFrm->raRsp[rarCnt].raRnti = raRspAlloc->rnti;
13166 subFrm->raRsp[rarCnt].pdcch = raRspAlloc->pdcch;
13167 subFrm->raRsp[rarCnt].tbSz = raRspAlloc->tbInfo[0].bytesAlloc;
13168 /* Fill PDCCH data members */
13169 rgSCHCmnFillPdcch(cell, subFrm->raRsp[rarCnt].pdcch, raRspAlloc);
13172 if(cell->overLoadBackOffEnab)
13173 {/* RACH overload control is triggered, skipping this RACH request */
13174 subFrm->raRsp[rarCnt].backOffInd.pres = PRSNT_NODEF;
13175 subFrm->raRsp[rarCnt].backOffInd.val = cell->overLoadBackOffval;
13180 subFrm->raRsp[rarCnt].backOffInd.pres = NOTPRSNT;
13183 /*[ccpu00125212] Avoid sending an empty RAR in case the RAR window
13184 is short and the UE is sending an unauthorised preamble.*/
13185 reqLst = &cell->raInfo.raReqLst[raRspAlloc->raIndex];
13186 if ((raRspAlloc->biEstmt) && (reqLst->count))
13188 subFrm->raRsp[0].backOffInd.pres = PRSNT_NODEF;
13189 /* Added as part of Upgrade */
13190 subFrm->raRsp[0].backOffInd.val =
13191 rgSCHCmnGetBiIndex(cell, reqLst->count);
13193 /* SR_RACH_STATS : Back Off Inds */
13197 else if ((subFrm->raRsp[rarCnt].raRspLst.first == NULLP) &&
13198 (subFrm->raRsp[rarCnt].contFreeUeLst.first == NULLP))
13200 /* Return the grabbed PDCCH */
13201 rgSCHUtlPdcchPut(cell, &subFrm->pdcchInfo, raRspAlloc->pdcch);
13202 subFrm->raRsp[rarCnt].pdcch = NULLP;
13203 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnRaRspAlloc(): "
13204 "Not even one RaReq.");
13208 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId,
13209 "RNTI:%d Scheduled RAR @ (%u,%u) ",
13211 cell->crntTime.sfn,
13212 cell->crntTime.slot);
13218 * @brief This function computes rv.
13222 * Function: rgSCHCmnDlCalcRvForBcch
13223 * Purpose: This function computes rv.
13225 * Invoked by: Common Scheduler
13227 * @param[in] RgSchCellCb *cell
13228 * @param[in] Bool si
13234 PRIVATE U8 rgSCHCmnDlCalcRvForBcch
13241 PRIVATE U8 rgSCHCmnDlCalcRvForBcch(cell, si, i)
13248 CmLteTimingInfo frm;
13249 TRC2(rgSCHCmnDlCalcRvForBcch);
13251 frm = cell->crntTime;
13252 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
13260 k = (frm.sfn/2) % 4;
13262 rv = RGSCH_CEIL(3*k, 2) % 4;
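/* Worked example: k cycles through 0, 1, 2, 3 as the SFN advances, and
 * ceil(3k/2) % 4 then yields the redundancy version sequence 0, 2, 3, 1,
 * which is the standard RV cycling used for SIB1 transmissions (36.321). */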
13267 * @brief This function Processes the Final Allocations
13268 * made by the RB Allocator against the requested
13269 * BCCH/PCCH allocations. Assumption: The requested
13270 * allocations are always satisfied completely.
13271 * Hence no roll back.
13275 * Function: rgSCHCmnDlBcchPcchFnlz
13276 * Purpose: This function Processes the Final Allocations
13277 * made by the RB Allocator against the requested.
13278 * Takes care of PDCCH filling.
13280 * Invoked by: Common Scheduler
13282 * @param[in] RgSchCellCb *cell
13283 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
13288 PRIVATE Void rgSCHCmnDlBcchPcchFnlz
13291 RgSchCmnDlRbAllocInfo *allocInfo
13294 PRIVATE Void rgSCHCmnDlBcchPcchFnlz(cell, allocInfo)
13296 RgSchCmnDlRbAllocInfo *allocInfo;
13299 RgSchDlRbAlloc *rbAllocInfo;
13303 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
13305 #ifdef LTEMAC_HDFDD
13306 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13308 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13312 /* Moving variables to available scope for optimization */
13313 RgSchClcDlLcCb *pcch;
13316 RgSchClcDlLcCb *bcch;
13319 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
13321 TRC2(rgSCHCmnDlBcchPcchFnlz);
13324 rbAllocInfo = &allocInfo->pcchAlloc;
13325 if (rbAllocInfo->pdcch)
13327 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13329 /* Added sfIdx calculation for TDD as well */
13331 #ifdef LTEMAC_HDFDD
13332 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13334 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13337 subFrm = rbAllocInfo->dlSf;
13338 pcch = rgSCHDbmGetPcch(cell);
13341 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnDlBcchPcchFnlz( ): "
13342 "No Pcch Present");
13346 /* Added Dl TB count for paging message transmission*/
13348 cell->dlUlTbCnt.tbTransDlTotalCnt++;
13350 bo = (RgSchClcBoRpt *)pcch->boLst.first->node;
13351 cmLListDelFrm(&pcch->boLst, &bo->boLstEnt);
13352 /* ccpu00117052 - MOD - Passing double pointer
13353 for proper NULLP assignment*/
13354 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13355 /* Fill subframe data members */
13356 subFrm->pcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13357 subFrm->pcch.pdcch = rbAllocInfo->pdcch;
13358 /* Fill PDCCH data members */
13359 rgSCHCmnFillPdcch(cell, subFrm->pcch.pdcch, rbAllocInfo);
13360 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, pcch->lcId, TRUE);
13361 /* ccpu00132314-ADD-Update the tx power allocation info
13362 TODO-Need to add a check for max tx power per symbol */
13363 subfrmAlloc->cmnLcInfo.pcchInfo.txPwrOffset = cellDl->pcchTxPwrOffset;
13367 rbAllocInfo = &allocInfo->bcchAlloc;
13368 if (rbAllocInfo->pdcch)
13370 RgInfSfAlloc *subfrmAlloc = &(cell->sfAllocArr[nextSfIdx]);
13372 #ifdef LTEMAC_HDFDD
13373 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
13375 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
13378 subFrm = rbAllocInfo->dlSf;
13380 /* Fill subframe data members */
13381 subFrm->bcch.tbSize = rbAllocInfo->tbInfo[0].bytesAlloc;
13382 subFrm->bcch.pdcch = rbAllocInfo->pdcch;
13383 /* Fill PDCCH data members */
13384 rgSCHCmnFillPdcch(cell, subFrm->bcch.pdcch, rbAllocInfo);
13386 if(rbAllocInfo->schdFirst)
13389 bcch = rgSCHDbmGetFirstBcchOnDlsch(cell);
13390 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
13392 /*Copy the SIB1 msg buff into interface buffer */
13393 SCpyMsgMsg(cell->siCb.crntSiInfo.sib1Info.sib1,
13394 rgSchCb[cell->instIdx].rgSchInit.region,
13395 rgSchCb[cell->instIdx].rgSchInit.pool,
13396 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13397 #endif/*RGR_SI_SCH*/
13398 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13399 rgSCHCmnDlCalcRvForBcch(cell, FALSE, 0);
13407 i = cell->siCb.siCtx.i;
13408 /*Decrement the retransmission count */
13409 cell->siCb.siCtx.retxCntRem--;
13411 /*Copy the SI msg buff into interface buffer */
13412 if(cell->siCb.siCtx.warningSiFlag == FALSE)
13414 SCpyMsgMsg(cell->siCb.siArray[cell->siCb.siCtx.siId-1].si,
13415 rgSchCb[cell->instIdx].rgSchInit.region,
13416 rgSchCb[cell->instIdx].rgSchInit.pool,
13417 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13421 pdu = rgSCHUtlGetWarningSiPdu(cell);
13422 RGSCH_NULL_CHECK(cell->instIdx, pdu);
13424 rgSchCb[cell->instIdx].rgSchInit.region,
13425 rgSchCb[cell->instIdx].rgSchInit.pool,
13426 &subfrmAlloc->cmnLcInfo.bcchInfo.pdu);
13427 if(cell->siCb.siCtx.retxCntRem == 0)
13429 rgSCHUtlFreeWarningSiPdu(cell);
13430 cell->siCb.siCtx.warningSiFlag = FALSE;
13435 bcch = rgSCHDbmGetSecondBcchOnDlsch(cell);
13436 bo = (RgSchClcBoRpt *)bcch->boLst.first->node;
13438 if(bo->retxCnt != cell->siCfg.retxCnt-1)
13443 #endif/*RGR_SI_SCH*/
13444 subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv =
13445 rgSCHCmnDlCalcRvForBcch(cell, TRUE, i);
13448 /* Added Dl TB count for SIB1 and SI messages transmission.
13449 * This counter will be incremented only for the first transmission
13450 * (with RV 0) of these messages*/
13452 if(subFrm->bcch.pdcch->dci.u.format1aInfo.t.pdschInfo.allocInfo.rv == 0)
13454 cell->dlUlTbCnt.tbTransDlTotalCnt++;
13458 if(bo->retxCnt == 0)
13460 cmLListDelFrm(&bcch->boLst, &bo->boLstEnt);
13461 /* ccpu00117052 - MOD - Passing double pointer
13462 for proper NULLP assignment*/
13463 rgSCHUtlFreeSBuf(cell->instIdx, (Data **)&bo, sizeof(RgSchClcBoRpt));
13465 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, bcch->lcId, sendInd);
13467 /*Fill the interface info */
13468 rgSCHUtlFillRgInfCmnLcInfo(subFrm, subfrmAlloc, NULLD, NULLD);
13470 /* ccpu00132314-ADD-Update the tx power allocation info
13471 TODO-Need to add a check for max tx power per symbol */
13472 subfrmAlloc->cmnLcInfo.bcchInfo.txPwrOffset = cellDl->bcchTxPwrOffset;
13474    /* mBuf has already been copied above */
13475 #endif/*RGR_SI_SCH*/
13488 * Function: rgSCHCmnUlSetAllUnSched
13491 * Invoked by: Common Scheduler
13493 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13498 PRIVATE Void rgSCHCmnUlSetAllUnSched
13500 RgSchCmnUlRbAllocInfo *allocInfo
13503 PRIVATE Void rgSCHCmnUlSetAllUnSched(allocInfo)
13504 RgSchCmnUlRbAllocInfo *allocInfo;
13509 TRC2(rgSCHCmnUlSetAllUnSched);
13511 node = allocInfo->contResLst.first;
13514 rgSCHCmnUlMov2NonSchdCntResLst(allocInfo, (RgSchUeCb *)node->node);
13515 node = allocInfo->contResLst.first;
13518 node = allocInfo->retxUeLst.first;
13521 rgSCHCmnUlMov2NonSchdRetxUeLst(allocInfo, (RgSchUeCb *)node->node);
13522 node = allocInfo->retxUeLst.first;
13525 node = allocInfo->ueLst.first;
13528 rgSCHCmnUlMov2NonSchdUeLst(allocInfo, (RgSchUeCb *)node->node);
13529 node = allocInfo->ueLst.first;
13541 * Function: rgSCHCmnUlAdd2CntResLst
13544 * Invoked by: Common Scheduler
13546 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13547 * @param[in] RgSchUeCb *ue
13552 PUBLIC Void rgSCHCmnUlAdd2CntResLst
13554 RgSchCmnUlRbAllocInfo *allocInfo,
13558 PUBLIC Void rgSCHCmnUlAdd2CntResLst(allocInfo, ue)
13559 RgSchCmnUlRbAllocInfo *allocInfo;
13563 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,ue->cell))->alloc);
13564 TRC2(rgSCHCmnUlAdd2CntResLst);
13565 cmLListAdd2Tail(&allocInfo->contResLst, &ulAllocInfo->reqLnk);
13566 ulAllocInfo->reqLnk.node = (PTR)ue;
13575 * Function: rgSCHCmnUlAdd2UeLst
13578 * Invoked by: Common Scheduler
13580 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
13581 * @param[in] RgSchUeCb *ue
13586 PUBLIC Void rgSCHCmnUlAdd2UeLst
13589 RgSchCmnUlRbAllocInfo *allocInfo,
13593 PUBLIC Void rgSCHCmnUlAdd2UeLst(cell, allocInfo, ue)
13595 RgSchCmnUlRbAllocInfo *allocInfo;
13599 RgSchCmnUeUlAlloc *ulAllocInfo = &((RG_SCH_CMN_GET_UL_UE(ue,cell))->alloc);
13600 TRC2(rgSCHCmnUlAdd2UeLst);
13601 if (ulAllocInfo->reqLnk.node == NULLP)
13603 cmLListAdd2Tail(&allocInfo->ueLst, &ulAllocInfo->reqLnk);
13604 ulAllocInfo->reqLnk.node = (PTR)ue;
13614 * Function: rgSCHCmnAllocUlRb
13615 * Purpose: To do RB allocations for uplink
13617 * Invoked by: Common Scheduler
13619 * @param[in] RgSchCellCb *cell
13620 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
13624 PUBLIC Void rgSCHCmnAllocUlRb
13627 RgSchCmnUlRbAllocInfo *allocInfo
13630 PUBLIC Void rgSCHCmnAllocUlRb(cell, allocInfo)
13632 RgSchCmnUlRbAllocInfo *allocInfo;
13635 RgSchUlSf *sf = allocInfo->sf;
13636 TRC2(rgSCHCmnAllocUlRb);
13638 /* Schedule for new transmissions */
13639 rgSCHCmnUlRbAllocForLst(cell, sf, allocInfo->ueLst.count,
13640 &allocInfo->ueLst, &allocInfo->schdUeLst,
13641 &allocInfo->nonSchdUeLst, (Bool)TRUE);
13645 /***********************************************************
13647 * Func : rgSCHCmnUlRbAllocForLst
13649  *  Desc : Allocate UL RBs for a list of UEs in the common RB allocation information passed
13658 **********************************************************/
13660 PRIVATE Void rgSCHCmnUlRbAllocForLst
13666 CmLListCp *schdLst,
13667 CmLListCp *nonSchdLst,
13671 PRIVATE Void rgSCHCmnUlRbAllocForLst(cell, sf, count, reqLst, schdLst,
13672 nonSchdLst, isNewTx)
13677 CmLListCp *schdLst;
13678 CmLListCp *nonSchdLst;
13687 CmLteTimingInfo timeInfo;
13690 TRC2(rgSCHCmnUlRbAllocForLst);
13692 if(schdLst->count == 0)
13694 cmLListInit(schdLst);
13697 cmLListInit(nonSchdLst);
13699 if(isNewTx == TRUE)
13701 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.numUes = (U8) count;
13703 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime, timeInfo, TFU_ULCNTRL_DLDELTA);
13704 k = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][timeInfo.subframe];
13705 RG_SCH_ADD_TO_CRNT_TIME(timeInfo,
13706 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo, k);
13708 RG_SCH_ADD_TO_CRNT_TIME(cell->crntTime,cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.timingInfo,
13709 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
13714 for (lnk = reqLst->first; count; lnk = lnk->next, --count)
13716 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13717 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
13722 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
13727 ueUl->subbandShare = ueUl->subbandRequired;
13728 if(isNewTx == TRUE)
13730 maxRb = RGSCH_MIN((ueUl->subbandRequired * MAX_5GTF_VRBG_SIZE), ue->ue5gtfCb.maxPrb);
13732 ret = rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole);
13735 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, schdLst);
13736 rgSCHCmnUlUeFillAllocInfo(cell, ue);
13740 gUl5gtfRbAllocFail++;
13741 #if defined (TENB_STATS) && defined (RG_5GTF)
13742 cell->tenbStats->sch.ul5gtfRbAllocFail++;
13744 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13745 ue->isMsg4PdcchWithCrnti = FALSE;
13746 ue->isSrGrant = FALSE;
13749 if(isNewTx == TRUE)
13751 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13752 ulAllocInfo[count - 1].rnti = ue->ueId;
13753 cell->sfAllocArr[cell->crntSfIdx].ulUeInfo.
13754 ulAllocInfo[count - 1].numPrb = ue->ul.nPrb;
13757 ueUl->subbandShare = 0; /* This reset will take care of
13758 * all scheduler types */
13760 for (; count; lnk = lnk->next, --count)
13762 RgSchUeCb *ue = (RgSchUeCb *)lnk->node;
13763 rgSCHCmnUlRbAllocAddUeToLst(cell, ue, nonSchdLst);
13764 ue->isMsg4PdcchWithCrnti = FALSE;
13771 /***********************************************************
13773 * Func : rgSCHCmnUlMdfyGrntForCqi
13775 * Desc : Modify UL Grant to consider presence of
13776 * CQI along with PUSCH Data.
13781 * - Scale down iTbs based on betaOffset and
13782  *          size of the ACQI report.
13783 * - Optionally attempt to increase numSb by 1
13784 * if input payload size does not fit in due
13785 * to reduced tbSz as a result of iTbsNew.
13789 **********************************************************/
13791 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi
13803 PRIVATE S16 rgSCHCmnUlMdfyGrntForCqi(cell, ue, maxRb, numSb, iTbs, hqSz, stepDownItbs, effTgt)
13814 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(ue->cell);
13819 U32 remREsForPusch;
13822 U32 betaOffVal = ue->ul.betaOffstVal;
13823 U32 cqiRiRptSz = ue->ul.cqiRiSz;
13824 U32 betaOffHqVal = rgSchCmnBetaHqOffstTbl[ue->ul.betaHqOffst];
13825 U32 resNumSb = *numSb;
13826 U32 puschEff = 1000;
13829 Bool mdfyiTbsFlg = FALSE;
13830 U8 resiTbs = *iTbs;
13832 TRC2(rgSCHCmnUlMdfyGrntForCqi)
13837 iMcs = rgSCHCmnUlGetIMcsFrmITbs(resiTbs, RG_SCH_CMN_GET_UE_CTGY(ue));
13838 RG_SCH_UL_MCS_TO_MODODR(iMcs, modOdr);
13839 if (RG_SCH_CMN_GET_UE_CTGY(ue) != CM_LTE_UE_CAT_5)
13841 modOdr = RGSCH_MIN(RGSCH_QM_QPSK, modOdr);
13845 modOdr = RGSCH_MIN(RGSCH_QM_64QAM, modOdr);
13847 nPrb = resNumSb * cellUl->sbSize;
13848    /* Restricting the minimum iTbs required for modification to 10 */
13849 if ((nPrb >= maxRb) && (resiTbs <= 10))
13851       /* Could not accommodate ACQI */
13854 totREs = nPrb * RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
13855 tbSz = rgTbSzTbl[0][resiTbs][nPrb-1];
13856 /* totalREs/tbSz = num of bits perRE. */
13857 cqiRiREs = (totREs * betaOffVal * cqiRiRptSz)/(1000 * tbSz); /* betaOffVal is represented
13858 as parts per 1000 */
13859 hqREs = (totREs * betaOffHqVal * hqSz)/(1000 * tbSz);
13860 if ((cqiRiREs + hqREs) < totREs)
13862 remREsForPusch = totREs - cqiRiREs - hqREs;
13863       bitsPerRe = (tbSz * 1000)/remREsForPusch; /* Multiplying by 1000 for integer operations */
13864 puschEff = bitsPerRe/modOdr;
13866 if (puschEff < effTgt)
13868 /* ensure resultant efficiency for PUSCH Data is within 0.93*/
13873 /* Alternate between increasing SB or decreasing iTbs until eff is met */
13874 if (mdfyiTbsFlg == FALSE)
13878 resNumSb = resNumSb + 1;
13880 mdfyiTbsFlg = TRUE;
13886 resiTbs-= stepDownItbs;
13888 mdfyiTbsFlg = FALSE;
13891    }while (1); /* Loop breaks if efficiency is met
13892 or returns RFAILED if not able to meet the efficiency */
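/* Hedged sketch (illustration only, not wired into the scheduler): the loop
 * above keeps PUSCH data decodable when CQI/RI and HARQ bits ride on PUSCH.
 * REs consumed by UCI are estimated from the beta offsets (kept in parts per
 * 1000, as in the code above), the remaining REs carry the transport block,
 * and the resulting bits-per-RE divided by the modulation order must stay
 * below the efficiency target (e.g. 930 for 0.93). All names below are local
 * to this example. */
static int examplePuschEffWithUciOk
(
unsigned totREs,       /* REs in the allocation (nPrb * REs per PRB)   */
unsigned tbSzBits,     /* transport block size in bits                 */
unsigned betaCqiX1000, /* CQI/RI beta offset in parts per 1000         */
unsigned cqiRiBits,    /* CQI/RI payload in bits                       */
unsigned betaHqX1000,  /* HARQ beta offset in parts per 1000           */
unsigned hqBits,       /* HARQ payload in bits                         */
unsigned modOdr,       /* modulation order: 2, 4 or 6                  */
unsigned effTgtX1000   /* efficiency target in parts per 1000          */
)
{
   unsigned cqiRiREs = (totREs * betaCqiX1000 * cqiRiBits) / (1000 * tbSzBits);
   unsigned hqREs    = (totREs * betaHqX1000  * hqBits)    / (1000 * tbSzBits);
   unsigned remREs;

   if ((cqiRiREs + hqREs) >= totREs)
   {
      return 0; /* UCI alone exhausts the allocation */
   }
   remREs = totREs - cqiRiREs - hqREs;
   /* bits per RE (x1000) divided by the modulation order gives eff x1000 */
   return ((((tbSzBits * 1000) / remREs) / modOdr) < effTgtX1000);
}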
13901 /***********************************************************
13903 * Func : rgSCHCmnUlRbAllocForUe
13905  *  Desc : Do uplink RB allocation for a UE.
13909 * Notes: Note that as of now, for retx, maxRb
13910 * is not considered. Alternatives, such
13911 * as dropping retx if it crosses maxRb
13912 * could be considered.
13916 **********************************************************/
13918 PRIVATE S16 rgSCHCmnUlRbAllocForUe
13927 PRIVATE S16 rgSCHCmnUlRbAllocForUe(cell, sf, ue, maxRb, hole)
13935 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
13936 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue, cell);
13937 RgSchUlAlloc *alloc = NULLP;
13943 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->schdHqProcIdx];
13945 RgSchUlHqProcCb *proc = NULLP;
13951 TfuDciFormat dciFrmt;
13955 TRC2(rgSCHCmnUlRbAllocForUe);
13957 rgSCHUhmGetAvlHqProc(cell, ue, &proc);
13960 //printf("UE [%d] HQ Proc unavailable\n", ue->ueId);
13965 if (ue->ue5gtfCb.rank == 2)
13967 dciFrmt = TFU_DCI_FORMAT_A2;
13972 dciFrmt = TFU_DCI_FORMAT_A1;
13975 /* 5gtf TODO : To pass dci frmt to this function */
13976 pdcch = rgSCHCmnPdcchAllocCrntSf(cell, ue);
13979 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
13980 "rgSCHCmnUlRbAllocForUe(): Could not get PDCCH for CRNTI:%d",ue->ueId);
13983 gUl5gtfPdcchSchd++;
13984 #if defined (TENB_STATS) && defined (RG_5GTF)
13985 cell->tenbStats->sch.ul5gtfPdcchSchd++;
13988 //TODO_SID using configured prb as of now
13989 nPrb = ue->ue5gtfCb.maxPrb;
13990 reqVrbg = nPrb/MAX_5GTF_VRBG_SIZE;
13991 iMcs = ue->ue5gtfCb.mcs; //gSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
13995 if((sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart > MAX_5GTF_VRBG)
13996 || (sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated > MAX_5GTF_VRBG))
13998 printf("5GTF_ERROR vrbg > 25 valstart = %d valalloc %d\n", sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart
13999 , sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated);
14004 /*TODO_SID: Workaround for alloc. Currently alloc is ulsf based. To handle multiple beams, we need a different
14005 design. Now alloc are formed based on MAX_5GTF_UE_SCH macro. */
14006 numVrbgTemp = MAX_5GTF_VRBG/MAX_5GTF_UE_SCH;
14009 alloc = rgSCHCmnUlSbAlloc(sf, numVrbgTemp,\
14012 if (alloc == NULLP)
14014 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
14015 "rgSCHCmnUlRbAllocForUe(): Could not get UlAlloc %d CRNTI:%d",numVrbg,ue->ueId);
14016 rgSCHCmnPdcchRlsCrntSf(cell, pdcch);
14019 gUl5gtfAllocAllocated++;
14020 #if defined (TENB_STATS) && defined (RG_5GTF)
14021 cell->tenbStats->sch.ul5gtfAllocAllocated++;
14023 alloc->grnt.vrbgStart = sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart;
14024 alloc->grnt.numVrbg = numVrbg;
14025 alloc->grnt.numLyr = numLyr;
14026 alloc->grnt.dciFrmt = dciFrmt;
14028 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].vrbgStart += numVrbg;
14029 sf->sfBeamInfo[ue->ue5gtfCb.BeamId].totVrbgAllocated += numVrbg;
14031 //rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
14033 sf->totPrb += alloc->grnt.numRb;
14034 ue->ul.nPrb = alloc->grnt.numRb;
14036 if (ue->csgMmbrSta != TRUE)
14038 cellUl->ncsgPrbCnt += alloc->grnt.numRb;
14040 cellUl->totPrbCnt += (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14041 alloc->pdcch = pdcch;
14042 alloc->grnt.iMcs = iMcs;
14043 alloc->grnt.iMcsCrnt = iMcsCrnt;
14044 alloc->grnt.hop = 0;
14045 /* Initial Num RBs support for UCI on PUSCH */
14047 ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
14049 alloc->forMsg3 = FALSE;
14050 //RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTb5gtfSzTbl[0], (iTbs));
14052 //ueUl->alloc.allocdBytes = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
14053 /* TODO_SID Allocating based on configured MCS as of now.
14054 Currently for format A2. When doing multi grp per tti, need to update this. */
14055 ueUl->alloc.allocdBytes = (rgSch5gtfTbSzTbl[iMcs]/8) * ue->ue5gtfCb.rank;
14057 alloc->grnt.datSz = ueUl->alloc.allocdBytes;
14058 //TODO_SID Need to check mod order.
14059 RG_SCH_CMN_TBS_TO_MODODR(iMcs, alloc->grnt.modOdr);
14060 //alloc->grnt.modOdr = 6;
14061 alloc->grnt.isRtx = FALSE;
14063 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
14064 alloc->grnt.SCID = 0;
14065 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
14066 alloc->grnt.PMI = 0;
14067 alloc->grnt.uciOnxPUSCH = 0;
14068 alloc->grnt.hqProcId = proc->procId;
14070 alloc->hqProc = proc;
14071 alloc->hqProc->ulSfIdx = cellUl->schdIdx;
14073 /*commenting to retain the rnti used for transmission SPS/c-rnti */
14074 alloc->rnti = ue->ueId;
14075 ueUl->alloc.alloc = alloc;
14076 /*rntiwari-Adding the debug for generating the graph.*/
14077 /* No grant attr recorded now */
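/* Hedged reference sketch: rgSCHCmnCalcRiv() is defined elsewhere in the
 * scheduler; the conventional LTE type-2 resource indication value it is
 * expected to produce is reproduced below for readability only (nRbg = total
 * number of VRB groups, start = first group, len = number of contiguous
 * groups). The helper name and parameters are local to this illustration. */
static unsigned exampleCalcRiv(unsigned nRbg, unsigned start, unsigned len)
{
   /* Small lengths use nRbg*(len-1)+start, larger ones use the mirrored
    * form, so every (start, len) pair maps to a unique RIV. */
   if ((len - 1) <= (nRbg / 2))
   {
      return (nRbg * (len - 1)) + start;
   }
   return (nRbg * (nRbg - len + 1)) + (nRbg - 1 - start);
}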
14081 /***********************************************************
14083 * Func : rgSCHCmnUlRbAllocAddUeToLst
14085 * Desc : Add UE to list (scheduled/non-scheduled list)
14086 * for UL RB allocation information.
14094 **********************************************************/
14096 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst
14103 PUBLIC Void rgSCHCmnUlRbAllocAddUeToLst(cell, ue, lst)
14109 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
14110 TRC2(rgSCHCmnUlRbAllocAddUeToLst);
14113 gUl5gtfUeRbAllocDone++;
14114 #if defined (TENB_STATS) && defined (RG_5GTF)
14115 cell->tenbStats->sch.ul5gtfUeRbAllocDone++;
14117 cmLListAdd2Tail(lst, &ueUl->alloc.schdLstLnk);
14118 ueUl->alloc.schdLstLnk.node = (PTR)ue;
14123 * @brief This function Processes the Final Allocations
14124 * made by the RB Allocator against the requested.
14128 * Function: rgSCHCmnUlAllocFnlz
14129 * Purpose: This function Processes the Final Allocations
14130 * made by the RB Allocator against the requested.
14132 * Invoked by: Common Scheduler
14134 * @param[in] RgSchCellCb *cell
14135 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
14140 PRIVATE Void rgSCHCmnUlAllocFnlz
14143 RgSchCmnUlRbAllocInfo *allocInfo
14146 PRIVATE Void rgSCHCmnUlAllocFnlz(cell, allocInfo)
14148 RgSchCmnUlRbAllocInfo *allocInfo;
14151 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14152 TRC2(rgSCHCmnUlAllocFnlz);
14154 /* call scheduler specific Finalization */
14155 cellSch->apisUl->rgSCHUlAllocFnlz(cell, allocInfo);
14161 * @brief This function Processes the Final Allocations
14162 * made by the RB Allocator against the requested.
14166 * Function: rgSCHCmnDlAllocFnlz
14167 * Purpose: This function Processes the Final Allocations
14168 * made by the RB Allocator against the requested.
14170 * Invoked by: Common Scheduler
14172 * @param[in] RgSchCellCb *cell
14177 PUBLIC Void rgSCHCmnDlAllocFnlz
14182 PUBLIC Void rgSCHCmnDlAllocFnlz(cell)
14186 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14187 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
14189 TRC2(rgSCHCmnDlAllocFnlz);
14191 rgSCHCmnDlCcchRetxFnlz(cell, allocInfo);
14192 rgSCHCmnDlCcchTxFnlz(cell, allocInfo);
14194 /* Added below functions for handling CCCH SDU transmission received
14196 * * guard timer expiry*/
14197 rgSCHCmnDlCcchSduRetxFnlz(cell, allocInfo);
14198 rgSCHCmnDlCcchSduTxFnlz(cell, allocInfo);
14200 rgSCHCmnDlRaRspFnlz(cell, allocInfo);
14201 /* call scheduler specific Finalization */
14202 cellSch->apisDl->rgSCHDlAllocFnlz(cell, allocInfo);
14204 /* Stack Crash problem for TRACE5 Changes. Added the return below */
14211 * @brief Update an uplink subframe.
14215 * Function : rgSCHCmnUlUpdSf
14217 * For each allocation
14218 * - if no more tx needed
14219 * - Release allocation
14221 * - Perform retransmission
14223 * @param[in] RgSchUlSf *sf
14227 PRIVATE Void rgSCHCmnUlUpdSf
14230 RgSchCmnUlRbAllocInfo *allocInfo,
14234 PRIVATE Void rgSCHCmnUlUpdSf(cell, allocInfo, sf)
14236 RgSchCmnUlRbAllocInfo *allocInfo;
14241 TRC2(rgSCHCmnUlUpdSf);
14243 while ((lnk = sf->allocs.first))
14245 RgSchUlAlloc *alloc = (RgSchUlAlloc *)lnk->node;
14248 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
14253 /* If need to handle all retx together, run another loop separately */
14254 rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc);
14256 rgSCHCmnUlRlsUlAlloc(cell, sf, alloc);
14259 /* By this time, all allocs would have been cleared and
14260 * SF is reset to be made ready for new allocations. */
14261 rgSCHCmnUlSfReset(cell, sf);
14262 /* In case there are timing problems due to msg3
14263 * allocations being done in advance, (which will
14264 * probably happen with the current FDD code that
14265 * handles 8 subframes) one solution
14266 * could be to hold the (recent) msg3 allocs in a separate
14267 * list, and then possibly add that to the actual
14268 * list later. So at this time while allocations are
14269 * traversed, the recent msg3 ones are not seen. Anytime after
14270 * this (a good time is when the usual allocations
14271 * are made), msg3 allocations could be transferred to the
14272 * normal list. Not doing this now as it is assumed
14273 * that incorporation of TDD shall take care of this.
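/* Hedged sketch of the per-allocation decision taken in the loop above: an
 * allocation is released straight away once its HARQ process has either been
 * decoded successfully (CRC received) or has no transmissions left; otherwise
 * it is first queued for retransmission and then released from this subframe.
 * The helper below is an illustrative stand-in with local names only. */
static int exampleUlAllocNeedsRetx(int rcvdCrcInd, unsigned remTx)
{
   /* retransmit only when decoding failed and attempts remain */
   return ((!rcvdCrcInd) && (remTx > 0));
}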
14281 * @brief Handle uplink allocation for retransmission.
14285 * Function : rgSCHCmnUlHndlAllocRetx
14287 * Processing Steps:
14288 * - Add to queue for retx.
14289  *      - Do not release here, release happens as part
14290 * of the loop that calls this function.
14292 * @param[in] RgSchCellCb *cell
14293 * @param[in] RgSchCmnUlRbAllocInfo *allocInfo
14294 * @param[in] RgSchUlSf *sf
14295 * @param[in] RgSchUlAlloc *alloc
14299 PRIVATE Void rgSCHCmnUlHndlAllocRetx
14302 RgSchCmnUlRbAllocInfo *allocInfo,
14304 RgSchUlAlloc *alloc
14307 PRIVATE Void rgSCHCmnUlHndlAllocRetx(cell, allocInfo, sf, alloc)
14309 RgSchCmnUlRbAllocInfo *allocInfo;
14311 RgSchUlAlloc *alloc;
14315 RgSchCmnUlUe *ueUl;
14316 TRC2(rgSCHCmnUlHndlAllocRetx);
14318 rgTbSzTbl[0][rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs)]\
14319 [alloc->grnt.numRb-1]/8;
14320 if (!alloc->forMsg3)
14322 ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue);
14323 ueUl->alloc.reqBytes = bytes;
14324 rgSCHUhmRetx(alloc->hqProc);
14325 rgSCHCmnUlAdd2RetxUeLst(allocInfo, alloc->ue);
14329 /* RACHO msg3 retx handling. Part of RACH procedure changes. */
14330 retxAlloc = rgSCHCmnUlGetUlAlloc(cell, sf, alloc->numSb);
14331 if (retxAlloc == NULLP)
14333 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
14334             "rgSCHCmnUlHndlAllocRetx(): Could not get UlAlloc for msg3Retx RNTI:%d",
14338 retxAlloc->grnt.iMcs = alloc->grnt.iMcs;
14339 retxAlloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl\
14340 [alloc->hqProc->rvIdx];
14341 retxAlloc->grnt.nDmrs = 0;
14342 retxAlloc->grnt.hop = 0;
14343 retxAlloc->grnt.delayBit = 0;
14344 retxAlloc->rnti = alloc->rnti;
14345 retxAlloc->ue = NULLP;
14346 retxAlloc->pdcch = FALSE;
14347 retxAlloc->forMsg3 = TRUE;
14348 retxAlloc->raCb = alloc->raCb;
14349 retxAlloc->hqProc = alloc->hqProc;
14350 rgSCHUhmRetx(retxAlloc->hqProc);
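/* Hedged illustration: for UL retransmissions the MCS field can carry the
 * redundancy version instead of a fresh TB size; by the usual LTE convention
 * (36.213 Table 8.6.1-1) I_MCS 29/30/31 indicate RV 1/2/3 while RV 0 keeps
 * the originally granted MCS. The lookup above,
 * rgSchCmnUlRvIdxToIMcsTbl[alloc->hqProc->rvIdx], is assumed to encode that
 * convention; the helper below only mirrors it for readability. */
static unsigned exampleUlRetxMcsForRv(unsigned rvIdx, unsigned origIMcs)
{
   /* rvIdx is expected to be in the range 0..3 */
   static const unsigned rvToIMcs[4] = { 0 /* unused */, 29, 30, 31 };

   return (rvIdx == 0) ? origIMcs : rvToIMcs[rvIdx];
}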
14357 * @brief Uplink Scheduling Handler.
14361 * Function: rgSCHCmnUlAlloc
14362 * Purpose: This function Handles Uplink Scheduling.
14364 * Invoked by: Common Scheduler
14366 * @param[in] RgSchCellCb *cell
14369 /* ccpu00132653- The definition of this function made common for TDD and FDD*/
14371 PRIVATE Void rgSCHCmnUlAlloc
14376 PRIVATE Void rgSCHCmnUlAlloc(cell)
14380 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14381 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
14382 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
14383 RgSchCmnUlRbAllocInfo allocInfo;
14384 RgSchCmnUlRbAllocInfo *allocInfoRef = &allocInfo;
14390 TRC2(rgSCHCmnUlAlloc);
14392 /* Initializing RgSchCmnUlRbAllocInfo structure */
14393 rgSCHCmnInitUlRbAllocInfo(allocInfoRef);
14395 /* Get Uplink Subframe */
14396 allocInfoRef->sf = &cellUl->ulSfArr[cellUl->schdIdx];
14398 /* initializing the UL PRB count */
14399 allocInfoRef->sf->totPrb = 0;
14403 rgSCHCmnSpsUlTti(cell, allocInfoRef);
14406 if(*allocInfoRef->sf->allocCountRef == 0)
14410 if ((hole = rgSCHUtlUlHoleFirst(allocInfoRef->sf)) != NULLP)
14412 /* Sanity check of holeDb */
14413 if (allocInfoRef->sf->holeDb->count == 1 && hole->start == 0)
14415 hole->num = cell->dynCfiCb.bwInfo[cellDl->currCfi].numSb;
14416 /* Re-Initialize available subbands because of CFI change*/
14417 allocInfoRef->sf->availSubbands = cell->dynCfiCb.\
14418 bwInfo[cellDl->currCfi].numSb;
14419    /* Currently doing 5gtf ulsf specific initialization here;
14420       need to move it to the proper place */
14422 allocInfoRef->sf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
14423 allocInfoRef->sf->numUePerGrp = cell->cell5gtfCb.uePerGrpPerTti;
14424 for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
14426 allocInfoRef->sf->sfBeamInfo[idx].totVrbgAllocated = 0;
14427 allocInfoRef->sf->sfBeamInfo[idx].totVrbgRequired = 0;
14428 allocInfoRef->sf->sfBeamInfo[idx].vrbgStart = 0;
14434 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
14435 "Error! holeDb sanity check failed");
14440 /* Fix: Adaptive re-transmissions prioritised over other transmissions */
14441 /* perform adaptive retransmissions */
14442 rgSCHCmnUlSfReTxAllocs(cell, allocInfoRef->sf);
14446 /* Fix: syed Adaptive Msg3 Retx crash. Release all
14447 Harq processes for which adap Retx failed, to avoid
14448 blocking. This step should be done before New TX
14449 scheduling to make hqProc available. Right now we
14450       don't check if proc is in adap Retx list for considering
14451 it to be available. But now with this release that
14452 functionality would be correct. */
14454 rgSCHCmnUlSfRlsRetxProcs(cell, allocInfoRef->sf);
14457 /* Specific UL scheduler to perform UE scheduling */
14458 cellSch->apisUl->rgSCHUlSched(cell, allocInfoRef);
14460 /* Call UL RB allocator module */
14461 rgSCHCmnAllocUlRb(cell, allocInfoRef);
14463 /* Do group power control for PUSCH */
14464 rgSCHCmnGrpPwrCntrlPusch(cell, allocInfoRef->sf);
14466 cell->sc.apis->rgSCHDrxStrtInActvTmrInUl(cell);
14468 rgSCHCmnUlAllocFnlz(cell, allocInfoRef);
14469 if(5000 == g5gtfTtiCnt)
14471 ul5gtfsidDlAlreadyMarkUl = 0;
14472 ul5gtfsidDlSchdPass = 0;
14473 ul5gtfsidUlMarkUl = 0;
14474 ul5gtfTotSchdCnt = 0;
14482 * @brief send Subframe Allocations.
14486 * Function: rgSCHCmnSndCnsldtInfo
14487 * Purpose: Send the scheduled
14488 * allocations to MAC for StaInd generation to Higher layers and
14489  *     for MUXing. Posts RgInfSfAlloc to the MAC instance.
14491 * Invoked by: Common Scheduler
14493 * @param[in] RgSchCellCb *cell
14497 PUBLIC Void rgSCHCmnSndCnsldtInfo
14502 PUBLIC Void rgSCHCmnSndCnsldtInfo(cell)
14506 RgInfSfAlloc *subfrmAlloc;
14508 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14510 TRC2(rgSCHCmnSndCnsldtInfo);
14512 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14514 /* Send the allocations to MAC for MUXing */
14515 rgSCHUtlGetPstToLyr(&pst, &rgSchCb[cell->instIdx], cell->macInst);
14516 subfrmAlloc->cellId = cell->cellId;
14517 /* Populate the List of UEs needing PDB-based Flow control */
14518 cellSch->apisDl->rgSCHDlFillFlwCtrlInfo(cell, subfrmAlloc);
14520 if((subfrmAlloc->rarInfo.numRaRntis) ||
14522 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14523 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14524 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14526 (subfrmAlloc->ueInfo.numUes) ||
14527 (subfrmAlloc->cmnLcInfo.bitMask) ||
14528 (subfrmAlloc->ulUeInfo.numUes) ||
14529 (subfrmAlloc->flowCntrlInfo.numUes))
14531 if((subfrmAlloc->rarInfo.numRaRntis) ||
14533 (subfrmAlloc->emtcInfo.rarInfo.numRaRntis) ||
14534 (subfrmAlloc->emtcInfo.cmnLcInfo.bitMask) ||
14535 (subfrmAlloc->emtcInfo.ueInfo.numUes) ||
14537 (subfrmAlloc->ueInfo.numUes) ||
14538 (subfrmAlloc->cmnLcInfo.bitMask) ||
14539 (subfrmAlloc->flowCntrlInfo.numUes))
14542 RgSchMacSfAlloc(&pst, subfrmAlloc);
14545 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_NUM_SUB_FRAMES;
14547 cell->crntSfIdx = (cell->crntSfIdx + 1) % RGSCH_SF_ALLOC_SIZE;
14553 * @brief Consolidate Subframe Allocations.
14557 * Function: rgSCHCmnCnsldtSfAlloc
14558 * Purpose: Consolidate Subframe Allocations.
14560 * Invoked by: Common Scheduler
14562 * @param[in] RgSchCellCb *cell
14566 PUBLIC Void rgSCHCmnCnsldtSfAlloc
14571 PUBLIC Void rgSCHCmnCnsldtSfAlloc(cell)
14575 RgInfSfAlloc *subfrmAlloc;
14576 CmLteTimingInfo frm;
14578 CmLListCp dlDrxInactvTmrLst;
14579 CmLListCp dlInActvLst;
14580 CmLListCp ulInActvLst;
14581 RgSchCmnCell *cellSch = NULLP;
14583 TRC2(rgSCHCmnCnsldtSfAlloc);
14585 cmLListInit(&dlDrxInactvTmrLst);
14586 cmLListInit(&dlInActvLst);
14587 cmLListInit(&ulInActvLst);
14589 subfrmAlloc = &(cell->sfAllocArr[cell->crntSfIdx]);
14591 /* Get Downlink Subframe */
14592 frm = cell->crntTime;
14593 RGSCH_INCR_SUB_FRAME(frm, RG_SCH_CMN_DL_DELTA);
14594 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14596 /* Fill the allocation Info */
14597 rgSCHUtlFillRgInfRarInfo(dlSf, subfrmAlloc, cell);
14600 rgSCHUtlFillRgInfUeInfo(dlSf, cell, &dlDrxInactvTmrLst,
14601 &dlInActvLst, &ulInActvLst);
14602 #ifdef RG_PFS_STATS
14603 cell->totalPrb += dlSf->bwAssigned;
14605 /* Mark the following Ues inactive for UL*/
14606 cellSch = RG_SCH_CMN_GET_CELL(cell);
14608 /* Calling Scheduler specific function with DRX inactive UE list*/
14609 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInActvLst);
14610 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInActvLst);
14613 /*re/start DRX inactivity timer for the UEs*/
14614 (Void)rgSCHDrxStrtInActvTmr(cell,&dlDrxInactvTmrLst,RG_SCH_DRX_DL);
14620 * @brief Initialize the DL Allocation Information Structure.
14624 * Function: rgSCHCmnInitDlRbAllocInfo
14625 * Purpose: Initialize the DL Allocation Information Structure.
14627 * Invoked by: Common Scheduler
14629 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
14633 PRIVATE Void rgSCHCmnInitDlRbAllocInfo
14635 RgSchCmnDlRbAllocInfo *allocInfo
14638 PRIVATE Void rgSCHCmnInitDlRbAllocInfo(allocInfo)
14639 RgSchCmnDlRbAllocInfo *allocInfo;
14642 TRC2(rgSCHCmnInitDlRbAllocInfo);
14643 cmMemset((U8 *)&allocInfo->pcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14644 cmMemset((U8 *)&allocInfo->bcchAlloc, (U8)0, sizeof(RgSchDlRbAlloc));
14645 cmMemset((U8 *)allocInfo->raRspAlloc, (U8)0,
14646 RG_SCH_CMN_MAX_CMN_PDCCH*sizeof(RgSchDlRbAlloc));
14648 allocInfo->msg4Alloc.msg4DlSf = NULLP;
14649 cmLListInit(&allocInfo->msg4Alloc.msg4TxLst);
14650 cmLListInit(&allocInfo->msg4Alloc.msg4RetxLst);
14651 cmLListInit(&allocInfo->msg4Alloc.schdMsg4TxLst);
14652 cmLListInit(&allocInfo->msg4Alloc.schdMsg4RetxLst);
14653 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4TxLst);
14654 cmLListInit(&allocInfo->msg4Alloc.nonSchdMsg4RetxLst);
14656 allocInfo->ccchSduAlloc.ccchSduDlSf = NULLP;
14657 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduTxLst);
14658 cmLListInit(&allocInfo->ccchSduAlloc.ccchSduRetxLst);
14659 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduTxLst);
14660 cmLListInit(&allocInfo->ccchSduAlloc.schdCcchSduRetxLst);
14661 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduTxLst);
14662 cmLListInit(&allocInfo->ccchSduAlloc.nonSchdCcchSduRetxLst);
14665 allocInfo->dedAlloc.dedDlSf = NULLP;
14666 cmLListInit(&allocInfo->dedAlloc.txHqPLst);
14667 cmLListInit(&allocInfo->dedAlloc.retxHqPLst);
14668 cmLListInit(&allocInfo->dedAlloc.schdTxHqPLst);
14669 cmLListInit(&allocInfo->dedAlloc.schdRetxHqPLst);
14670 cmLListInit(&allocInfo->dedAlloc.nonSchdTxHqPLst);
14671 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxHqPLst);
14673 cmLListInit(&allocInfo->dedAlloc.txRetxHqPLst);
14674 cmLListInit(&allocInfo->dedAlloc.schdTxRetxHqPLst);
14675 cmLListInit(&allocInfo->dedAlloc.nonSchdTxRetxHqPLst);
14677 cmLListInit(&allocInfo->dedAlloc.txSpsHqPLst);
14678 cmLListInit(&allocInfo->dedAlloc.retxSpsHqPLst);
14679 cmLListInit(&allocInfo->dedAlloc.schdTxSpsHqPLst);
14680 cmLListInit(&allocInfo->dedAlloc.schdRetxSpsHqPLst);
14681 cmLListInit(&allocInfo->dedAlloc.nonSchdTxSpsHqPLst);
14682 cmLListInit(&allocInfo->dedAlloc.nonSchdRetxSpsHqPLst);
14686 rgSCHLaaCmnInitDlRbAllocInfo (allocInfo);
14689 cmLListInit(&allocInfo->dedAlloc.errIndTxHqPLst);
14690 cmLListInit(&allocInfo->dedAlloc.schdErrIndTxHqPLst);
14691 cmLListInit(&allocInfo->dedAlloc.nonSchdErrIndTxHqPLst);
14696 * @brief Initialize the UL Allocation Information Structure.
14700 * Function: rgSCHCmnInitUlRbAllocInfo
14701 * Purpose: Initialize the UL Allocation Information Structure.
14703 * Invoked by: Common Scheduler
14705 * @param[out] RgSchCmnUlRbAllocInfo *allocInfo
14709 PUBLIC Void rgSCHCmnInitUlRbAllocInfo
14711 RgSchCmnUlRbAllocInfo *allocInfo
14714 PUBLIC Void rgSCHCmnInitUlRbAllocInfo(allocInfo)
14715 RgSchCmnUlRbAllocInfo *allocInfo;
14718 TRC2(rgSCHCmnInitUlRbAllocInfo);
14719 allocInfo->sf = NULLP;
14720 cmLListInit(&allocInfo->contResLst);
14721 cmLListInit(&allocInfo->schdContResLst);
14722 cmLListInit(&allocInfo->nonSchdContResLst);
14723 cmLListInit(&allocInfo->ueLst);
14724 cmLListInit(&allocInfo->schdUeLst);
14725 cmLListInit(&allocInfo->nonSchdUeLst);
14731 * @brief Scheduling for PUCCH group power control.
14735 * Function: rgSCHCmnGrpPwrCntrlPucch
14736 * Purpose: This function does group power control for PUCCH
14737 * corresponding to the subframe for which DL UE allocations
14740 * Invoked by: Common Scheduler
14742 * @param[in] RgSchCellCb *cell
14746 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch
14752 PRIVATE Void rgSCHCmnGrpPwrCntrlPucch(cell, dlSf)
14757 TRC2(rgSCHCmnGrpPwrCntrlPucch);
14759 rgSCHPwrGrpCntrlPucch(cell, dlSf);
14765 * @brief Scheduling for PUSCH group power control.
14769 * Function: rgSCHCmnGrpPwrCntrlPusch
14770 * Purpose: This function does group power control, for
14771 * the subframe for which UL allocation has (just) happened.
14773 * Invoked by: Common Scheduler
14775 * @param[in] RgSchCellCb *cell
14776 * @param[in] RgSchUlSf *ulSf
14780 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch
14786 PRIVATE Void rgSCHCmnGrpPwrCntrlPusch(cell, ulSf)
14791 /*removed unused variable *cellSch*/
14792 CmLteTimingInfo frm;
14795 TRC2(rgSCHCmnGrpPwrCntrlPusch);
14797 /* Got to pass DL SF corresponding to UL SF, so get that first.
14798 * There is no easy way of getting dlSf by having the RgSchUlSf*,
14799 * so use the UL delta from current time to get the DL SF. */
14800 frm = cell->crntTime;
14803 if(cell->emtcEnable == TRUE)
14805 RGSCH_INCR_SUB_FRAME_EMTC(frm, TFU_DLCNTRL_DLDELTA);
14810 RGSCH_INCR_SUB_FRAME(frm, TFU_DLCNTRL_DLDELTA);
14812 /* Del filling of dl.time */
14813 dlSf = rgSCHUtlSubFrmGet(cell, frm);
14815 rgSCHPwrGrpCntrlPusch(cell, dlSf, ulSf);
14820 /* Fix: syed align multiple UEs to refresh at same time */
14821 /***********************************************************
14823 * Func : rgSCHCmnApplyUeRefresh
14825 * Desc : Apply UE refresh in CMN and Specific
14826 * schedulers. Data rates and corresponding
14827 * scratchpad variables are updated.
14835 **********************************************************/
14837 PRIVATE S16 rgSCHCmnApplyUeRefresh
14843 PRIVATE S16 rgSCHCmnApplyUeRefresh(cell, ue)
14848 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
14850 U32 effNonGbrBsr = 0;
14853 TRC2(rgSCHCmnApplyUeRefresh);
14855    /* Reset the refresh cycle variables */
14856 ue->ul.effAmbr = ue->ul.cfgdAmbr;
14858 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
14860 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
14862 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
14864 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
14866 cmnLcg->effGbr = cmnLcg->cfgdGbr;
14867 cmnLcg->effDeltaMbr = cmnLcg->deltaMbr;
14868 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
14869 /* Considering GBR LCG will be prioritised by UE */
14870 effGbrBsr += cmnLcg->bs;
14871          }/* Else no remaining BS, so nonLcg0 will be updated when a BSR is received */
14874 effNonGbrBsr += cmnLcg->reportedBs;
14875 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
14879 effNonGbrBsr = RGSCH_MIN(effNonGbrBsr,ue->ul.effAmbr);
14880 ue->ul.nonGbrLcgBs = effNonGbrBsr;
14882 ue->ul.nonLcg0Bs = effGbrBsr + effNonGbrBsr;
14883 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
14884 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
14887 /* call scheduler specific event handlers
14888 * for refresh timer expiry */
14889 cellSch->apisUl->rgSCHUlUeRefresh(cell, ue);
14890 cellSch->apisDl->rgSCHDlUeRefresh(cell, ue);
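/* Hedged worked example of the refresh arithmetic above, with made-up
 * figures: GBR LCGs are credited up to GBR plus delta-MBR, non-GBR LCGs are
 * summed and then capped by the UE AMBR, and LCG0 is added on top without a
 * cap. Plain unsigned arithmetic and local names only. */
static unsigned exampleEffBsrAfterRefresh
(
unsigned lcg0Bs,      /* pending bytes reported for LCG0                  */
unsigned gbrBsSum,    /* sum over GBR LCGs of min(reportedBs, gbr + dMbr) */
unsigned nonGbrBsSum, /* sum of reportedBs over non-GBR LCGs              */
unsigned effAmbr      /* refreshed AMBR budget for this cycle             */
)
{
   unsigned nonGbr = (nonGbrBsSum < effAmbr) ? nonGbrBsSum : effAmbr;

   /* e.g. lcg0Bs = 100, gbrBsSum = 3000, nonGbrBsSum = 9000, effAmbr = 5000
    *      -> nonGbr = 5000 and effBsr = 3000 + 5000 + 100 = 8100 bytes    */
   return gbrBsSum + nonGbr + lcg0Bs;
}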
14895 /***********************************************************
14897 * Func : rgSCHCmnTmrExpiry
14899  *  Desc : Adds a UE to the refresh queue, so that the UE is
14900  *         periodically triggered to refresh its GBR and
14909 **********************************************************/
14911 PRIVATE S16 rgSCHCmnTmrExpiry
14913 PTR cb, /* Pointer to timer control block */
14914 S16 tmrEvnt /* Timer Event */
14917 PRIVATE S16 rgSCHCmnTmrExpiry(cb, tmrEvnt)
14918 PTR cb; /* Pointer to timer control block */
14919 S16 tmrEvnt; /* Timer Event */
14922 RgSchUeCb *ue = (RgSchUeCb *)cb;
14923 RgSchCellCb *cell = ue->cell;
14924 #if (ERRCLASS & ERRCLS_DEBUG)
14927 TRC2(rgSCHCmnTmrExpiry);
14929 #if (ERRCLASS & ERRCLS_DEBUG)
14930 if (tmrEvnt != RG_SCH_CMN_EVNT_UE_REFRESH)
14932 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnTmrExpiry(): Invalid "
14933 "timer event CRNTI:%d",ue->ueId);
14940 rgSCHCmnApplyUeRefresh(cell, ue);
14942 rgSCHCmnAddUeToRefreshQ(cell, ue, RG_SCH_CMN_REFRESH_TIME);
14947 /***********************************************************
14949 * Func : rgSCHCmnTmrProc
14951 * Desc : Timer entry point per cell. Timer
14952 * processing is triggered at every frame boundary
14961 **********************************************************/
14963 PRIVATE S16 rgSCHCmnTmrProc
14968 PRIVATE S16 rgSCHCmnTmrProc(cell)
14972 RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);
14973 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
14974 /* Moving the assignment of scheduler pointer
14975 to available scope for optimization */
14976 TRC2(rgSCHCmnTmrProc);
14978 if ((cell->crntTime.slot % RGSCH_NUM_SUB_FRAMES_5G) == 0)
14980 /* Reset the counters periodically */
14981 if ((cell->crntTime.sfn % RG_SCH_CMN_CSG_REFRESH_TIME) == 0)
14983 RG_SCH_RESET_HCSG_DL_PRB_CNTR(cmnDlCell);
14984 RG_SCH_RESET_HCSG_UL_PRB_CNTR(cmnUlCell);
14986 if ((cell->crntTime.sfn % RG_SCH_CMN_OVRLDCTRL_REFRESH_TIME) == 0)
14989 cell->measurements.ulTpt = ((cell->measurements.ulTpt * 95) + ( cell->measurements.ulBytesCnt * 5))/100;
14990 cell->measurements.dlTpt = ((cell->measurements.dlTpt * 95) + ( cell->measurements.dlBytesCnt * 5))/100;
14992 rgSCHUtlCpuOvrLdAdjItbsCap(cell);
14993 /* reset cell level tpt measurements for next cycle */
14994 cell->measurements.ulBytesCnt = 0;
14995 cell->measurements.dlBytesCnt = 0;
14997 /* Comparing with Zero instead of % is being done for efficiency.
14998 * If Timer resolution changes then accordingly update the
14999 * macro RG_SCH_CMN_REFRESH_TIMERES */
15000 RgSchCmnCell *sched = RG_SCH_CMN_GET_CELL(cell);
15001 cmPrcTmr(&sched->tmrTqCp, sched->tmrTq, (PFV)rgSCHCmnTmrExpiry);
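/* Hedged sketch of the throughput smoothing used above: every refresh
 * interval the cell-level byte count is folded into a running figure with a
 * 95/5 weighting, i.e. an exponential moving average with alpha = 0.05, and
 * the per-interval counter is then cleared. The helper restates that in
 * isolation with local names. */
static unsigned exampleEwmaTpt(unsigned prevTpt, unsigned bytesThisIntrvl)
{
   /* e.g. prevTpt = 10000, bytes = 30000 -> (950000 + 150000)/100 = 11000 */
   return ((prevTpt * 95) + (bytesThisIntrvl * 5)) / 100;
}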
15008 /***********************************************************
15010 * Func : rgSchCmnUpdCfiVal
15012 * Desc : Update the CFI value if CFI switch was done
15020 **********************************************************/
15022 PRIVATE Void rgSchCmnUpdCfiVal
15028 PRIVATE Void rgSchCmnUpdCfiVal(cell, delta)
15034 CmLteTimingInfo pdsch;
15035 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15044 TRC2(rgSchCmnUpdCfiVal);
15046 pdsch = cell->crntTime;
15047 RGSCH_INCR_SUB_FRAME(pdsch, delta);
15048 dlSf = rgSCHUtlSubFrmGet(cell, pdsch);
15049    /* Fix for DCFI FLE issue: when DL delta is 1 and UL delta is 0 and a CFI
15050     *change happens in that SF, then UL PDCCH allocation happens with the old CFI
15051     *but the CFI in the control Req goes as the updated one since it was stored in the CELL
15053 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
15054 if(cell->dynCfiCb.pdcchSfIdx != 0xFF)
15057 dlIdx = rgSCHUtlGetDlSfIdx(cell, &pdsch);
15059 dlIdx = (((pdsch.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (pdsch.slot % RGSCH_NUM_SUB_FRAMES));
15060 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
15062 /* If current downlink subframe index is same as pdcch SF index,
15063 * perform the switching of CFI in this subframe */
15064 if(cell->dynCfiCb.pdcchSfIdx == dlIdx)
15066 cellCmnDl->currCfi = cellCmnDl->newCfi;
15067 cell->dynCfiCb.pdcchSfIdx = 0xFF;
15069 /* Updating the nCce value based on the new CFI */
15071 splSfCfi = cellCmnDl->newCfi;
15072 for(idx = 0; idx < cell->numDlSubfrms; idx++)
15074 tddSf = cell->subFrms[idx];
15076 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][tddSf->sfNum];
15078 if(tddSf->sfType == RG_SCH_SPL_SF_DATA)
15080 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, splSfCfi);
15082 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][splSfCfi];
15086 tddSf->nCce = cell->dynCfiCb.cfi2NCceTbl[mPhich][cellCmnDl->currCfi];
15089 /* Setting the switch over window length based on config index.
15090        * During the switch over period all the UL transmissions are ACKed
15092 cell->dynCfiCb.switchOvrWinLen =
15093 rgSchCfiSwitchOvrWinLen[cell->ulDlCfgIdx];
15095 cell->nCce = cell->dynCfiCb.cfi2NCceTbl[0][cellCmnDl->currCfi];
15096    /* Fix for DCFI FLE issue: when DL delta is 1 and UL delta is 0 and a CFI
15097     *change happens in that SF, then UL PDCCH allocation happens with the old CFI
15098     *but the CFI in the control Req goes as the updated one since it was stored in the CELL
15100 dlSf->pdcchInfo.currCfi = cellCmnDl->currCfi;
15101 cell->dynCfiCb.switchOvrWinLen = rgSchCfiSwitchOvrWinLen[7];
15109 /***********************************************************
15111 * Func : rgSchCmnUpdtPdcchSfIdx
15113  *  Desc : Update the PDCCH subframe index and reset the dynamic CFI counters after a CFI switch
15121 **********************************************************/
15124 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
15131 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, sfNum)
15138 PRIVATE Void rgSchCmnUpdtPdcchSfIdx
15144 PRIVATE Void rgSchCmnUpdtPdcchSfIdx(cell, dlIdx)
15152 TRC2(rgSchCmnUpdtPdcchSfIdx);
15154 /* Resetting the parameters on CFI switching */
15155 cell->dynCfiCb.cceUsed = 0;
15156 cell->dynCfiCb.lowCceCnt = 0;
15158 cell->dynCfiCb.cceFailSum = 0;
15159 cell->dynCfiCb.cceFailCnt = 0;
15160 cell->dynCfiCb.prevCceFailIdx = 0;
15162 cell->dynCfiCb.switchOvrInProgress = TRUE;
15164 for(idx = 0; idx < cell->dynCfiCb.numFailSamples; idx++)
15166 cell->dynCfiCb.cceFailSamples[idx] = 0;
15169 cell->dynCfiCb.ttiCnt = 0;
15171 cell->dynCfiCb.cfiSwitches++;
15172 cfiSwitchCnt = cell->dynCfiCb.cfiSwitches;
15175 cell->dynCfiCb.pdcchSfIdx = (dlIdx +
15176 rgSchTddPdcchSfIncTbl[cell->ulDlCfgIdx][sfNum]) % cell->numDlSubfrms;
15178 cell->dynCfiCb.pdcchSfIdx = (dlIdx + RG_SCH_CFI_APPLY_DELTA) % \
15179 RGSCH_NUM_DL_slotS;
15183 /***********************************************************
15185 * Func : rgSchCmnUpdCfiDb
15187 * Desc : Update the counters related to dynamic
15188 * CFI feature in cellCb.
15196 **********************************************************/
15198 PUBLIC Void rgSchCmnUpdCfiDb
15204 PUBLIC Void rgSchCmnUpdCfiDb(cell, delta)
15209 CmLteTimingInfo frm;
15215 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15216 U8 nCceLowerCfi = 0;
15223 TRC2(rgSchCmnUpdCfiDb);
15225 /* Get Downlink Subframe */
15226 frm = cell->crntTime;
15227 RGSCH_INCR_SUB_FRAME(frm, delta);
15230 dlIdx = rgSCHUtlGetDlSfIdx(cell, &frm);
15231 dlSf = cell->subFrms[dlIdx];
15232 isHiDci0 = rgSchTddPuschTxKTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15234    /* Changing the indexing
15235       so that the proper subframe is selected */
15236 dlIdx = (((frm.sfn & 1) * RGSCH_NUM_SUB_FRAMES) + (frm.slot % RGSCH_NUM_SUB_FRAMES));
15237 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, cell->subFrms, dlIdx);
15238 dlSf = cell->subFrms[dlIdx];
15241 currCfi = cellSch->dl.currCfi;
15243 if(!cell->dynCfiCb.switchOvrInProgress)
15246 if(!cell->dynCfiCb.isDynCfiEnb)
15248 if(currCfi != cellSch->cfiCfg.cfi)
15250 if(currCfi < cellSch->cfiCfg.cfi)
15252 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15253 cfiIncr = cell->dynCfiCb.cfiIncr;
15257 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15258 cfiDecr = cell->dynCfiCb.cfiDecr;
15265       /* Setting ttiMod to 0 for ttiCnt > 1000 in case this
15266        * function was not called in an UL subframe */
15267 if(cell->dynCfiCb.ttiCnt > RGSCH_CFI_TTI_MON_INTRVL)
15274 ttiMod = cell->dynCfiCb.ttiCnt % RGSCH_CFI_TTI_MON_INTRVL;
15277 dlSf->dlUlBothCmplt++;
15279 if((dlSf->dlUlBothCmplt == 2) || (!isHiDci0))
15281 if(dlSf->dlUlBothCmplt == 2)
15284 /********************STEP UP CRITERIA********************/
15285 /* Updating the CCE failure count parameter */
15286 cell->dynCfiCb.cceFailCnt += dlSf->isCceFailure;
15287 cell->dynCfiCb.cceFailSum += dlSf->isCceFailure;
15289 /* Check if cfi step up can be performed */
15290 if(currCfi < cell->dynCfiCb.maxCfi)
15292 if(cell->dynCfiCb.cceFailSum >= cell->dynCfiCb.cfiStepUpTtiCnt)
15294 RG_SCH_CFI_STEP_UP(cell, cellSch, currCfi)
15295 cfiIncr = cell->dynCfiCb.cfiIncr;
15300 /********************STEP DOWN CRITERIA********************/
15302 /* Updating the no. of CCE used in this dl subframe */
15303 cell->dynCfiCb.cceUsed += dlSf->cceCnt;
15305 if(currCfi > RGSCH_MIN_CFI_VAL)
15307 /* calculating the number of CCE for next lower CFI */
15309 mPhich = rgSchTddPhichMValTbl[cell->ulDlCfgIdx][dlSf->sfNum];
15310 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[mPhich][currCfi-1];
15312 nCceLowerCfi = cell->dynCfiCb.cfi2NCceTbl[0][currCfi-1];
15314 if(dlSf->cceCnt < nCceLowerCfi)
15316 /* Updating the count of TTIs in which no. of CCEs
15317 * used were less than the CCEs of next lower CFI */
15318 cell->dynCfiCb.lowCceCnt++;
15323 totalCce = (nCceLowerCfi * cell->dynCfiCb.cfiStepDownTtiCnt *
15324 RGSCH_CFI_CCE_PERCNTG)/100;
15326 if((!cell->dynCfiCb.cceFailSum) &&
15327 (cell->dynCfiCb.lowCceCnt >=
15328 cell->dynCfiCb.cfiStepDownTtiCnt) &&
15329 (cell->dynCfiCb.cceUsed < totalCce))
15331 RG_SCH_CFI_STEP_DOWN(cell, cellSch, currCfi)
15332 cfiDecr = cell->dynCfiCb.cfiDecr;
15338 cceFailIdx = ttiMod/cell->dynCfiCb.failSamplePrd;
15340 if(cceFailIdx != cell->dynCfiCb.prevCceFailIdx)
15342 /* New sample period has started. Subtract the old count
15343 * from the new sample period */
15344 cell->dynCfiCb.cceFailSum -= cell->dynCfiCb.cceFailSamples[cceFailIdx];
15346 /* Store the previous sample period data */
15347 cell->dynCfiCb.cceFailSamples[cell->dynCfiCb.prevCceFailIdx]
15348 = cell->dynCfiCb.cceFailCnt;
15350 cell->dynCfiCb.prevCceFailIdx = cceFailIdx;
15352 /* Resetting the CCE failure count as zero for next sample period */
15353 cell->dynCfiCb.cceFailCnt = 0;
15358          /* Resetting the parameters after the Monitoring Interval expired */
15359 cell->dynCfiCb.cceUsed = 0;
15360 cell->dynCfiCb.lowCceCnt = 0;
15361 cell->dynCfiCb.ttiCnt = 0;
15364 cell->dynCfiCb.ttiCnt++;
15368 if(cellSch->dl.newCfi != cellSch->dl.currCfi)
15371 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx, dlSf->sfNum);
15373 rgSchCmnUpdtPdcchSfIdx(cell, dlIdx);
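/* Hedged restatement of the dynamic-CFI decision applied above (the bound
 * checks against the minimum and maximum CFI are omitted here): the CFI is
 * stepped up once CCE allocation failures accumulate past a threshold within
 * the monitoring window, and stepped down only when there were no failures,
 * the per-TTI CCE usage stayed below the next lower CFI's capacity for enough
 * TTIs, and the aggregate usage stays under a configured percentage of that
 * capacity. All parameters are local illustrations of the cell->dynCfiCb
 * fields. */
static int exampleCfiStep
(
unsigned cceFailSum,     /* CCE allocation failures in the window        */
unsigned stepUpTtiCnt,   /* failure threshold to step the CFI up         */
unsigned lowCceCnt,      /* TTIs with usage below the next lower CFI     */
unsigned stepDownTtiCnt, /* TTI threshold to step the CFI down           */
unsigned cceUsed,        /* total CCEs used in the window                */
unsigned nCceLowerCfi,   /* CCEs available at the next lower CFI         */
unsigned ccePercntg      /* usage headroom in percent, e.g. 90           */
)
{
   unsigned lowerCap = (nCceLowerCfi * stepDownTtiCnt * ccePercntg) / 100;

   if (cceFailSum >= stepUpTtiCnt)
   {
      return 1;                                   /* step CFI up   */
   }
   if ((cceFailSum == 0) && (lowCceCnt >= stepDownTtiCnt) &&
       (cceUsed < lowerCap))
   {
      return -1;                                  /* step CFI down */
   }
   return 0;                                      /* keep current  */
}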
15380 * @brief Dl Scheduler for Broadcast and Common channel scheduling.
15384 * Function: rgSCHCmnDlCommonChSch
15385 * Purpose: This function schedules DL Common channels for LTE.
15386 * Invoked by TTI processing in TOM. Scheduling is done for
15387 * BCCH, PCCH, Msg4, CCCH SDU, RAR in that order
15389 * Invoked by: TOM (TTI processing)
15391 * @param[in] RgSchCellCb *cell
15395 PUBLIC Void rgSCHCmnDlCommonChSch
15400 PUBLIC Void rgSCHCmnDlCommonChSch(cell)
15404 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15406 TRC2(rgSCHCmnDlCommonChSch);
15408 cellSch->apisDl->rgSCHDlTickForPdbTrkng(cell);
15409 rgSchCmnUpdCfiVal(cell, RG_SCH_CMN_DL_DELTA);
15411 /* handle Inactive UEs for DL */
15412 rgSCHCmnHdlDlInactUes(cell);
15414 /* Send a Tick to Refresh Timer */
15415 rgSCHCmnTmrProc(cell);
15417 if (cell->isDlDataAllwd && (cell->stopSiSch == FALSE))
15419 rgSCHCmnInitRbAlloc(cell);
15420 /* Perform DL scheduling of BCCH, PCCH */
15421 rgSCHCmnDlBcchPcchAlloc(cell);
15425 if(cell->siCb.inWindow != 0)
15427 cell->siCb.inWindow--;
15430 if (cell->isDlDataAllwd && (cell->stopDlSch == FALSE))
15432 rgSCHCmnDlCcchRarAlloc(cell);
15438 * @brief Scheduler invocation per TTI.
15442 * Function: rgSCHCmnUlSch
15443 * Purpose: This function implements UL scheduler alone. This is to
15444 * be able to perform scheduling with more flexibility.
15446 * Invoked by: TOM (TTI processing)
15448 * @param[in] RgSchCellCb *cell
15452 PUBLIC Void rgSCHCmnUlSch
15457 PUBLIC Void rgSCHCmnUlSch(cell)
15461 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15463 TRC2(rgSCHCmnUlSch);
15467 if(TRUE == rgSCHLaaSCellEnabled(cell))
15473 if(cellSch->ul.schdIdx != RGSCH_INVALID_INFO)
15475 rgSchCmnUpdCfiVal(cell, TFU_ULCNTRL_DLDELTA);
15477 /* Handle Inactive UEs for UL */
15478 rgSCHCmnHdlUlInactUes(cell);
15479 /* Perform UL Scheduling EVERY TTI */
15480 rgSCHCmnUlAlloc(cell);
15482 /* Calling function to update CFI parameters*/
15483 rgSchCmnUpdCfiDb(cell, TFU_ULCNTRL_DLDELTA);
15485 if(cell->dynCfiCb.switchOvrWinLen > 0)
15487 /* Decrementing the switchover window length */
15488 cell->dynCfiCb.switchOvrWinLen--;
15490 if(!cell->dynCfiCb.switchOvrWinLen)
15492 if(cell->dynCfiCb.dynCfiRecfgPend)
15494 /* Toggling the Dynamic CFI enabling */
15495 cell->dynCfiCb.isDynCfiEnb ^= 1;
15496 rgSCHDynCfiReCfg(cell, cell->dynCfiCb.isDynCfiEnb);
15497 cell->dynCfiCb.dynCfiRecfgPend = FALSE;
15499 cell->dynCfiCb.switchOvrInProgress = FALSE;
15507 rgSCHCmnSpsUlTti(cell, NULLP);
15517 * @brief This function updates the scheduler with service for an UE.
15521 * Function: rgSCHCmnDlDedBoUpd
15522 * Purpose: This function should be called whenever there is a
15523 * change BO for a service.
15525 * Invoked by: BO and Scheduler
15527 * @param[in] RgSchCellCb* cell
15528 * @param[in] RgSchUeCb* ue
15529 * @param[in] RgSchDlLcCb* svc
15534 PUBLIC Void rgSCHCmnDlDedBoUpd
15541 PUBLIC Void rgSCHCmnDlDedBoUpd(cell, ue, svc)
15547 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15548 TRC2(rgSCHCmnDlDedBoUpd);
15550    /* RACHO : if the UE's idle time is exceeded and a BO update
15551 * is received, then add UE to the pdcch Order Q */
15552 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
15554 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue, cell);
15555 /* If PDCCH order is already triggered and we are waiting for
15556 * RACH from UE then do not add to PdcchOdrQ. */
15557 if (ueDl->rachInfo.rapIdLnk.node == NULLP)
15559 rgSCHCmnDlAdd2PdcchOdrQ(cell, ue);
15565 /* If SPS service, invoke SPS module */
15566 if (svc->dlLcSpsCfg.isSpsEnabled)
15568 rgSCHCmnSpsDlDedBoUpd(cell, ue, svc);
15569       /* Note: Return from here, no update needed in other schedulers */
15574 if((cell->emtcEnable)&&(TRUE == ue->isEmtcUe))
15576 cellSch->apisEmtcDl->rgSCHDlDedBoUpd(cell, ue, svc);
15577 //printf("rgSCHEMTCDlDedBoUpd\n");
15582 cellSch->apisDl->rgSCHDlDedBoUpd(cell, ue, svc);
15587 rgSCHSCellDlDedBoUpd(cell, ue, svc);
15595 * @brief Removes an UE from Cell's TA List.
15599 * Function: rgSCHCmnRmvFrmTaLst
15600 * Purpose: Removes an UE from Cell's TA List.
15602 * Invoked by: Specific Scheduler
15604 * @param[in] RgSchCellCb* cell
15605 * @param[in] RgSchUeCb* ue
15610 PUBLIC Void rgSCHCmnRmvFrmTaLst
15616 PUBLIC Void rgSCHCmnRmvFrmTaLst(cell, ue)
15621 RgSchCmnDlCell *cellCmnDl = RG_SCH_CMN_GET_DL_CELL(cell);
15622 TRC2(rgSCHCmnRmvFrmTaLst);
15625 if(cell->emtcEnable && ue->isEmtcUe)
15627 rgSCHEmtcRmvFrmTaLst(cellCmnDl,ue);
15632 cmLListDelFrm(&cellCmnDl->taLst, &ue->dlTaLnk);
15633 ue->dlTaLnk.node = (PTR)NULLP;
15638 /* Fix: syed Remove the msg4Proc from cell
15639 * msg4Retx Queue. I have used CMN scheduler function
15640 * directly. Please define a new API and call this
15641 * function through that. */
15644 * @brief This function removes MSG4 HARQ process from cell RETX Queues.
15648 * Function: rgSCHCmnDlMsg4ProcRmvFrmRetx
15649 * Purpose: This function removes MSG4 HARQ process from cell RETX Queues.
15651 * Invoked by: UE/RACB deletion.
15653 * @param[in] RgSchCellCb* cell
15654 * @param[in] RgSchDlHqProc* hqP
15659 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx
15662 RgSchDlHqProcCb *hqP
15665 PUBLIC Void rgSCHCmnDlMsg4ProcRmvFrmRetx(cell, hqP)
15667 RgSchDlHqProcCb *hqP;
15670 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15671 TRC2(rgSCHCmnDlMsg4ProcRmvFrmRetx);
15673 if (hqP->tbInfo[0].ccchSchdInfo.retxLnk.node)
15675 if (hqP->hqE->msg4Proc == hqP)
15677 cmLListDelFrm(&cellSch->dl.msg4RetxLst, \
15678 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15679 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15682 else if(hqP->hqE->ccchSduProc == hqP)
15684 cmLListDelFrm(&cellSch->dl.ccchSduRetxLst,
15685 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15686 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)NULLP;
15695 * @brief This function adds a HARQ process for retx.
15699 * Function: rgSCHCmnDlProcAddToRetx
15700 * Purpose: This function adds a HARQ process to retransmission
15701 * queue. This may be performed when a HARQ ack is
15704 * Invoked by: HARQ feedback processing
15706 * @param[in] RgSchCellCb* cell
15707 * @param[in] RgSchDlHqProc* hqP
15712 PUBLIC Void rgSCHCmnDlProcAddToRetx
15715 RgSchDlHqProcCb *hqP
15718 PUBLIC Void rgSCHCmnDlProcAddToRetx(cell, hqP)
15720 RgSchDlHqProcCb *hqP;
15723 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
15724 TRC2(rgSCHCmnDlProcAddToRetx);
15726 if (hqP->hqE->msg4Proc == hqP) /* indicating msg4 transmission */
15728 cmLListAdd2Tail(&cellSch->dl.msg4RetxLst, \
15729 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15730 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15733 else if(hqP->hqE->ccchSduProc == hqP)
15735 /*If CCCH SDU being transmitted without cont res CE*/
15736 cmLListAdd2Tail(&cellSch->dl.ccchSduRetxLst,
15737 &hqP->tbInfo[0].ccchSchdInfo.retxLnk);
15738 hqP->tbInfo[0].ccchSchdInfo.retxLnk.node = (PTR)hqP;
15744 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
15746 /* Invoke SPS module for SPS HARQ proc re-transmission handling */
15747 rgSCHCmnSpsDlProcAddToRetx(cell, hqP);
15750 #endif /* LTEMAC_SPS */
15752 if((TRUE == cell->emtcEnable)
15753 && (TRUE == hqP->hqE->ue->isEmtcUe))
15755 cellSch->apisEmtcDl->rgSCHDlProcAddToRetx(cell, hqP);
15760 cellSch->apisDl->rgSCHDlProcAddToRetx(cell, hqP);
15768 * @brief This function performs RI validation and
15769 * updates it to the ueCb.
15773 * Function: rgSCHCmnDlSetUeRi
15774 * Purpose: This function performs RI validation and
15775 * updates it to the ueCb.
15777 * Invoked by: rgSCHCmnDlCqiInd
15779 * @param[in] RgSchCellCb *cell
15780 * @param[in] RgSchUeCb *ue
15782 * @param[in] Bool isPeriodic
15787 PRIVATE Void rgSCHCmnDlSetUeRi
15795 PRIVATE Void rgSCHCmnDlSetUeRi(cell, ue, ri, isPer)
15802 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15803 RgSchCmnUeInfo *ueSchCmn = RG_SCH_CMN_GET_CMN_UE(ue);
15804 TRC2(rgSCHCmnDlSetUeRi);
15807 RgSchUePCqiCb *cqiCb = RG_SCH_GET_UE_CELL_CQI_CB(ue,cell);
15812 /* FIX for RRC Reconfiguration issue */
15813    /* ccpu00140894 - During Tx Mode transition the RI report will not be entertained for
15814     * a specific period during which SCH expects the UE to complete the TX mode transition */
15815 if (ue->txModeTransCmplt == FALSE)
15820 /* Restrict the Number of TX layers to cell->numTxAntPorts.
15821 * Protection from invalid RI values. */
15822 ri = RGSCH_MIN(ri, cell->numTxAntPorts);
15824 /* Special case of converting PMI to sane value when
15825 * there is a switch in RI from 1 to 2 and PMI reported
15826 * for RI=1 is invalid for RI=2 */
15827 if ((cell->numTxAntPorts == 2) && (ue->mimoInfo.txMode == RGR_UE_TM_4))
15829 if ((ri == 2) && ( ueDl->mimoInfo.ri == 1))
15831 ueDl->mimoInfo.pmi = (ueDl->mimoInfo.pmi < 2)? 1:2;
15835 /* Restrict the Number of TX layers according to the UE Category */
15836 ueDl->mimoInfo.ri = RGSCH_MIN(ri, rgUeCatTbl[ueSchCmn->ueCat].maxTxLyrs);
15838 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].riCnt[ueDl->mimoInfo.ri-1]++;
15839 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15843 ue->tenbStats->stats.nonPersistent.sch[0].riCnt[ueDl->mimoInfo.ri-1]++;
15844 cell->tenbStats->sch.riCnt[ueDl->mimoInfo.ri-1]++;
15850 /* If RI is from Periodic CQI report */
15851 cqiCb->perRiVal = ueDl->mimoInfo.ri;
15852 /* Reset at every Periodic RI Reception */
15853 cqiCb->invalidateCqi = FALSE;
15857 /* If RI is from Aperiodic CQI report */
15858 if (cqiCb->perRiVal != ueDl->mimoInfo.ri)
15860 /* if this aperRI is different from last reported
15861 * perRI then invalidate all CQI reports till next
15863 cqiCb->invalidateCqi = TRUE;
15867 cqiCb->invalidateCqi = FALSE;
15872 if (ueDl->mimoInfo.ri > 1)
15874 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
15876 else if (ue->mimoInfo.txMode == RGR_UE_TM_3) /* ri == 1 */
15878 RG_SCH_CMN_SET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_RI_1);
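/* Hedged sketch of the RI clamping performed above: the reported rank is
 * first limited by the number of cell TX antenna ports and then by the
 * maximum layers the UE category supports; anything larger is treated as a
 * misreport. Local names only. */
static unsigned exampleClampRi(unsigned reportedRi, unsigned numTxAntPorts,
                               unsigned ueCatMaxLyrs)
{
   unsigned ri = (reportedRi < numTxAntPorts) ? reportedRi : numTxAntPorts;

   /* e.g. reportedRi = 4, numTxAntPorts = 2, ueCatMaxLyrs = 2 -> ri = 2 */
   return (ri < ueCatMaxLyrs) ? ri : ueCatMaxLyrs;
}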
15886 * @brief This function performs PMI validation and
15887 * updates it to the ueCb.
15891 * Function: rgSCHCmnDlSetUePmi
15892 * Purpose: This function performs PMI validation and
15893 * updates it to the ueCb.
15895 * Invoked by: rgSCHCmnDlCqiInd
15897 * @param[in] RgSchCellCb *cell
15898 * @param[in] RgSchUeCb *ue
15899 * @param[in] U8 pmi
15904 PRIVATE S16 rgSCHCmnDlSetUePmi
15911 PRIVATE S16 rgSCHCmnDlSetUePmi(cell, ue, pmi)
15917 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
15918 TRC2(rgSCHCmnDlSetUePmi);
15920 if (ue->txModeTransCmplt == FALSE)
15925 if (cell->numTxAntPorts == 2)
15931 if (ueDl->mimoInfo.ri == 2)
15933 /*ccpu00118150 - MOD - changed pmi value validation from 0 to 2*/
15934          /* PMI 2 and 3 are invalid in case of 2 TxAnt and 2 Layered SM */
15935 if (pmi == 2 || pmi == 3)
15939 ueDl->mimoInfo.pmi = pmi+1;
15943 ueDl->mimoInfo.pmi = pmi;
15946 else if (cell->numTxAntPorts == 4)
15952 ueDl->mimoInfo.pmi = pmi;
15954 /* Reset the No PMI Flag in forceTD */
15955 RG_SCH_CMN_UNSET_FORCE_TD(ue, cell, RG_SCH_CMN_TD_NO_PMI);
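/* Hedged restatement of the 2-antenna-port PMI handling above: with rank 2
 * only codebook indices 0 and 1 are usable, so reports of 2 or 3 are
 * rejected, and the accepted index is stored offset by one (mirroring the
 * pmi+1 store above); with rank 1 the reported index is stored as is. The
 * helper is an illustration only. */
static int exampleStorePmi2Tx(unsigned ri, unsigned pmi, unsigned *storedPmi)
{
   if (ri == 2)
   {
      if ((pmi == 2) || (pmi == 3))
      {
         return -1;            /* invalid for 2 TxAnt, 2-layered SM */
      }
      *storedPmi = pmi + 1;    /* mirrors the pmi+1 storage above   */
      return 0;
   }
   *storedPmi = pmi;
   return 0;
}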
15960 * @brief This function Updates the DL CQI on PUCCH for the UE.
15964 * Function: rgSCHCmnDlProcCqiMode10
15966 * This function updates the DL CQI on PUCCH for the UE.
15968 * Invoked by: rgSCHCmnDlCqiOnPucchInd
15970 * Processing Steps:
15972 * @param[in] RgSchCellCb *cell
15973 * @param[in] RgSchUeCb *ue
15974 * @param[in] TfuDlCqiRpt *dlCqiRpt
15979 #ifdef RGR_CQI_REPT
15981 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
15985 TfuDlCqiPucch *pucchCqi,
15989 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail)
15992 TfuDlCqiPucch *pucchCqi;
15997 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10
16001 TfuDlCqiPucch *pucchCqi
16004 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi)
16007 TfuDlCqiPucch *pucchCqi;
16011 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16012 TRC2(rgSCHCmnDlProcCqiMode10);
16014 if (pucchCqi->u.mode10Info.type == TFU_RPT_CQI)
16016 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16017 /* Checking whether the decoded CQI is a value between 1 and 15*/
16018 if((pucchCqi->u.mode10Info.u.cqi) && (pucchCqi->u.mode10Info.u.cqi
16019 < RG_SCH_CMN_MAX_CQI))
16021 ueDl->cqiFlag = TRUE;
16022 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode10Info.u.cqi;
16023 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16024 /* ccpu00117452 - MOD - Changed macro name from
16025 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16026 #ifdef RGR_CQI_REPT
16027 *isCqiAvail = TRUE;
16035 else if (pucchCqi->u.mode10Info.type == TFU_RPT_RI)
16037 if ( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode10Info.u.ri) )
16039 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode10Info.u.ri,
16044 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16045 pucchCqi->u.mode10Info.u.ri,ue->ueId);
16052 * @brief This function Updates the DL CQI on PUCCH for the UE.
16056 * Function: rgSCHCmnDlProcCqiMode11
16058 * This function updates the DL CQI on PUCCH for the UE.
16060 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16062 * Processing Steps:
16063 * Process CQI MODE 11
16064 * @param[in] RgSchCellCb *cell
16065 * @param[in] RgSchUeCb *ue
16066 * @param[in] TfuDlCqiRpt *dlCqiRpt
16071 #ifdef RGR_CQI_REPT
16073 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16077 TfuDlCqiPucch *pucchCqi,
16079 Bool *is2ndCwCqiAvail
16082 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16085 TfuDlCqiPucch *pucchCqi;
16087 Bool *is2ndCwCqiAvail;
16091 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11
16095 TfuDlCqiPucch *pucchCqi
16098 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi)
16101 TfuDlCqiPucch *pucchCqi;
16105 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16106 TRC2(rgSCHCmnDlProcCqiMode11);
16108 if (pucchCqi->u.mode11Info.type == TFU_RPT_CQI)
16110 ue->mimoInfo.puschFdbkVld = FALSE;
16111 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16112 if((pucchCqi->u.mode11Info.u.cqi.cqi) &&
16113 (pucchCqi->u.mode11Info.u.cqi.cqi < RG_SCH_CMN_MAX_CQI))
16115 ueDl->cqiFlag = TRUE;
16116 /* ccpu00117452 - MOD - Changed macro name from
16117 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16118 #ifdef RGR_CQI_REPT
16119 *isCqiAvail = TRUE;
16121 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode11Info.u.cqi.cqi;
16122 if (pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.pres)
16124 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16125 ueDl->mimoInfo.cwInfo[1].cqi, \
16126 pucchCqi->u.mode11Info.u.cqi.wideDiffCqi.val);
16127 #ifdef RGR_CQI_REPT
16128 /* ccpu00117259 - ADD - Considering second codeword CQI info
16129 in case of MIMO for CQI Reporting */
16130 *is2ndCwCqiAvail = TRUE;
16138 rgSCHCmnDlSetUePmi(cell, ue, \
16139 pucchCqi->u.mode11Info.u.cqi.pmi);
16141 else if (pucchCqi->u.mode11Info.type == TFU_RPT_RI)
16143 if( RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode11Info.u.ri))
16145 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode11Info.u.ri,
16150 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16151 pucchCqi->u.mode11Info.u.ri,ue->ueId);
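/* Illustrative sketch of deriving the second-codeword CQI from the wideband
 * CQI and the spatial differential CQI index in a mode 1-1 report. The real
 * derivation is done by the RG_SCH_UPDT_CW2_CQI macro, whose body is not
 * shown in this listing; this sketch assumes the offset mapping of 36.213
 * Table 7.2-2 and clamps the result to the 0..15 CQI range. */
static unsigned char cw2CqiFromDiffSketch(unsigned char cw0Cqi,
                                          unsigned char diffIdx /* 0..7 */)
{
   /* offset = wideband CQI minus codeword-1 CQI, per 36.213 Table 7.2-2 */
   static const signed char offset[8] = { 0, 1, 2, 3, -4, -3, -2, -1 };
   int cw1 = (int)cw0Cqi - offset[diffIdx & 0x7];

   if (cw1 < 0)  cw1 = 0;    /* keep within the reportable CQI range */
   if (cw1 > 15) cw1 = 15;
   return (unsigned char)cw1;
}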
16158 * @brief This function Updates the DL CQI on PUCCH for the UE.
16162 * Function: rgSCHCmnDlProcCqiMode20
16164 * This function updates the DL CQI on PUCCH for the UE.
16166 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16168 * Processing Steps:
16169 * Process CQI MODE 20
16170 * @param[in] RgSchCellCb *cell
16171 * @param[in] RgSchUeCb *ue
16172 * @param[in] TfuDlCqiRpt *dlCqiRpt
16177 #ifdef RGR_CQI_REPT
16179 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16183 TfuDlCqiPucch *pucchCqi,
16187 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail )
16190 TfuDlCqiPucch *pucchCqi;
16195 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20
16199 TfuDlCqiPucch *pucchCqi
16202 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi)
16205 TfuDlCqiPucch *pucchCqi;
16209 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16210 TRC2(rgSCHCmnDlProcCqiMode20);
16212 if (pucchCqi->u.mode20Info.type == TFU_RPT_CQI)
16214 if (pucchCqi->u.mode20Info.u.cqi.isWideband)
16216 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16217 if((pucchCqi->u.mode20Info.u.cqi.u.wideCqi) &&
16218 (pucchCqi->u.mode20Info.u.cqi.u.wideCqi < RG_SCH_CMN_MAX_CQI))
16220 ueDl->cqiFlag = TRUE;
16221 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode20Info.u.cqi.\
16223 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16224 /* ccpu00117452 - MOD - Changed macro name from
16225 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16226 #ifdef RGR_CQI_REPT
16227 *isCqiAvail = TRUE;
16236 else if (pucchCqi->u.mode20Info.type == TFU_RPT_RI)
16238 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode20Info.u.ri))
16240 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode20Info.u.ri,
16245 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16246 pucchCqi->u.mode20Info.u.ri,ue->ueId);
16254 * @brief This function Updates the DL CQI on PUCCH for the UE.
16258 * Function: rgSCHCmnDlProcCqiMode21
16260 * This function updates the DL CQI on PUCCH for the UE.
16262 * Invoked by: rgSCHCmnDlCqiOnPucchInd
16264 * Processing Steps:
16265 * Process CQI MODE 21
16266 * @param[in] RgSchCellCb *cell
16267 * @param[in] RgSchUeCb *ue
16268 * @param[in] TfuDlCqiRpt *dlCqiRpt
16273 #ifdef RGR_CQI_REPT
16275 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16279 TfuDlCqiPucch *pucchCqi,
16281 Bool *is2ndCwCqiAvail
16284 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail, is2ndCwCqiAvail)
16287 TfuDlCqiPucch *pucchCqi;
16288 TfuDlCqiRpt *dlCqiRpt;
16290 Bool *is2ndCwCqiAvail;
16294 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21
16298 TfuDlCqiPucch *pucchCqi
16301 PRIVATE INLINE Void rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi)
16304 TfuDlCqiPucch *pucchCqi;
16308 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16309 TRC2(rgSCHCmnDlProcCqiMode21);
16311 if (pucchCqi->u.mode21Info.type == TFU_RPT_CQI)
16313 ue->mimoInfo.puschFdbkVld = FALSE;
16314 if (pucchCqi->u.mode21Info.u.cqi.isWideband)
16316 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16317 if((pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi) &&
16318 (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.cqi < RG_SCH_CMN_MAX_CQI))
16320 ueDl->cqiFlag = TRUE;
16321 ueDl->mimoInfo.cwInfo[0].cqi = pucchCqi->u.mode21Info.u.cqi.\
16323 if (pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.pres)
16325 RG_SCH_UPDT_CW2_CQI(ueDl->mimoInfo.cwInfo[0].cqi, \
16326 ueDl->mimoInfo.cwInfo[1].cqi, \
16327 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.diffCqi.val);
16328 #ifdef RGR_CQI_REPT
16329 /* ccpu00117259 - ADD - Considering second codeword CQI info
16330 in case of MIMO for CQI Reporting */
16331 *is2ndCwCqiAvail = TRUE;
16334 /* ccpu00117452 - MOD - Changed macro name from
16335 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16336 #ifdef RGR_CQI_REPT
16337 *isCqiAvail = TRUE;
16344 rgSCHCmnDlSetUePmi(cell, ue, \
16345 pucchCqi->u.mode21Info.u.cqi.u.wideCqi.pmi);
16348 else if (pucchCqi->u.mode21Info.type == TFU_RPT_RI)
16350 if(RG_SCH_CMN_IS_RI_VALID(pucchCqi->u.mode21Info.u.ri))
16352 rgSCHCmnDlSetUeRi(cell, ue, pucchCqi->u.mode21Info.u.ri,
16357 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Invalid RI value(%x) CRNTI:%d",
16358 pucchCqi->u.mode21Info.u.ri,ue->ueId);
16366 * @brief This function Updates the DL CQI on PUCCH for the UE.
16370 * Function: rgSCHCmnDlCqiOnPucchInd
16372 * This function updates the DL CQI on PUCCH for the UE.
16374 * Invoked by: rgSCHCmnDlCqiInd
16376 * Processing Steps:
16377 * - Depending on the reporting mode of the PUCCH, the CQI/PMI/RI values
16378 * are updated and stored for each UE
16380 * @param[in] RgSchCellCb *cell
16381 * @param[in] RgSchUeCb *ue
16382 * @param[in] TfuDlCqiRpt *dlCqiRpt
16387 #ifdef RGR_CQI_REPT
16389 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16393 TfuDlCqiPucch *pucchCqi,
16394 RgrUeCqiRept *ueCqiRept,
16396 Bool *is2ndCwCqiAvail
16399 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16402 TfuDlCqiPucch *pucchCqi;
16403 RgrUeCqiRept *ueCqiRept;
16405 Bool *is2ndCwCqiAvail;
16409 PRIVATE Void rgSCHCmnDlCqiOnPucchInd
16413 TfuDlCqiPucch *pucchCqi
16416 PRIVATE Void rgSCHCmnDlCqiOnPucchInd(cell, ue, pucchCqi)
16419 TfuDlCqiPucch *pucchCqi;
16423 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16424 TRC2(rgSCHCmnDlCqiOnPucchInd);
16426 /* ccpu00117452 - MOD - Changed
16427 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16428 #ifdef RGR_CQI_REPT
16429 /* Save CQI mode information in the report */
16430 ueCqiRept->cqiMode = pucchCqi->mode;
16433 switch(pucchCqi->mode)
16435 case TFU_PUCCH_CQI_MODE10:
16436 #ifdef RGR_CQI_REPT
16437 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi, isCqiAvail);
16439 rgSCHCmnDlProcCqiMode10(cell, ue, pucchCqi);
16441 ueDl->cqiFlag = TRUE;
16443 case TFU_PUCCH_CQI_MODE11:
16444 #ifdef RGR_CQI_REPT
16445 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi, isCqiAvail,
16448 rgSCHCmnDlProcCqiMode11(cell, ue, pucchCqi);
16450 ueDl->cqiFlag = TRUE;
16452 case TFU_PUCCH_CQI_MODE20:
16453 #ifdef RGR_CQI_REPT
16454 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi, isCqiAvail);
16456 rgSCHCmnDlProcCqiMode20(cell, ue, pucchCqi);
16458 ueDl->cqiFlag = TRUE;
16460 case TFU_PUCCH_CQI_MODE21:
16461 #ifdef RGR_CQI_REPT
16462 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi, isCqiAvail,
16465 rgSCHCmnDlProcCqiMode21(cell, ue, pucchCqi);
16467 ueDl->cqiFlag = TRUE;
16471 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Unknown CQI Mode %d CRNTI:%d",
16472 pucchCqi->mode,ue->ueId);
16473 /* ccpu00117452 - MOD - Changed macro name from
16474 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16475 #ifdef RGR_CQI_REPT
16476 *isCqiAvail = FALSE;
16483 } /* rgSCHCmnDlCqiOnPucchInd */
16487 * @brief This function Updates the DL CQI on PUSCH for the UE.
16491 * Function: rgSCHCmnDlCqiOnPuschInd
16493 * This function updates the DL CQI on PUSCH for the UE.
16495 * Invoked by: rgSCHCmnDlCqiInd
16497 * Processing Steps:
16498 * - Depending on the reporting mode of the PUSCH, the CQI/PMI/RI values
16499 * are updated and stored for each UE
16501 * @param[in] RgSchCellCb *cell
16502 * @param[in] RgSchUeCb *ue
16503 * @param[in] TfuDlCqiRpt *dlCqiRpt
16508 #ifdef RGR_CQI_REPT
16510 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16514 TfuDlCqiPusch *puschCqi,
16515 RgrUeCqiRept *ueCqiRept,
16517 Bool *is2ndCwCqiAvail
16520 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi, ueCqiRept, isCqiAvail, is2ndCwCqiAvail)
16523 TfuDlCqiPusch *puschCqi;
16524 RgrUeCqiRept *ueCqiRept;
16526 Bool *is2ndCwCqiAvail;
16530 PRIVATE Void rgSCHCmnDlCqiOnPuschInd
16534 TfuDlCqiPusch *puschCqi
16537 PRIVATE Void rgSCHCmnDlCqiOnPuschInd(cell, ue, puschCqi)
16540 TfuDlCqiPusch *puschCqi;
16544 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16546 TRC2(rgSCHCmnDlCqiOnPuschInd);
16547 if (puschCqi->ri.pres == PRSNT_NODEF)
16549 if (RG_SCH_CMN_IS_RI_VALID(puschCqi->ri.val))
16551 /* Saving the previous ri value to revert back
16552 in case PMI update failed */
16553 if (RGR_UE_TM_4 == ue->mimoInfo.txMode ) /* Checking for TM4. TM8 check later */
16555 prevRiVal = ueDl->mimoInfo.ri;
16557 rgSCHCmnDlSetUeRi(cell, ue, puschCqi->ri.val, FALSE);
16561 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,"Invalid RI value(%x) CRNTI:%d",
16562 puschCqi->ri.val,ue->ueId);
16566 ue->mimoInfo.puschFdbkVld = FALSE;
16567 /* ccpu00117452 - MOD - Changed macro name from
16568 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16569 #ifdef RGR_CQI_REPT
16570 /* Save CQI mode information in the report */
16571 ueCqiRept->cqiMode = puschCqi->mode;
16572 /* ccpu00117259 - DEL - removed default setting of isCqiAvail to TRUE */
16575 switch(puschCqi->mode)
16577 case TFU_PUSCH_CQI_MODE_20:
16578 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16579 /* Checking whether the decoded CQI is a value between 1 and 15*/
16580 if((puschCqi->u.mode20Info.wideBandCqi) &&
16581 (puschCqi->u.mode20Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
16583 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode20Info.wideBandCqi;
16584 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16585 /* ccpu00117452 - MOD - Changed macro name from
16586 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16587 #ifdef RGR_CQI_REPT
16588 *isCqiAvail = TRUE;
16596 case TFU_PUSCH_CQI_MODE_30:
16597 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16598 if((puschCqi->u.mode30Info.wideBandCqi) &&
16599 (puschCqi->u.mode30Info.wideBandCqi < RG_SCH_CMN_MAX_CQI))
16601 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode30Info.wideBandCqi;
16602 ueDl->mimoInfo.cwInfo[1].cqi = ueDl->mimoInfo.cwInfo[0].cqi;
16603 /* ccpu00117452 - MOD - Changed macro name from
16604 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16605 #ifdef RGR_CQI_REPT
16606 *isCqiAvail = TRUE;
16610 extern U32 gACqiRcvdCount;
16621 case TFU_PUSCH_CQI_MODE_12:
16622 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16623 if((puschCqi->u.mode12Info.cqiIdx[0]) &&
16624 (puschCqi->u.mode12Info.cqiIdx[0] < RG_SCH_CMN_MAX_CQI))
16626 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode12Info.cqiIdx[0];
16627 /* ccpu00117452 - MOD - Changed macro name from
16628 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16629 #ifdef RGR_CQI_REPT
16630 *isCqiAvail = TRUE;
16637 if((puschCqi->u.mode12Info.cqiIdx[1]) &&
16638 (puschCqi->u.mode12Info.cqiIdx[1] < RG_SCH_CMN_MAX_CQI))
16640 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode12Info.cqiIdx[1];
16641 /* ccpu00117452 - MOD - Changed macro name from
16642 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16643 #ifdef RGR_CQI_REPT
16644 /* ccpu00117259 - ADD - Considering second codeword CQI info
16645 in case of MIMO for CQI Reporting */
16646 *is2ndCwCqiAvail = TRUE;
16653 ue->mimoInfo.puschFdbkVld = TRUE;
16654 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_12;
16655 ue->mimoInfo.puschPmiInfo.u.mode12Info = puschCqi->u.mode12Info;
16656 /* Resetting this is time based. Make use of the CQI reporting
16657 * periodicity and DELTAs in determining the exact time at which this
16658 * needs to be reset. */
16660 case TFU_PUSCH_CQI_MODE_22:
16661 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16662 if((puschCqi->u.mode22Info.wideBandCqi[0]) &&
16663 (puschCqi->u.mode22Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16665 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode22Info.wideBandCqi[0];
16666 /* ccpu00117452 - MOD - Changed macro name from
16667 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16668 #ifdef RGR_CQI_REPT
16669 *isCqiAvail = TRUE;
16676 if((puschCqi->u.mode22Info.wideBandCqi[1]) &&
16677 (puschCqi->u.mode22Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16679 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode22Info.wideBandCqi[1];
16680 /* ccpu00117452 - MOD - Changed macro name from
16681 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16682 #ifdef RGR_CQI_REPT
16683 /* ccpu00117259 - ADD - Considering second codeword CQI info
16684 in case of MIMO for CQI Reporting */
16685 *is2ndCwCqiAvail = TRUE;
16692 rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode22Info.wideBandPmi);
16693 ue->mimoInfo.puschFdbkVld = TRUE;
16694 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_22;
16695 ue->mimoInfo.puschPmiInfo.u.mode22Info = puschCqi->u.mode22Info;
16697 case TFU_PUSCH_CQI_MODE_31:
16698 /*ccpu00109787 - ADD - Check for non-zero CQI*/
16699 if((puschCqi->u.mode31Info.wideBandCqi[0]) &&
16700 (puschCqi->u.mode31Info.wideBandCqi[0] < RG_SCH_CMN_MAX_CQI))
16702 ueDl->mimoInfo.cwInfo[0].cqi = puschCqi->u.mode31Info.wideBandCqi[0];
16703 /* ccpu00117452 - MOD - Changed macro name from
16704 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16705 #ifdef RGR_CQI_REPT
16706 *isCqiAvail = TRUE;
16709 if (ueDl->mimoInfo.ri > 1)
16711 if((puschCqi->u.mode31Info.wideBandCqi[1]) &&
16712 (puschCqi->u.mode31Info.wideBandCqi[1] < RG_SCH_CMN_MAX_CQI))
16714 ueDl->mimoInfo.cwInfo[1].cqi = puschCqi->u.mode31Info.wideBandCqi[1];
16715 /* ccpu00117452 - MOD - Changed macro name from
16716 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16717 #ifdef RGR_CQI_REPT
16718 /* ccpu00117259 - ADD - Considering second codeword CQI info
16719 in case of MIMO for CQI Reporting */
16720 *is2ndCwCqiAvail = TRUE;
16724 if (rgSCHCmnDlSetUePmi(cell, ue, puschCqi->u.mode31Info.pmi) != ROK)
16726 /* To avoid Rank and PMI inconsistency */
16727 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16728 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16730 ueDl->mimoInfo.ri = prevRiVal;
16733 ue->mimoInfo.puschPmiInfo.mode = TFU_PUSCH_CQI_MODE_31;
16734 ue->mimoInfo.puschPmiInfo.u.mode31Info = puschCqi->u.mode31Info;
16738 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId, "Unknown CQI Mode %d CRNTI:%d",
16739 puschCqi->mode,ue->ueId);
16740 /* CQI decoding failed revert the RI to previous value */
16741 if ((puschCqi->ri.pres == PRSNT_NODEF) &&
16742 (RGR_UE_TM_4 == ue->mimoInfo.txMode)) /* checking for TM4. TM8 check later */
16744 ueDl->mimoInfo.ri = prevRiVal;
16746 /* ccpu00117452 - MOD - Changed macro name from
16747 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16748 #ifdef RGR_CQI_REPT
16749 *isCqiAvail = FALSE;
16750 /* ccpu00117259 - ADD - Considering second codeword CQI info
16751 in case of MIMO for CQI Reporting */
16752 *is2ndCwCqiAvail = FALSE;
16759 } /* rgSCHCmnDlCqiOnPuschInd */
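/* Minimal sketch (hypothetical names) of the rank/PMI consistency pattern
 * used above for aperiodic PUSCH reports: the previous rank is saved before
 * the reported one is applied, and it is restored if the subsequent PMI
 * update fails, so the stored rank and PMI never disagree. */
typedef struct { unsigned char ri; unsigned char pmi; } MimoStateSketch;

static int applyAperiodicRiPmiSketch(MimoStateSketch *st,
                                     unsigned char newRi,
                                     unsigned char newPmi,
                                     int (*setPmi)(MimoStateSketch *, unsigned char))
{
   unsigned char prevRi = st->ri;   /* saved for rollback */

   st->ri = newRi;                  /* tentatively apply the reported rank */
   if (setPmi(st, newPmi) != 0)
   {
      st->ri = prevRi;              /* PMI invalid for this rank: revert RI */
      return -1;
   }
   return 0;
}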
16763 * @brief This function Updates the DL CQI for the UE.
16767 * Function: rgSCHCmnDlCqiInd
16768 * Purpose: Updates the DL CQI for the UE
16772 * @param[in] RgSchCellCb *cell
16773 * @param[in] RgSchUeCb *ue
16774 * @param[in] TfuDlCqiRpt *dlCqi
16779 PUBLIC Void rgSCHCmnDlCqiInd
16785 CmLteTimingInfo timingInfo
16788 PUBLIC Void rgSCHCmnDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo)
16793 CmLteTimingInfo timingInfo;
16796 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
16797 /* ccpu00117452 - MOD - Changed macro name from
16798 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16799 #ifdef RGR_CQI_REPT
16800 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
16801 RgrUeCqiRept ueCqiRept = {{0}};
16802 Bool isCqiAvail = FALSE;
16803 /* ccpu00117259 - ADD - Considering second codeword CQI info
16804 in case of MIMO for CQI Reporting */
16805 Bool is2ndCwCqiAvail = FALSE;
16808 TRC2(rgSCHCmnDlCqiInd);
16810 #ifdef RGR_CQI_REPT
16813 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16817 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi, &ueCqiRept, &isCqiAvail, &is2ndCwCqiAvail);
16822 rgSCHCmnDlCqiOnPucchInd(cell, ue, (TfuDlCqiPucch *)dlCqi);
16826 rgSCHCmnDlCqiOnPuschInd(cell, ue, (TfuDlCqiPusch *)dlCqi);
16830 #ifdef CQI_CONFBITMASK_DROP
16831 if(!ue->cqiConfBitMask)
16833 if (ueDl->mimoInfo.cwInfo[0].cqi >15)
16835 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16836 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
16838 else if ( ueDl->mimoInfo.cwInfo[0].cqi >= ue->prevCqi)
16840 ue->prevCqi = ueDl->mimoInfo.cwInfo[0].cqi;
16844 U8 dlCqiDeltaPrev = 0;
16845 dlCqiDeltaPrev = ue->prevCqi - ueDl->mimoInfo.cwInfo[0].cqi;
16846 if (dlCqiDeltaPrev > 3)
16847 dlCqiDeltaPrev = 3;
16848 if ((ue->prevCqi - dlCqiDeltaPrev) < 6)
16854 ue->prevCqi = ue->prevCqi - dlCqiDeltaPrev;
16856 ueDl->mimoInfo.cwInfo[0].cqi = ue->prevCqi;
16857 ueDl->mimoInfo.cwInfo[1].cqi = ue->prevCqi;
16863 /* ccpu00117452 - MOD - Changed macro name from
16864 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
16865 #ifdef RGR_CQI_REPT
16866 /* ccpu00117259 - ADD - Considering second codeword CQI info
16867 in case of MIMO for CQI Reporting - added is2ndCwCqiAvail\
16868 in 'if' condition*/
16869 if (RG_SCH_CQIR_IS_PUSHNCQI_ENBLE(ue) && (isCqiAvail || is2ndCwCqiAvail))
16871 ueCqiRept.cqi[0] = ueDl->mimoInfo.cwInfo[0].cqi;
16873 /* ccpu00117259 - ADD - Considering second codeword CQI info
16874 in case of MIMO for CQI Reporting - added is2ndCwCqiAvail
16875 in 'if' condition*/
16876 ueCqiRept.cqi[1] = 0;
16877 if(is2ndCwCqiAvail)
16879 ueCqiRept.cqi[1] = ueDl->mimoInfo.cwInfo[1].cqi;
16881 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, &ueCqiRept);
16886 rgSCHCmnDlSetUeAllocLmtLa(cell, ue);
16887 rgSCHCheckAndSetTxScheme(cell, ue);
16890 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), ue->isEmtcUe);
16892 rgSCHCmnDlSetUeAllocLmt(cell, RG_SCH_CMN_GET_DL_UE(ue,cell), FALSE);
16896 if (cellSch->dl.isDlFreqSel)
16898 cellSch->apisDlfs->rgSCHDlfsDlCqiInd(cell, ue, isPucchInfo, dlCqi, timingInfo);
16901 /* Call SPS module to update CQI indication */
16902 rgSCHCmnSpsDlCqiIndHndlr(cell, ue, timingInfo);
16904 /* Call Specific scheduler to process on dlCqiInd */
16906 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
16908 cellSch->apisEmtcDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
16913 cellSch->apisDl->rgSCHDlCqiInd(cell, ue, isPucchInfo, dlCqi);
16916 #ifdef RG_PFS_STATS
16917 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].avgCqi +=
16918 ueDl->mimoInfo.cwInfo[0].cqi;
16919 ue->pfsStats.cqiStats[(RG_SCH_GET_SCELL_INDEX(ue, cell))].totalCqiOcc++;
16923 ueDl->avgCqi += ueDl->mimoInfo.cwInfo[0].cqi;
16924 ueDl->numCqiOccns++;
16925 if (ueDl->mimoInfo.ri == 1)
16936 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16937 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16938 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw0Cqi ++;
16939 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].dlNumCw1Cqi ++;
16940 cell->tenbStats->sch.dlSumCw0Cqi += ueDl->mimoInfo.cwInfo[0].cqi;
16941 cell->tenbStats->sch.dlSumCw1Cqi += ueDl->mimoInfo.cwInfo[1].cqi;
16942 cell->tenbStats->sch.dlNumCw0Cqi ++;
16943 cell->tenbStats->sch.dlNumCw1Cqi ++;
16950 * @brief This function calculates the wideband CQI from SNR
16951 * reported for each RB.
16955 * Function: rgSCHCmnCalcWcqiFrmSnr
16956 * Purpose: Wideband CQI calculation from SNR
16958 * Invoked by: RG SCH
16960 * @param[in] RgSchCellCb *cell
16961 * @param[in] TfuSrsRpt *srsRpt,
16962 * @return Wideband CQI
16966 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr
16972 PRIVATE U8 rgSCHCmnCalcWcqiFrmSnr(cell,srsRpt)
16977 U8 wideCqi=1; /*Calculated value from SNR*/
16978 TRC2(rgSCHCmnCalcWcqiFrmSnr);
16979 /* Need to map a given SNR to a wideband CQI value.
16980 * The CQI calculation is still primitive; a more refined method
16981 * for deriving the wideband CQI from SNR is needed. */
16982 if (srsRpt->snr[0] <=50)
16986 else if (srsRpt->snr[0]>=51 && srsRpt->snr[0] <=100)
16990 else if (srsRpt->snr[0]>=101 && srsRpt->snr[0] <=150)
16994 else if (srsRpt->snr[0]>=151 && srsRpt->snr[0] <=200)
16998 else if (srsRpt->snr[0]>=201 && srsRpt->snr[0] <=250)
17007 }/*rgSCHCmnCalcWcqiFrmSnr*/
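/* Sketch of a table-driven form of the SNR-to-wideband-CQI step function
 * above. The SNR thresholds (multiples of 50) mirror the ranges checked in
 * rgSCHCmnCalcWcqiFrmSnr; the CQI assigned to each band is not visible in
 * this listing, so the values below are placeholders only. */
static unsigned char wideCqiFromSnrSketch(unsigned short snr)
{
   static const unsigned short thresh[] = { 50, 100, 150, 200, 250 };
   /* Placeholder CQI per band: the real mapping may differ */
   static const unsigned char  cqi[]    = { 1, 3, 6, 9, 12, 15 };
   unsigned int band = 0;

   while ((band < sizeof(thresh)/sizeof(thresh[0])) && (snr > thresh[band]))
   {
      band++;
   }
   return cqi[band];
}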
17011 * @brief This function Updates the SRS for the UE.
17015 * Function: rgSCHCmnSrsInd
17016 * Purpose: Updates the UL SRS for the UE
17020 * @param[in] RgSchCellCb *cell
17021 * @param[in] RgSchUeCb *ue
17022 * @param[in] TfuSrsRpt *srsRpt,
17027 PUBLIC Void rgSCHCmnSrsInd
17032 CmLteTimingInfo timingInfo
17035 PUBLIC Void rgSCHCmnSrsInd(cell, ue, srsRpt, timingInfo)
17039 CmLteTimingInfo timingInfo;
17042 U8 wideCqi; /*Calculated value from SNR*/
17043 U32 recReqTime; /*Received Time in TTI*/
17044 TRC2(rgSCHCmnSrsInd);
17046 recReqTime = (timingInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) + timingInfo.slot;
17047 ue->srsCb.selectedAnt = (recReqTime/ue->srsCb.peri)%2;
17048 if(srsRpt->wideCqiPres)
17050 wideCqi = srsRpt->wideCqi;
17054 wideCqi = rgSCHCmnCalcWcqiFrmSnr(cell, srsRpt);
17056 rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi);
17058 }/*rgSCHCmnSrsInd*/
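/* Sketch of the antenna-selection rule used above: the receive time in TTIs
 * is divided by the SRS periodicity, and the result alternates the selected
 * antenna (0/1) once per SRS period. For example, with a periodicity of 10,
 * TTIs 0..9 select antenna 0, TTIs 10..19 select antenna 1, and so on. */
static unsigned char srsSelectedAntSketch(unsigned int sfn,
                                          unsigned int slot,
                                          unsigned int subFramesPerSfn,
                                          unsigned int srsPeriod)
{
   unsigned int recReqTime = (sfn * subFramesPerSfn) + slot;
   return (unsigned char)((recReqTime / srsPeriod) % 2);
}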
17063 * @brief This function is a handler for the TA report for a UE.
17067 * Function: rgSCHCmnDlTARpt
17068 * Purpose: Determine based on UE_IDLE_TIME threshold,
17069 * whether UE needs to be Linked to the scheduler's TA list OR
17070 * if it needs a PDCCH Order.
17075 * @param[in] RgSchCellCb *cell
17076 * @param[in] RgSchUeCb *ue
17081 PUBLIC Void rgSCHCmnDlTARpt
17087 PUBLIC Void rgSCHCmnDlTARpt(cell, ue)
17092 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17093 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
17094 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
17095 CmLListCp poInactvLst;
17097 TRC2(rgSCHCmnDlTARpt);
17099 /* RACHO: If UE idle time is more than threshold, then
17100 * set its poInactv pdcch order inactivity */
17101 /* Fix : syed Ignore if TaTmr is not configured */
17102 if ((ue->dl.taCb.cfgTaTmr) && (rgSCHCmnUeIdleExdThrsld(cell, ue) == ROK))
17104 U32 prevDlMsk = ue->dl.dlInactvMask;
17105 U32 prevUlMsk = ue->ul.ulInactvMask;
17106 ue->dl.dlInactvMask |= RG_PDCCHODR_INACTIVE;
17107 ue->ul.ulInactvMask |= RG_PDCCHODR_INACTIVE;
17108 /* Indicate Specific scheduler for this UEs inactivity */
17109 cmLListInit(&poInactvLst);
17110 cmLListAdd2Tail(&poInactvLst, &ueDl->rachInfo.inActUeLnk);
17111 ueDl->rachInfo.inActUeLnk.node = (PTR)ue;
17112 /* Send inactivate ind only if not already sent */
17113 if (prevDlMsk == 0)
17115 cellSch->apisDl->rgSCHDlInactvtUes(cell, &poInactvLst);
17117 if (prevUlMsk == 0)
17119 cellSch->apisUl->rgSCHUlInactvtUes(cell, &poInactvLst);
17124 /* Fix: ccpu00124009 Fix for loop in the linked list "cellDl->taLst" */
17125 if (!ue->dlTaLnk.node)
17128 if(cell->emtcEnable)
17132 rgSCHEmtcAddToTaLst(cellDl,ue);
17139 cmLListAdd2Tail(&cellDl->taLst, &ue->dlTaLnk);
17140 ue->dlTaLnk.node = (PTR)ue;
17145 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
17146 "<TA>TA duplicate entry attempt failed: UEID:%u",
17155 * @brief Indication of UL CQI.
17159 * Function : rgSCHCmnFindUlCqiUlTxAnt
17161 * - Finds the Best Tx Antenna amongst the CQIs received
17162 * from Two Tx Antennas.
17164 * @param[in] RgSchCellCb *cell
17165 * @param[in] RgSchUeCb *ue
17166 * @param[in] U8 wideCqi
17170 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt
17177 PRIVATE Void rgSCHCmnFindUlCqiUlTxAnt(cell, ue, wideCqi)
17183 ue->validTxAnt = 1;
17185 } /* rgSCHCmnFindUlCqiUlTxAnt */
17189 * @brief Indication of UL CQI.
17193 * Function : rgSCHCmnUlCqiInd
17195 * - Updates uplink CQI information for the UE. Computes and
17196 * stores the lowest CQI of CQIs reported in all subbands.
17198 * @param[in] RgSchCellCb *cell
17199 * @param[in] RgSchUeCb *ue
17200 * @param[in] TfuUlCqiRpt *ulCqiInfo
17204 PUBLIC Void rgSCHCmnUlCqiInd
17208 TfuUlCqiRpt *ulCqiInfo
17211 PUBLIC Void rgSCHCmnUlCqiInd(cell, ue, ulCqiInfo)
17214 TfuUlCqiRpt *ulCqiInfo;
17217 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17218 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17223 #if (defined(SCH_STATS) || defined(TENB_STATS))
17224 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
17227 TRC2(rgSCHCmnUlCqiInd);
17228 /* consider inputs from SRS handlers about SRS occasions
17229 * in determining the UL TX Antenna selection */
17230 ueUl->crntUlCqi[0] = ulCqiInfo->wideCqi;
17232 ueUl->validUlCqi = ueUl->crntUlCqi[0];
17233 ue->validTxAnt = 0;
17235 iTbsNew = rgSchCmnUlCqiToTbsTbl[cell->isCpUlExtend][ueUl->validUlCqi];
17236 previTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
17238 if (RG_ITBS_DIFF(iTbsNew, previTbs) > 5)
17240 /* Ignore this iTBS report and mark that last iTBS report was */
17241 /* ignored so that subsequently we reset the LA algorithm */
17242 ueUl->ulLaCb.lastiTbsIgnored = TRUE;
17246 if (ueUl->ulLaCb.lastiTbsIgnored != TRUE)
17248 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17249 (80 * ueUl->ulLaCb.cqiBasediTbs))/100;
17253 /* Reset the LA as iTbs in use caught up with the value */
17254 /* reported by UE. */
17255 ueUl->ulLaCb.cqiBasediTbs = ((20 * iTbsNew * 100) +
17256 (80 * previTbs * 100))/100;
17257 ueUl->ulLaCb.deltaiTbs = 0;
17258 ueUl->ulLaCb.lastiTbsIgnored = FALSE;
17263 rgSCHPwrUlCqiInd(cell, ue);
17265 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17267 rgSCHCmnSpsUlCqiInd(cell, ue);
17270 /* Applicable to only some schedulers */
17272 if((TRUE == cell->emtcEnable) && (TRUE == ue->isEmtcUe))
17274 cellSch->apisEmtcUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
17279 cellSch->apisUl->rgSCHUlCqiInd(cell, ue, ulCqiInfo);
17283 ueUl->numCqiOccns++;
17284 ueUl->avgCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17289 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17290 ue->tenbStats->stats.nonPersistent.sch[RG_SCH_CELLINDEX(cell)].ulNumCqi ++;
17291 cell->tenbStats->sch.ulSumCqi += rgSCHCmnUlGetCqi(cell, ue, ueCtg);
17292 cell->tenbStats->sch.ulNumCqi ++;
17297 } /* rgSCHCmnUlCqiInd */
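/* Self-contained sketch of the UL link-adaptation filtering above: the iTBS
 * derived from the reported CQI is smoothed with a 20/80 weighted average
 * (kept scaled by 100 to avoid floating point), and a report that differs
 * from the iTBS currently in use by more than a configured number of steps
 * is ignored once; if the next report is also accepted after such a skip,
 * the average is re-seeded around the value in use. Names are illustrative. */
typedef struct
{
   int cqiBasediTbs;      /* scaled by 100 */
   int deltaiTbs;         /* scaled by 100 */
   int lastiTbsIgnored;   /* boolean */
} UlLaSketch;

static void ulLaCqiUpdateSketch(UlLaSketch *la, int iTbsNew, int maxJump)
{
   int previTbs = (la->cqiBasediTbs + la->deltaiTbs) / 100;
   int diff = (iTbsNew > previTbs) ? (iTbsNew - previTbs) : (previTbs - iTbsNew);

   if (diff > maxJump)
   {
      /* Outlier: skip this report, remember that it was skipped */
      la->lastiTbsIgnored = 1;
      return;
   }
   if (!la->lastiTbsIgnored)
   {
      /* Normal case: 20% new sample, 80% history */
      la->cqiBasediTbs = ((20 * iTbsNew * 100) + (80 * la->cqiBasediTbs)) / 100;
   }
   else
   {
      /* Re-seed the average around the value currently in use */
      la->cqiBasediTbs = ((20 * iTbsNew * 100) + (80 * previTbs * 100)) / 100;
      la->deltaiTbs = 0;
      la->lastiTbsIgnored = 0;
   }
}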
17300 * @brief Returns HARQ proc for which data expected now.
17304 * Function: rgSCHCmnUlHqProcForUe
17305 * Purpose: This function returns the harq process for
17306 * which data is expected in the current subframe.
17307 * It does not validate that the HARQ process
17308 * has an allocation.
17312 * @param[in] RgSchCellCb *cell
17313 * @param[in] CmLteTimingInfo frm
17314 * @param[in] RgSchUeCb *ue
17315 * @param[out] RgSchUlHqProcCb **procRef
17319 PUBLIC Void rgSCHCmnUlHqProcForUe
17322 CmLteTimingInfo frm,
17324 RgSchUlHqProcCb **procRef
17327 PUBLIC Void rgSCHCmnUlHqProcForUe(cell, frm, ue, procRef)
17329 CmLteTimingInfo frm;
17331 RgSchUlHqProcCb **procRef;
17335 U8 procId = rgSCHCmnGetUlHqProcIdx(&frm, cell);
17337 TRC2(rgSCHCmnUlHqProcForUe);
17339 *procRef = rgSCHUhmGetUlHqProc(cell, ue, procId);
17341 *procRef = rgSCHUhmGetUlProcByTime(cell, ue, frm);
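/* Illustrative sketch only: one common way to derive the UL HARQ process
 * index from the subframe timing is a fixed modulo over the number of UL
 * HARQ processes (8 for FDD). This is an assumption for illustration and is
 * not necessarily what rgSCHCmnGetUlHqProcIdx() implements. */
static unsigned char ulHqProcIdxSketch(unsigned short sfn,
                                       unsigned char subframe,
                                       unsigned char numUlHqProcs /* e.g. 8 */)
{
   unsigned int absSf = (unsigned int)sfn * 10u + subframe;
   return (unsigned char)(absSf % numUlHqProcs);
}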
17348 * @brief Update harq process for allocation.
17352 * Function : rgSCHCmnUpdUlHqProc
17354 * This function is invoked when harq process
17355 * control block is now in a new memory location
17356 * thus requiring a pointer/reference update.
17358 * @param[in] RgSchCellCb *cell
17359 * @param[in] RgSchUlHqProcCb *curProc
17360 * @param[in] RgSchUlHqProcCb *oldProc
17366 PUBLIC S16 rgSCHCmnUpdUlHqProc
17369 RgSchUlHqProcCb *curProc,
17370 RgSchUlHqProcCb *oldProc
17373 PUBLIC S16 rgSCHCmnUpdUlHqProc(cell, curProc, oldProc)
17375 RgSchUlHqProcCb *curProc;
17376 RgSchUlHqProcCb *oldProc;
17379 TRC2(rgSCHCmnUpdUlHqProc);
17383 #if (ERRCLASS & ERRCLS_DEBUG)
17384 if (curProc->alloc == NULLP)
17389 curProc->alloc->hqProc = curProc;
17391 } /* rgSCHCmnUpdUlHqProc */
17394 /*MS_WORKAROUND for CR FIXME */
17396 * @brief Handles BSR timer expiry
17400 * Function : rgSCHCmnBsrTmrExpry
17402 * This function is invoked when periodic BSR timer expires for a UE.
17404 * @param[in] RgSchUeCb *ue
17410 PUBLIC S16 rgSCHCmnBsrTmrExpry
17415 PUBLIC S16 rgSCHCmnBsrTmrExpry(ueCb)
17419 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ueCb->cell);
17421 TRC2(rgSCHCmnBsrTmrExpry)
17423 ueCb->isSrGrant = TRUE;
17426 emtcStatsUlBsrTmrTxp++;
17430 if(ueCb->cell->emtcEnable)
17434 cellSch->apisEmtcUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17441 cellSch->apisUl->rgSCHSrRcvd(ueCb->cell, ueCb);
17448 * @brief Short BSR update.
17452 * Function : rgSCHCmnUpdBsrShort
17454 * This function does the requisite updates to handle short BSR reporting.
17456 * @param[in] RgSchCellCb *cell
17457 * @param[in] RgSchUeCb *ue
17458 * @param[in] RgSchLcgCb *ulLcg
17459 * @param[in] U8 bsr
17460 * @param[out] RgSchErrInfo *err
17466 PUBLIC S16 rgSCHCmnUpdBsrShort
17475 PUBLIC S16 rgSCHCmnUpdBsrShort(cell, ue, ulLcg, bsr, err)
17485 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
17487 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17488 RgSchCmnLcg *cmnLcg = NULLP;
17493 TRC2(rgSCHCmnUpdBsrShort);
17495 if (!RGSCH_LCG_ISCFGD(ulLcg))
17497 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
17500 for (lcgCnt=0; lcgCnt<4; lcgCnt++)
17503 /* Set BS of all other LCGs to Zero.
17504 If Zero BSR is reported in Short BSR include this LCG too */
17505 if ((lcgCnt != ulLcg->lcgId) ||
17506 (!bsr && !ueUl->hqEnt.numBusyHqProcs))
17508 /* If old BO is zero do nothing */
17509 if(((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs != 0)
17511 for(idx = 0; idx < ue->ul.lcgArr[lcgCnt].numLch; idx++)
17513 if((ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount) &&
17514 (ue->ulActiveLCs & (1 <<
17515 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1))))
17518 ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->ulUeCount--;
17519 ue->ulActiveLCs &= ~(1 <<
17520 (ue->ul.lcgArr[lcgCnt].lcArray[idx]->qciCb->qci -1));
17526 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgCnt]))
17528 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->bs = 0;
17529 ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgCnt].sch))->reportedBs = 0;
17534 if(ulLcg->lcgId && bsr && (((RgSchCmnLcg *)(ulLcg->sch))->bs == 0))
17536 for(idx = 0; idx < ulLcg->numLch; idx++)
17539 if (!(ue->ulActiveLCs & (1 << (ulLcg->lcArray[idx]->qciCb->qci -1))))
17541 ulLcg->lcArray[idx]->qciCb->ulUeCount++;
17542 ue->ulActiveLCs |= (1 << (ulLcg->lcArray[idx]->qciCb->qci -1));
17547 /* Resetting the nonGbrLcgBs info here */
17548 ue->ul.nonGbrLcgBs = 0;
17549 ue->ul.nonLcg0Bs = 0;
17551 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17553 if (TRUE == ue->ul.useExtBSRSizes)
17555 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17559 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
17561 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17563 /* TBD check for effGbr != 0 */
17564 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17566 else if (0 == ulLcg->lcgId)
17568 /* This is added for handling LCG0 */
17569 cmnLcg->bs = cmnLcg->reportedBs;
17573 /* Update non GBR LCG's BS*/
17574 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17575 cmnLcg->bs = ue->ul.nonGbrLcgBs;
17577 ue->ul.totalBsr = cmnLcg->bs;
17580 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (bsr == 0))
17582 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17586 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
17588 rgSCHCmnSpsBsrRpt(cell, ue, ulLcg);
17591 rgSCHCmnUpdUlCompEffBsr(ue);
17594 if(cell->emtcEnable)
17598 cellSch->apisEmtcUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
17605 cellSch->apisUl->rgSCHUpdBsrShort(cell, ue, ulLcg, bsr);
17609 if (ue->ul.isUlCaEnabled && ue->numSCells)
17611 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17613 #ifndef PAL_ENABLE_UL_CA
17614 if((ue->cellInfo[sCellIdx] != NULLP) &&
17615 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17617 if(ue->cellInfo[sCellIdx] != NULLP)
17620 cellSch->apisUl->rgSCHUpdBsrShort(ue->cellInfo[sCellIdx]->cell,
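/* Compact sketch (hypothetical types, only the byte amounts kept) of the
 * short-BSR bookkeeping above: a short BSR carries one LCG and a table
 * index already decoded to bytes; the other LCGs' buffer state is cleared,
 * and the reported amount is capped by the LCG's GBR budget (GBR bearers),
 * taken as-is for LCG0, or capped by the UE's effective AMBR otherwise. */
#define SKETCH_NUM_LCG 4

typedef struct
{
   unsigned int bs[SKETCH_NUM_LCG];   /* buffer status per LCG, in bytes   */
   unsigned int effGbrBudget;         /* effGbr + effDeltaMbr for the LCG  */
   unsigned int effAmbr;              /* remaining AMBR for non-GBR LCGs   */
} ShortBsrSketch;

static unsigned int shortBsrUpdateSketch(ShortBsrSketch *ue,
                                         unsigned int lcgId,
                                         unsigned int reportedBytes,
                                         int isGbr)
{
   unsigned int i;

   for (i = 0; i < SKETCH_NUM_LCG; i++)   /* short BSR: other LCGs -> 0 */
   {
      if (i != lcgId)
      {
         ue->bs[i] = 0;
      }
   }
   if (isGbr)
   {
      ue->bs[lcgId] = (reportedBytes < ue->effGbrBudget) ? reportedBytes
                                                         : ue->effGbrBudget;
   }
   else if (lcgId == 0)
   {
      ue->bs[lcgId] = reportedBytes;      /* LCG0 is never capped */
   }
   else
   {
      ue->bs[lcgId] = (reportedBytes < ue->effAmbr) ? reportedBytes
                                                    : ue->effAmbr;
   }
   return ue->bs[lcgId];                  /* total BSR after a short BSR */
}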
17631 * @brief Truncated BSR update.
17635 * Function : rgSCHCmnUpdBsrTrunc
17637 * This function does the required updates to handle a truncated BSR report.
17640 * @param[in] RgSchCellCb *cell
17641 * @param[in] RgSchUeCb *ue
17642 * @param[in] RgSchLcgCb *ulLcg
17643 * @param[in] U8 bsr
17644 * @param[out] RgSchErrInfo *err
17650 PUBLIC S16 rgSCHCmnUpdBsrTrunc
17659 PUBLIC S16 rgSCHCmnUpdBsrTrunc(cell, ue, ulLcg, bsr, err)
17667 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17668 RgSchCmnLcg *cmnLcg = NULLP;
17674 TRC2(rgSCHCmnUpdBsrTrunc);
17676 if (!RGSCH_LCG_ISCFGD(ulLcg))
17678 err->errCause = RGSCHERR_SCH_LCG_NOT_CFGD;
17681 /* Set all higher priority LCGs' bs to 0, update this LCG's bs, and
17682 set total bsr = sum of all LCGs' bs */
17685 for (cnt = ulLcg->lcgId-1; cnt >= 0; cnt--)
17688 /* If existing BO is zero then don't do anything */
17689 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs != 0)
17691 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17694 if((ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount) &&
17695 (ue->ulActiveLCs & (1 <<
17696 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17698 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount--;
17699 ue->ulActiveLCs &= ~(1 <<
17700 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
17705 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs = 0;
17706 ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->reportedBs = 0;
17711 for (cnt = ulLcg->lcgId; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17713 if (ulLcg->lcgId == 0)
17717 /* If existing BO is zero then don't do anything */
17718 if(((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs == 0)
17720 for(idx = 0; idx < ue->ul.lcgArr[cnt].numLch; idx++)
17723 if (!(ue->ulActiveLCs & (1 <<
17724 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1))))
17726 ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->ulUeCount++;
17727 ue->ulActiveLCs |= (1 <<
17728 (ue->ul.lcgArr[cnt].lcArray[idx]->qciCb->qci -1));
17734 ue->ul.nonGbrLcgBs = 0;
17735 ue->ul.nonLcg0Bs = 0;
17736 cmnLcg = ((RgSchCmnLcg *)(ulLcg->sch));
17737 if (TRUE == ue->ul.useExtBSRSizes)
17739 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsr];
17743 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsr];
17745 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17747 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17749 else if(ulLcg->lcgId == 0)
17751 /* This is for handling LCG0 */
17752 cmnLcg->bs = cmnLcg->reportedBs;
17756 ue->ul.nonGbrLcgBs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
17757 cmnLcg->bs = ue->ul.nonGbrLcgBs;
17759 ue->ul.totalBsr = cmnLcg->bs;
17761 for (cnt = ulLcg->lcgId+1; cnt < RGSCH_MAX_LCG_PER_UE; cnt++)
17763 /* TODO: The bs for the other LCGs may be stale because some or all of
17764 * it may already have been scheduled/data received. Please
17765 * consider this when truncated BSR is tested/implemented */
17766 ue->ul.totalBsr += ((RgSchCmnLcg *)(ue->ul.lcgArr[cnt].sch))->bs;
17769 rgSCHCmnUpdUlCompEffBsr(ue);
17772 if(cell->emtcEnable)
17776 cellSch->apisEmtcUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
17783 cellSch->apisUl->rgSCHUpdBsrTrunc(cell, ue, ulLcg, bsr);
17787 if (ue->ul.isUlCaEnabled && ue->numSCells)
17789 for(U8 sCellIdx = 1; sCellIdx <= RG_SCH_MAX_SCELL ; sCellIdx++)
17791 #ifndef PAL_ENABLE_UL_CA
17792 if((ue->cellInfo[sCellIdx] != NULLP) &&
17793 (ue->cellInfo[sCellIdx]->sCellState == RG_SCH_SCELL_ACTIVE))
17795 if(ue->cellInfo[sCellIdx] != NULLP)
17798 cellSch->apisUl->rgSCHUpdBsrTrunc(ue->cellInfo[sCellIdx]->cell, ue, ulLcg, bsr);
17808 * @brief Long BSR update.
17812 * Function : rgSCHCmnUpdBsrLong
17814 * - Update BSRs for all configured LCGs.
17815 * - Update priority of LCGs if needed.
17816 * - Update UE's position within/across uplink scheduling queues.
17819 * @param[in] RgSchCellCb *cell
17820 * @param[in] RgSchUeCb *ue
17821 * @param[in] U8 bsArr[]
17822 * @param[out] RgSchErrInfo *err
17828 PUBLIC S16 rgSCHCmnUpdBsrLong
17836 PUBLIC S16 rgSCHCmnUpdBsrLong(cell, ue, bsArr, err)
17843 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
17844 U32 tmpBsArr[4] = {0, 0, 0, 0};
17852 TRC2(rgSCHCmnUpdBsrLong);
17855 for(idx1 = 1; idx1 < RGSCH_MAX_LCG_PER_UE; idx1++)
17857 /* If Old BO is non zero then do nothing */
17858 if ((((RgSchCmnLcg *)(ue->ul.lcgArr[idx1].sch))->bs == 0)
17861 for(idx2 = 0; idx2 < ue->ul.lcgArr[idx1].numLch; idx2++)
17864 if (!(ue->ulActiveLCs & (1 <<
17865 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1))))
17867 ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->ulUeCount++;
17868 ue->ulActiveLCs |= (1 <<
17869 (ue->ul.lcgArr[idx1].lcArray[idx2]->qciCb->qci -1));
17875 ue->ul.nonGbrLcgBs = 0;
17876 ue->ul.nonLcg0Bs = 0;
17878 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[0]))
17880 if (TRUE == ue->ul.useExtBSRSizes)
17882 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnExtBsrTbl[bsArr[0]];
17883 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnExtBsrTbl[bsArr[0]];
17884 tmpBsArr[0] = rgSchCmnExtBsrTbl[bsArr[0]];
17888 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = rgSchCmnBsrTbl[bsArr[0]];
17889 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->reportedBs = rgSchCmnBsrTbl[bsArr[0]];
17890 tmpBsArr[0] = rgSchCmnBsrTbl[bsArr[0]];
17893 for (lcgId = 1; lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
17895 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
17897 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
17899 if (TRUE == ue->ul.useExtBSRSizes)
17901 cmnLcg->reportedBs = rgSchCmnExtBsrTbl[bsArr[lcgId]];
17905 cmnLcg->reportedBs = rgSchCmnBsrTbl[bsArr[lcgId]];
17907 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
17909 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr + cmnLcg->effDeltaMbr);
17910 tmpBsArr[lcgId] = cmnLcg->bs;
17914 nonGbrBs += cmnLcg->reportedBs;
17915 tmpBsArr[lcgId] = cmnLcg->reportedBs;
17916 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs,ue->ul.effAmbr);
17920 ue->ul.nonGbrLcgBs = RGSCH_MIN(nonGbrBs,ue->ul.effAmbr);
17922 ue->ul.totalBsr = tmpBsArr[0] + tmpBsArr[1] + tmpBsArr[2] + tmpBsArr[3];
17924 if ((ue->bsrTmr.tmrEvnt != TMR_NONE) && (ue->ul.totalBsr == 0))
17926 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
17931 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE) /* SPS_FIX */
17933 if(ue->ul.totalBsr - tmpBsArr[1] == 0)
17934 {/* Updating the BSR to SPS only if LCG1 BS is present in SPS active state */
17935 rgSCHCmnSpsBsrRpt(cell, ue, &ue->ul.lcgArr[1]);
17939 rgSCHCmnUpdUlCompEffBsr(ue);
17942 if(cell->emtcEnable)
17946 cellSch->apisEmtcUl->rgSCHUpdBsrLong(cell, ue, bsArr);
17953 cellSch->apisUl->rgSCHUpdBsrLong(cell, ue, bsArr);
17957 if (ue->ul.isUlCaEnabled && ue->numSCells)
17959 for(U8 idx = 1; idx <= RG_SCH_MAX_SCELL ; idx++)
17961 #ifndef PAL_ENABLE_UL_CA
17962 if((ue->cellInfo[idx] != NULLP) &&
17963 (ue->cellInfo[idx]->sCellState == RG_SCH_SCELL_ACTIVE))
17965 if(ue->cellInfo[idx] != NULLP)
17968 cellSch->apisUl->rgSCHUpdBsrLong(ue->cellInfo[idx]->cell, ue, bsArr);
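/* Sketch (hypothetical names) of the long-BSR accounting shown above: each
 * of the four LCGs gets its own table-decoded byte count; GBR LCGs are
 * capped by their GBR budget, the non-GBR LCGs are summed and that sum is
 * limited by the UE's effective AMBR, and the total BSR is the sum of the
 * per-LCG contributions. */
typedef struct
{
   unsigned int reported[4];      /* table-decoded bytes per LCG          */
   unsigned int gbrBudget[4];     /* effGbr + effDeltaMbr per GBR LCG     */
   int          isGbr[4];         /* LCG0 is neither GBR- nor AMBR-capped */
   unsigned int effAmbr;
} LongBsrSketch;

static unsigned int longBsrTotalSketch(const LongBsrSketch *r,
                                       unsigned int *nonGbrLcgBs /* out */)
{
   unsigned int lcg, total = 0, nonGbrSum = 0;

   for (lcg = 0; lcg < 4; lcg++)
   {
      unsigned int contrib = r->reported[lcg];

      if ((lcg != 0) && r->isGbr[lcg] && (contrib > r->gbrBudget[lcg]))
      {
         contrib = r->gbrBudget[lcg];   /* GBR LCG: cap by GBR budget */
      }
      else if ((lcg != 0) && !r->isGbr[lcg])
      {
         nonGbrSum += contrib;          /* non-GBR: aggregate first */
      }
      total += contrib;
   }
   /* The non-GBR aggregate is limited by the UE's effective AMBR */
   *nonGbrLcgBs = (nonGbrSum < r->effAmbr) ? nonGbrSum : r->effAmbr;
   return total;
}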
17978 * @brief PHR update.
17982 * Function : rgSCHCmnUpdExtPhr
17984 * Updates extended power headroom information for a UE.
17986 * @param[in] RgSchCellCb *cell
17987 * @param[in] RgSchUeCb *ue
17988 * @param[in] U8 phr
17989 * @param[out] RgSchErrInfo *err
17995 PUBLIC S16 rgSCHCmnUpdExtPhr
17999 RgInfExtPhrCEInfo *extPhr,
18003 PUBLIC S16 rgSCHCmnUpdExtPhr(cell, ue, extPhr, err)
18006 RgInfExtPhrCEInfo *extPhr;
18010 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18011 RgSchCmnAllocRecord *allRcd;
18012 CmLList *node = ueUl->ulAllocLst.last;
18015 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18017 TRC2(rgSCHCmnUpdExtPhr);
18023 allRcd = (RgSchCmnAllocRecord *)node->node;
18025 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18027 rgSCHPwrUpdExtPhr(cell, ue, extPhr, allRcd);
18032 if(ulSpsUe->isUlSpsActv)
18034 rgSCHCmnSpsPhrInd(cell,ue);
18039 } /* rgSCHCmnUpdExtPhr */
18045 * @brief PHR update.
18049 * Function : rgSCHCmnUpdPhr
18051 * Updates power headroom information for a UE.
18053 * @param[in] RgSchCellCb *cell
18054 * @param[in] RgSchUeCb *ue
18055 * @param[in] U8 phr
18056 * @param[out] RgSchErrInfo *err
18062 PUBLIC S16 rgSCHCmnUpdPhr
18070 PUBLIC S16 rgSCHCmnUpdPhr(cell, ue, phr, err)
18077 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18078 RgSchCmnAllocRecord *allRcd;
18079 CmLList *node = ueUl->ulAllocLst.last;
18082 RgSchCmnUlUeSpsInfo *ulSpsUe = RG_SCH_CMN_GET_UL_SPS_UE(ue,cell);
18084 TRC2(rgSCHCmnUpdPhr);
18090 allRcd = (RgSchCmnAllocRecord *)node->node;
18092 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18094 rgSCHPwrUpdPhr(cell, ue, phr, allRcd, RG_SCH_CMN_PWR_USE_CFG_MAX_PWR);
18099 if(ulSpsUe->isUlSpsActv)
18101 rgSCHCmnSpsPhrInd(cell,ue);
18106 } /* rgSCHCmnUpdPhr */
18109 * @brief UL grant for contention resolution.
18113 * Function : rgSCHCmnContResUlGrant
18115 * Add UE to another queue specifically for CRNTI based contention resolution.
18119 * @param[in] RgSchUeCb *ue
18120 * @param[out] RgSchErrInfo *err
18126 PUBLIC S16 rgSCHCmnContResUlGrant
18133 PUBLIC S16 rgSCHCmnContResUlGrant(cell, ue, err)
18139 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18140 TRC2(rgSCHCmnContResUlGrant);
18143 if(cell->emtcEnable)
18147 cellSch->apisEmtcUl->rgSCHContResUlGrant(cell, ue);
18154 cellSch->apisUl->rgSCHContResUlGrant(cell, ue);
18160 * @brief SR reception handling.
18164 * Function : rgSCHCmnSrRcvd
18166 * - Update UE's position within/across uplink scheduling queues
18167 * - Update priority of LCGs if needed.
18169 * @param[in] RgSchCellCb *cell
18170 * @param[in] RgSchUeCb *ue
18171 * @param[in] CmLteTimingInfo frm
18172 * @param[out] RgSchErrInfo *err
18178 PUBLIC S16 rgSCHCmnSrRcvd
18182 CmLteTimingInfo frm,
18186 PUBLIC S16 rgSCHCmnSrRcvd(cell, ue, frm, err)
18189 CmLteTimingInfo frm;
18193 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
18194 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18195 CmLList *node = ueUl->ulAllocLst.last;
18197 TRC2(rgSCHCmnSrRcvd);
18200 emtcStatsUlTomSrInd++;
18203 RGSCH_INCR_SUB_FRAME(frm, 1); /* 1 TTI after the time SR was sent */
18206 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)node->node;
18207 if (RGSCH_TIMEINFO_SAME(frm, allRcd->allocTime))
18213 //TODO_SID Need to check when it is getting triggered
18214 ue->isSrGrant = TRUE;
18216 if(cell->emtcEnable)
18220 cellSch->apisEmtcUl->rgSCHSrRcvd(cell, ue);
18227 cellSch->apisUl->rgSCHSrRcvd(cell, ue);
18233 * @brief Returns first uplink allocation to send reception
18238 * Function: rgSCHCmnFirstRcptnReq(cell)
18239 * Purpose: This function returns the first uplink allocation
18240 * (or NULLP if there is none) in the subframe
18241 * in which reception requests are expected to be prepared and sent.
18246 * @param[in] RgSchCellCb *cell
18247 * @return RgSchUlAlloc*
18250 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq
18255 PUBLIC RgSchUlAlloc *rgSCHCmnFirstRcptnReq(cell)
18259 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18261 RgSchUlAlloc* alloc = NULLP;
18263 TRC2(rgSCHCmnFirstRcptnReq);
18265 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18267 RgSchUlSf* sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18268 alloc = rgSCHUtlUlAllocFirst(sf);
18270 if (alloc && alloc->hqProc == NULLP)
18272 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18280 * @brief Returns first uplink allocation to send reception
18285 * Function: rgSCHCmnNextRcptnReq(cell)
18286 * Purpose: This function returns the next uplink allocation
18287 * (or NULLP if there is none) in the subframe
18288 * in which reception requests are expected to be prepared and sent.
18293 * @param[in] RgSchCellCb *cell
18294 * @return RgSchUlAlloc*
18297 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq
18300 RgSchUlAlloc *alloc
18303 PUBLIC RgSchUlAlloc *rgSCHCmnNextRcptnReq(cell, alloc)
18305 RgSchUlAlloc *alloc;
18308 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18310 //RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18312 TRC2(rgSCHCmnNextRcptnReq);
18314 if (cellUl->rcpReqIdx != RGSCH_INVALID_INFO)
18316 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->rcpReqIdx];
18318 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18319 if (alloc && alloc->hqProc == NULLP)
18321 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18332 * @brief Collates DRX enabled UEs scheduled in this SF
18336 * Function: rgSCHCmnDrxStrtInActvTmrInUl(cell)
18337 * Purpose: This function collates the list
18338 * of UEs scheduled in this SF that
18339 * have DRX enabled. It then calls the
18340 * DRX specific function to start/restart
18341 * the inactivity timer in UL
18345 * @param[in] RgSchCellCb *cell
18349 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl
18354 PUBLIC Void rgSCHCmnDrxStrtInActvTmrInUl(cell)
18358 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18359 RgSchUlSf *sf = &(cellUl->ulSfArr[cellUl->schdIdx]);
18360 RgSchUlAlloc *alloc = rgSCHUtlUlAllocFirst(sf);
18365 TRC2(rgSCHCmnDrxStrtInActvTmrInUl);
18367 cmLListInit(&ulUeLst);
18375 if (!(alloc->grnt.isRtx) && ueCb->isDrxEnabled && !(ueCb->isSrGrant)
18377 /* ccpu00139513- DRX inactivity timer should not be started for
18378 * UL SPS occasions */
18379 && (alloc->hqProc->isSpsOccnHqP == FALSE)
18383 cmLListAdd2Tail(&ulUeLst,&(ueCb->ulDrxInactvTmrLnk));
18384 ueCb->ulDrxInactvTmrLnk.node = (PTR)ueCb;
18388 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18391 (Void)rgSCHDrxStrtInActvTmr(cell,&ulUeLst,RG_SCH_DRX_UL);
18398 * @brief Returns first uplink allocation to send HARQ feedback
18403 * Function: rgSCHCmnFirstHqFdbkAlloc
18404 * Purpose: This function returns the first uplink allocation
18405 * (or NULLP if there is none) in the subframe
18406 * for which it is expected to prepare and send HARQ
18411 * @param[in] RgSchCellCb *cell
18412 * @param[in] U8 idx
18413 * @return RgSchUlAlloc*
18416 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc
18422 PUBLIC RgSchUlAlloc *rgSCHCmnFirstHqFdbkAlloc(cell, idx)
18427 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18429 RgSchUlAlloc *alloc = NULLP;
18431 TRC2(rgSCHCmnFirstHqFdbkAlloc);
18433 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18435 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18436 alloc = rgSCHUtlUlAllocFirst(sf);
18438 while (alloc && (alloc->hqProc == NULLP))
18440 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18448 * @brief Returns next allocation to send HARQ feedback for.
18452 * Function: rgSCHCmnNextHqFdbkAlloc(cell)
18453 * Purpose: This function returns the next uplink allocation
18454 * (or NULLP if there is none) in the subframe
18455 * for which HARQ feedback needs to be sent.
18459 * @param[in] RgSchCellCb *cell
18460 * @return RgSchUlAlloc*
18463 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc
18466 RgSchUlAlloc *alloc,
18470 PUBLIC RgSchUlAlloc *rgSCHCmnNextHqFdbkAlloc(cell, alloc, idx)
18472 RgSchUlAlloc *alloc;
18476 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18477 TRC2(rgSCHCmnNextHqFdbkAlloc);
18479 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
18481 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
18483 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18484 while (alloc && (alloc->hqProc == NULLP))
18486 alloc = rgSCHUtlUlAllocNxt(sf, alloc);
18496 /***********************************************************
18498 * Func : rgSCHCmnUlGetITbsFrmIMcs
18500 * Desc : Returns the Itbs that is mapped to an Imcs
18501 * for the case of uplink.
18509 **********************************************************/
18511 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs
18516 PUBLIC U8 rgSCHCmnUlGetITbsFrmIMcs(iMcs)
18520 TRC2(rgSCHCmnUlGetITbsFrmIMcs);
18522 return (rgUlIMcsTbl[iMcs].iTbs);
18525 /***********************************************************
18527 * Func : rgSCHCmnUlGetIMcsFrmITbs
18529 * Desc : Returns the Imcs that is mapped to an Itbs
18530 * for the case of uplink.
18534 * Notes: For iTbs 19, iMcs is dependent on the modulation order.
18535 * Refer to 36.213, Table 8.6.1-1 and 36.306 Table 4.1-2
18536 * for UE capability information
18540 **********************************************************/
18542 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs
18545 CmLteUeCategory ueCtg
18548 PUBLIC U8 rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg)
18550 CmLteUeCategory ueCtg;
18554 TRC2(rgSCHCmnUlGetIMcsFrmITbs);
18560 /* A higher layer can force a 64QAM UE to transmit at 16QAM.
18561 * We currently do not support this. Once such support
18562 * is added, ueCtg should be replaced by the current transmit
18563 * modulation configuration. Refer to 36.213, Section 8.6.1
18565 else if ( iTbs < 19 )
18569 else if ((iTbs == 19) && (ueCtg != CM_LTE_UE_CAT_5))
18579 /* This is a Temp fix, done for TENBPLUS-3898, ULSCH SDU corruption
18580 was seen when IMCS exceeds 20 on T2k TDD*/
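/* Illustrative inverse mapping based on 36.213 Table 8.6.1-1 (the concrete
 * return values of rgSCHCmnUlGetIMcsFrmITbs are not visible in this
 * listing). For QPSK, iMCS equals iTBS; for 16QAM, iMCS = iTBS + 1; for
 * 64QAM (category 5 capable UEs), iMCS = iTBS + 2, which is why iTBS 19
 * maps to a different iMCS depending on the modulation order. */
static unsigned char iMcsFromITbsSketch(unsigned char iTbs, int ueSupports64Qam)
{
   if (iTbs <= 10)
   {
      return iTbs;                       /* QPSK region */
   }
   if ((iTbs < 19) || ((iTbs == 19) && !ueSupports64Qam))
   {
      return (unsigned char)(iTbs + 1);  /* 16QAM region */
   }
   return (unsigned char)(iTbs + 2);     /* 64QAM region (up to iMCS 28) */
}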
18590 /***********************************************************
18592 * Func : rgSCHCmnUlMinTbBitsForITbs
18594 * Desc : Returns the minimum number of bits that can
18595 * be given as grant for a specific CQI.
18603 **********************************************************/
18605 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs
18607 RgSchCmnUlCell *cellUl,
18611 PUBLIC U32 rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs)
18612 RgSchCmnUlCell *cellUl;
18616 TRC2(rgSCHCmnUlMinTbBitsForITbs);
18618 RGSCH_ARRAY_BOUND_CHECK(0, rgTbSzTbl[0], iTbs);
18620 return (rgTbSzTbl[0][iTbs][cellUl->sbSize-1]);
18623 /***********************************************************
18625 * Func : rgSCHCmnUlSbAlloc
18627 * Desc : Given a required 'number of subbands' and a hole,
18628 * returns a suitable alloc such that the subband
18629 * allocation size is valid
18633 * Notes: Does not assume either passed numSb or hole size
18634 * to be valid for allocation, and hence arrives at
18635 * an acceptable value.
18638 **********************************************************/
18640 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc
18647 PUBLIC RgSchUlAlloc *rgSCHCmnUlSbAlloc(sf, numSb, hole)
18653 U8 holeSz; /* valid hole size */
18654 RgSchUlAlloc *alloc;
18655 TRC2(rgSCHCmnUlSbAlloc);
18657 if ((holeSz = rgSchCmnMult235Tbl[hole->num].prvMatch) == hole->num)
18659 numSb = rgSchCmnMult235Tbl[numSb].match;
18660 if (numSb >= holeSz)
18662 alloc = rgSCHUtlUlAllocGetCompHole(sf, hole);
18666 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
18671 if (numSb < holeSz)
18673 numSb = rgSchCmnMult235Tbl[numSb].match;
18677 numSb = rgSchCmnMult235Tbl[numSb].prvMatch;
18680 if ( numSb >= holeSz )
18684 alloc = rgSCHUtlUlAllocGetPartHole(sf, numSb, hole);
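/* Self-contained sketch of the "multiple of 2, 3 and 5" rule behind the
 * rgSchCmnMult235Tbl lookups above: a PUSCH allocation must span a number
 * of subbands whose only prime factors are 2, 3 and 5 (a DFT-size
 * constraint), so a requested size is snapped to a valid value that still
 * fits the hole. The scheduler precomputes this in a table; the loop below
 * is an equivalent on-the-fly check for illustration only. */
static int is235SmoothSketch(unsigned int n)
{
   if (n == 0)
   {
      return 0;
   }
   while ((n % 2) == 0) n /= 2;
   while ((n % 3) == 0) n /= 3;
   while ((n % 5) == 0) n /= 5;
   return (n == 1);
}

/* Largest valid (2,3,5-smooth) subband count not exceeding maxSb */
static unsigned int prevValidSbCountSketch(unsigned int maxSb)
{
   while ((maxSb > 1) && !is235SmoothSketch(maxSb))
   {
      maxSb--;
   }
   return maxSb;
}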
18690 * @brief To fill the RgSchCmnUeUlAlloc structure of UeCb.
18694 * Function: rgSCHCmnUlUeFillAllocInfo
18695 * Purpose: Specific scheduler to call this API to fill the alloc
18698 * Invoked by: Scheduler
18700 * @param[in] RgSchCellCb *cell
18701 * @param[out] RgSchUeCb *ue
18705 PUBLIC Void rgSCHCmnUlUeFillAllocInfo
18711 PUBLIC Void rgSCHCmnUlUeFillAllocInfo(cell, ue)
18716 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18717 RgSchCmnUeUlAlloc *ulAllocInfo;
18718 RgSchCmnUlUe *ueUl;
18720 TRC2(rgSCHCmnUlUeFillAllocInfo);
18722 ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18723 ulAllocInfo = &ueUl->alloc;
18725 /* Fill alloc structure */
18726 rgSCHCmnUlAllocFillTpc(cell, ue, ulAllocInfo->alloc);
18727 rgSCHCmnUlAllocFillNdmrs(cellUl, ulAllocInfo->alloc);
18728 rgSCHCmnUlAllocLnkHqProc(ue, ulAllocInfo->alloc, ulAllocInfo->alloc->hqProc,
18729 ulAllocInfo->alloc->hqProc->isRetx);
18731 rgSCHCmnUlFillPdcchWithAlloc(ulAllocInfo->alloc->pdcch,
18732 ulAllocInfo->alloc, ue);
18733 /* Recording information about this allocation */
18734 rgSCHCmnUlRecordUeAlloc(cell, ue);
18736 /* Update the UE's outstanding allocation */
18737 if (!ulAllocInfo->alloc->hqProc->isRetx)
18739 rgSCHCmnUlUpdOutStndAlloc(cell, ue, ulAllocInfo->allocdBytes);
18746 * @brief Update the UEs outstanding alloc based on the BSR report's timing.
18751 * Function: rgSCHCmnUpdUlCompEffBsr
18752 * Purpose: Clear off all the allocations from outstanding allocation that
18753 * are later than or equal to BSR timing information (stored in UEs datIndTime).
18755 * Invoked by: Scheduler
18757 * @param[in] RgSchUeCb *ue
18761 PRIVATE Void rgSCHCmnUpdUlCompEffBsr
18766 PRIVATE Void rgSCHCmnUpdUlCompEffBsr(ue)
18770 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,ue->cell);
18771 CmLList *node = ueUl->ulAllocLst.last;
18772 RgSchCmnAllocRecord *allRcd;
18773 U32 outStndAlloc=0;
18774 U32 nonLcg0OutStndAllocBs=0;
18777 RgSchCmnLcg *cmnLcg = NULLP;
18778 TRC2(rgSCHCmnUpdUlCompEffBsr);
18782 allRcd = (RgSchCmnAllocRecord *)node->node;
18783 if (RGSCH_TIMEINFO_SAME(ue->macCeRptTime, allRcd->allocTime))
18792 allRcd = (RgSchCmnAllocRecord *)node->node;
18794 outStndAlloc += allRcd->alloc;
18797 cmnLcg = (RgSchCmnLcg *)(ue->ul.lcgArr[0].sch);
18798 /* Update UEs LCG0's bs according to the total outstanding BSR allocation.*/
18799 if (cmnLcg->bs > outStndAlloc)
18801 cmnLcg->bs -= outStndAlloc;
18802 ue->ul.minReqBytes = cmnLcg->bs;
18807 nonLcg0OutStndAllocBs = outStndAlloc - cmnLcg->bs;
18811 for(lcgId = 1;lcgId < RGSCH_MAX_LCG_PER_UE; lcgId++)
18813 if(RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
18815 cmnLcg = ((RgSchCmnLcg *) (ue->ul.lcgArr[lcgId].sch));
18816 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
18818 nonLcg0Bsr += cmnLcg->bs;
18822 nonLcg0Bsr += ue->ul.nonGbrLcgBs;
18823 if (nonLcg0OutStndAllocBs > nonLcg0Bsr)
18829 nonLcg0Bsr -= nonLcg0OutStndAllocBs;
18831 ue->ul.nonLcg0Bs = nonLcg0Bsr;
18832 /* Cap effBsr with nonLcg0Bsr and append lcg0 bs.
18833 * nonLcg0Bsr limit applies only to lcg1,2,3 */
18834 /* better be handled in individual scheduler */
18835 ue->ul.effBsr = nonLcg0Bsr +\
18836 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
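/* Sketch of the outstanding-allocation adjustment above: grants given since
 * the last BSR are summed and deducted first from LCG0's buffer estimate,
 * then from the non-LCG0 aggregate, and the effective BSR is what remains.
 * Types and names here are hypothetical. */
typedef struct
{
   unsigned int lcg0Bs;       /* LCG0 buffer estimate (bytes)       */
   unsigned int nonLcg0Bs;    /* capped LCG1..3 aggregate (bytes)   */
} EffBsrSketch;

static unsigned int effBsrAfterAllocsSketch(EffBsrSketch *ue,
                                            const unsigned int *allocBytes,
                                            unsigned int numAllocsSinceBsr)
{
   unsigned int i, outstanding = 0;

   for (i = 0; i < numAllocsSinceBsr; i++)
   {
      outstanding += allocBytes[i];      /* grants newer than the BSR */
   }
   if (outstanding <= ue->lcg0Bs)
   {
      ue->lcg0Bs -= outstanding;
   }
   else
   {
      unsigned int spill = outstanding - ue->lcg0Bs;
      ue->lcg0Bs = 0;
      ue->nonLcg0Bs = (spill >= ue->nonLcg0Bs) ? 0 : (ue->nonLcg0Bs - spill);
   }
   return ue->nonLcg0Bs + ue->lcg0Bs;    /* effective BSR */
}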
18841 * @brief Records information about the current allocation.
18845 * Function: rgSCHCmnUlRecordUeAlloc
18846 * Purpose: Records information about the current allocation.
18847 * This includes the allocated bytes, as well
18848 * as some power information.
18850 * Invoked by: Scheduler
18852 * @param[in] RgSchCellCb *cell
18853 * @param[in] RgSchUeCb *ue
18857 PUBLIC Void rgSCHCmnUlRecordUeAlloc
18863 PUBLIC Void rgSCHCmnUlRecordUeAlloc(cell, ue)
18869 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
18871 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18872 CmLListCp *lst = &ueUl->ulAllocLst;
18873 CmLList *node = ueUl->ulAllocLst.first;
18874 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18875 RgSchCmnUeUlAlloc *ulAllocInfo = &ueUl->alloc;
18876 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
18877 TRC2(rgSCHCmnUlRecordUeAlloc);
18879 cmLListDelFrm(lst, &allRcd->lnk);
18881 /* To the crntTime, add the MIN time at which UE will
18882 * actually send the BSR, i.e. DELTA+4 */
18883 allRcd->allocTime = cell->crntTime;
18884 /*ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
18886 if(ue->isEmtcUe == TRUE)
18888 RGSCH_INCR_SUB_FRAME_EMTC(allRcd->allocTime,
18889 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18894 RGSCH_INCR_SUB_FRAME(allRcd->allocTime,
18895 (TFU_ULCNTRL_DLDELTA + RGSCH_PDCCH_PUSCH_DELTA));
18898 allRcd->allocTime = cellUl->schdTime;
18900 cmLListAdd2Tail(lst, &allRcd->lnk);
18902 /* Filling in the parameters to be recorded */
18903 allRcd->alloc = ulAllocInfo->allocdBytes;
18904 //allRcd->numRb = ulAllocInfo->alloc->grnt.numRb;
18905 allRcd->numRb = (ulAllocInfo->alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
18906 /*Recording the UL CQI derived from the maxUlCqi */
18907 allRcd->cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
18908 allRcd->tpc = ulAllocInfo->alloc->grnt.tpc;
18910 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18912 cell->measurements.ulBytesCnt += ulAllocInfo->allocdBytes;
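/* Worked timing example with assumed deltas (TFU_ULCNTRL_DLDELTA = 2,
 * RGSCH_PDCCH_PUSCH_DELTA = 2; the actual macros depend on the build): for
 * crntTime = {sfn 100, subframe 8} the recorded allocTime becomes
 * {sfn 101, subframe 2}, the earliest subframe whose BSR can already
 * account for this grant. */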
18917 /** PHR handling for MSG3
18918 * @brief Records allocation information of msg3 in the UE.
18922 * Function: rgSCHCmnUlRecMsg3Alloc
18923 * Purpose: Records information about msg3 allocation.
18924 * This includes the allocated bytes, as well
18925 * as some power information.
18927 * Invoked by: Scheduler
18929 * @param[in] RgSchCellCb *cell
18930 * @param[in] RgSchUeCb *ue
18931 * @param[in] RgSchRaCb *raCb
18935 PUBLIC Void rgSCHCmnUlRecMsg3Alloc
18942 PUBLIC Void rgSCHCmnUlRecMsg3Alloc(cell, ue, raCb)
18948 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
18949 CmLListCp *lst = &ueUl->ulAllocLst;
18950 CmLList *node = ueUl->ulAllocLst.first;
18951 RgSchCmnAllocRecord *allRcd = (RgSchCmnAllocRecord *)(node->node);
18953 /* Stack Crash problem for TRACE5 changes */
18954 TRC2(rgSCHCmnUlRecMsg3Alloc);
18956 cmLListDelFrm(lst, node);
18957 allRcd->allocTime = raCb->msg3AllocTime;
18958 cmLListAdd2Tail(lst, node);
18960 /* Filling in the parameters to be recorded */
18961 allRcd->alloc = raCb->msg3Grnt.datSz;
18962 allRcd->numRb = raCb->msg3Grnt.numRb;
18963 allRcd->cqi = raCb->ccchCqi;
18964 allRcd->tpc = raCb->msg3Grnt.tpc;
18966 rgSCHPwrRecordRbAlloc(cell, ue, allRcd->numRb);
18971 * @brief Keeps track of the most recent RG_SCH_CMN_MAX_ALLOC_TRACK
18972 * allocations. Adds this allocation to the ueUl's ulAllocLst.
18977 * Function: rgSCHCmnUlUpdOutStndAlloc
18978 * Purpose: Recent Allocation shall be at First Pos'n.
18979 * Remove the last node, update the fields
18980 * with the new allocation and add at front.
18982 * Invoked by: Scheduler
18984 * @param[in] RgSchCellCb *cell
18985 * @param[in] RgSchUeCb *ue
18986 * @param[in] U32 alloc
18990 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc
18997 PUBLIC Void rgSCHCmnUlUpdOutStndAlloc(cell, ue, alloc)
19003 U32 nonLcg0Alloc=0;
19004 TRC2(rgSCHCmnUlUpdOutStndAlloc);
19006 /* Update the UE's LCG0 bs according to the total outstanding BSR allocation.*/
19007 if (((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs > alloc)
19009 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs -= alloc;
19013 nonLcg0Alloc = alloc - ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
19014 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs = 0;
19017 if (nonLcg0Alloc >= ue->ul.nonLcg0Bs)
19019 ue->ul.nonLcg0Bs = 0;
19023 ue->ul.nonLcg0Bs -= nonLcg0Alloc;
19025 /* Cap effBsr with effAmbr and append lcg0 bs.
19026 * effAmbr limit applies only to lcg1,2,3 non GBR LCG's*/
19027 /* better be handled in individual scheduler */
19028 ue->ul.effBsr = ue->ul.nonLcg0Bs +\
19029 ((RgSchCmnLcg *)(ue->ul.lcgArr[0].sch))->bs;
19031 if (ue->ul.effBsr == 0)
19033 if (ue->bsrTmr.tmrEvnt != TMR_NONE)
19035 rgSCHTmrStopTmr(cell, ue->bsrTmr.tmrEvnt, ue);
19038 if (FALSE == ue->isSrGrant)
19040 if (ue->ul.bsrTmrCfg.isPrdBsrTmrPres)
19043 rgSCHTmrStartTmr(cell, ue, RG_SCH_TMR_BSR,
19044 ue->ul.bsrTmrCfg.prdBsrTmr);
19050 /* Resetting UEs lower Cap */
19051 ue->ul.minReqBytes = 0;
19058 * @brief Returns the "Itbs" for a given UE.
19062 * Function: rgSCHCmnUlGetITbs
19063 * Purpose: This function returns the "Itbs" for a given UE.
19065 * Invoked by: Scheduler
19067 * @param[in] RgSchUeCb *ue
19071 PUBLIC U8 rgSCHCmnUlGetITbs
19078 PUBLIC U8 rgSCHCmnUlGetITbs(cell, ue, isEcp)
19084 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
19085 /* CQI will be capped to maxUlCqi for 16qam UEs */
19086 CmLteUeCategory ueCtgy = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
19090 U8 maxiTbs = rgSchCmnUlCqiToTbsTbl[(U8)isEcp][ueUl->maxUlCqi];
19093 TRC2(rgSCHCmnUlGetITbs);
19095 /* #ifdef RG_SCH_CMN_EXT_CP_SUP For ECP pick index 1 */
19097 if ( (ueCtgy != CM_LTE_UE_CAT_5) &&
19098 (ueUl->validUlCqi > ueUl->maxUlCqi)
19101 cqi = ueUl->maxUlCqi;
19105 cqi = ueUl->validUlCqi;
19109 iTbs = (ueUl->ulLaCb.cqiBasediTbs + ueUl->ulLaCb.deltaiTbs)/100;
19111 RG_SCH_CHK_ITBS_RANGE(iTbs, maxiTbs);
19113 iTbs = RGSCH_MIN(iTbs, ue->cell->thresholds.maxUlItbs);
19116 /* This is a Temp fix, done for TENBPLUS-3898, ULSCH SDU corruption
19117 was seen when IMCS exceeds 20 on T2k TDD */
19126 if ( (ueCtgy != CM_LTE_UE_CAT_5) && (ueUl->crntUlCqi[0] > ueUl->maxUlCqi ))
19128 cqi = ueUl->maxUlCqi;
19132 cqi = ueUl->crntUlCqi[0];
19135 return (rgSchCmnUlCqiToTbsTbl[(U8)isEcp][cqi]);
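/* Worked example with hypothetical link-adaptation state: with
 * cqiBasediTbs = 1500 and deltaiTbs = -200 the computed iTbs is
 * (1500 - 200)/100 = 13, which is then clipped to maxiTbs (from maxUlCqi)
 * and to cell->thresholds.maxUlItbs before being used for the grant. */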
19139 * @brief This function adds the UE to DLRbAllocInfo TX lst.
19143 * Function: rgSCHCmnDlRbInfoAddUeTx
19144 * Purpose: This function adds the UE to DLRbAllocInfo TX lst.
19146 * Invoked by: Common Scheduler
19148 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19149 * @param[in] RgSchUeCb *ue
19150 * @param[in] RgSchDlHqProcCb *hqP
19155 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx
19158 RgSchCmnDlRbAllocInfo *allocInfo,
19160 RgSchDlHqProcCb *hqP
19163 PRIVATE Void rgSCHCmnDlRbInfoAddUeTx(cell, allocInfo, ue, hqP)
19165 RgSchCmnDlRbAllocInfo *allocInfo;
19167 RgSchDlHqProcCb *hqP;
19170 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
19172 TRC2(rgSCHCmnDlRbInfoAddUeTx);
19174 if (hqP->reqLnk.node == NULLP)
19176 if (cellSch->dl.isDlFreqSel)
19178 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19179 &allocInfo->dedAlloc.txHqPLst, hqP);
19184 cmLListAdd2Tail(&allocInfo->dedAlloc.txHqPLst, &hqP->reqLnk);
19186 hqP->reqLnk.node = (PTR)hqP;
19193 * @brief This function adds the UE to DLRbAllocInfo RETX lst.
19197 * Function: rgSCHCmnDlRbInfoAddUeRetx
19198 * Purpose: This function adds the UE to DLRbAllocInfo RETX lst.
19200 * Invoked by: Common Scheduler
19202 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19203 * @param[in] RgSchUeCb *ue
19204 * @param[in] RgSchDlHqProcCb *hqP
19209 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx
19212 RgSchCmnDlRbAllocInfo *allocInfo,
19214 RgSchDlHqProcCb *hqP
19217 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetx(cell, allocInfo, ue, hqP)
19219 RgSchCmnDlRbAllocInfo *allocInfo;
19221 RgSchDlHqProcCb *hqP;
19224 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19226 TRC2(rgSCHCmnDlRbInfoAddUeRetx);
19228 if (cellSch->dl.isDlFreqSel)
19230 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19231 &allocInfo->dedAlloc.retxHqPLst, hqP);
19235 /* checking UE's presence in this lst is unnecessary */
19236 cmLListAdd2Tail(&allocInfo->dedAlloc.retxHqPLst, &hqP->reqLnk);
19237 hqP->reqLnk.node = (PTR)hqP;
19243 * @brief This function adds the UE to DLRbAllocInfo TX-RETX lst.
19247 * Function: rgSCHCmnDlRbInfoAddUeRetxTx
19248 * Purpose: This adds the UE to DLRbAllocInfo TX-RETX lst.
19250 * Invoked by: Common Scheduler
19252 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19253 * @param[in] RgSchUeCb *ue
19254 * @param[in] RgSchDlHqProcCb *hqP
19259 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx
19262 RgSchCmnDlRbAllocInfo *allocInfo,
19264 RgSchDlHqProcCb *hqP
19267 PRIVATE Void rgSCHCmnDlRbInfoAddUeRetxTx(allocInfo, ue, hqP)
19269 RgSchCmnDlRbAllocInfo *allocInfo;
19271 RgSchDlHqProcCb *hqP;
19274 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(ue->cell);
19276 TRC2(rgSCHCmnDlRbInfoAddUeRetxTx);
19278 if (cellSch->dl.isDlFreqSel)
19280 cellSch->apisDlfs->rgSCHDlfsAddUeToLst(cell,
19281 &allocInfo->dedAlloc.txRetxHqPLst, hqP);
19285 cmLListAdd2Tail(&allocInfo->dedAlloc.txRetxHqPLst, &hqP->reqLnk);
19286 hqP->reqLnk.node = (PTR)hqP;
19292 * @brief This function adds the UE to DLRbAllocInfo NonSchdRetxLst.
19296 * Function: rgSCHCmnDlAdd2NonSchdRetxLst
19297 * Purpose: During RB estimation for RETX, if allocation fails,
19298 * the HARQ proc is appended to NonSchdRetxLst; further
19299 * action is taken as part of finalization in the
19300 * respective schedulers.
19302 * Invoked by: Common Scheduler
19304 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19305 * @param[in] RgSchUeCb *ue
19306 * @param[in] RgSchDlHqProcCb *hqP
19311 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst
19313 RgSchCmnDlRbAllocInfo *allocInfo,
19315 RgSchDlHqProcCb *hqP
19318 PRIVATE Void rgSCHCmnDlAdd2NonSchdRetxLst(allocInfo, ue, hqP)
19319 RgSchCmnDlRbAllocInfo *allocInfo;
19321 RgSchDlHqProcCb *hqP;
19324 CmLList *schdLnkNode;
19326 TRC2(rgSCHCmnDlAdd2NonSchdRetxLst);
19329 if ( (hqP->sch != (RgSchCmnDlHqProc *)NULLP) &&
19330 (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP)))
19336 schdLnkNode = &hqP->schdLstLnk;
19337 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
19338 cmLListAdd2Tail(&allocInfo->dedAlloc.nonSchdRetxHqPLst, schdLnkNode);
19346 * @brief This function adds the UE to DLRbAllocInfo NonSchdTxRetxLst.
19350 * Function: rgSCHCmnDlAdd2NonSchdTxRetxLst
19351 * Purpose: During RB estimation for TXRETX, if allocation fails,
19352 * the HARQ proc is appended to NonSchdTxRetxLst; further
19353 * action is taken as part of finalization in the
19354 * respective schedulers.
19356 * Invoked by: Common Scheduler
19358 * @param[out] RgSchCmnDlRbAllocInfo *allocInfo
19359 * @param[in] RgSchUeCb *ue
19360 * @param[in] RgSchDlHqProcCb *hqP
19366 * @brief This function handles the initialisation of DL HARQ/ACK feedback
19367 * timing information for each DL subframe.
19371 * Function: rgSCHCmnDlANFdbkInit
19372 * Purpose: Each DL subframe stores the sfn and subframe
19373 * information of UL subframe in which it expects
19374 * HARQ ACK/NACK feedback for this subframe.It
19375 * generates the information based on Downlink
19376 * Association Set Index table.
19378 * Invoked by: Scheduler
19380 * @param[in] RgSchCellCb* cell
19385 PRIVATE S16 rgSCHCmnDlANFdbkInit
19390 PRIVATE S16 rgSCHCmnDlANFdbkInit(cell)
19395 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19396 U8 maxDlSubfrms = cell->numDlSubfrms;
19403 RgSchTddSubfrmInfo ulSubfrmInfo;
19406 TRC2(rgSCHCmnDlANFdbkInit);
19408 ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19409 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19411 /* Generate HARQ ACK/NACK feedback information for each DL sf in a radio frame
19412 * Calculate this information based on DL Association set Index table */
19413 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19415 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19416 RG_SCH_TDD_UL_SUBFRAME)
19418 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19422 for(idx=0; idx < rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19423 numFdbkSubfrms; idx++)
19425 calcSfNum = sfNum - rgSchTddDlAscSetIdxKTbl[ulDlCfgIdx][sfNum].\
19429 calcSfnOffset = RGSCH_CEIL(-calcSfNum, RGSCH_NUM_SUB_FRAMES);
19436 calcSfNum = ((RGSCH_NUM_SUB_FRAMES * calcSfnOffset) + calcSfNum)\
19437 % RGSCH_NUM_SUB_FRAMES;
19439 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19443 else if((ulSubfrmInfo.switchPoints == 2) && (calcSfNum <= \
19444 RG_SCH_CMN_SPL_SUBFRM_6))
19446 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19450 dlIdx = calcSfNum - maxUlSubfrms;
19453 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = sfNum;
19454 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = calcSfnOffset;
19455 cell->subFrms[dlIdx]->dlFdbkInfo.m = idx;
19457 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19460 /* DL subframes in the subsequent radio frames are initialized
19461 * with the previous radio frames */
19462 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;\
19465 sfNum = dlIdx - rgSchTddNumDlSubfrmTbl[ulDlCfgIdx]\
19466 [RGSCH_NUM_SUB_FRAMES-1];
19467 cell->subFrms[dlIdx]->dlFdbkInfo.subframe = \
19468 cell->subFrms[sfNum]->dlFdbkInfo.subframe;
19469 cell->subFrms[dlIdx]->dlFdbkInfo.sfnOffset = \
19470 cell->subFrms[sfNum]->dlFdbkInfo.sfnOffset;
19471 cell->subFrms[dlIdx]->dlFdbkInfo.m = cell->subFrms[sfNum]->dlFdbkInfo.m;
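/* Illustrative walk-through with assumed K values: if the DL association set
 * for UL subframe 2 were {7, 6}, then 2-7 = -5 wraps (sfnOffset = 1) to DL
 * subframe 5 and 2-6 = -4 wraps to DL subframe 6 of the previous radio frame;
 * both record UL subframe 2 (with m = 0 and 1) as the subframe in which their
 * HARQ ACK/NACK feedback is expected. The real K values come from
 * rgSchTddDlAscSetIdxKTbl for the configured ulDlCfgIdx. */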
19477 * @brief This function handles the initialization of uplink association
19478 * set information for each DL subframe.
19483 * Function: rgSCHCmnDlKdashUlAscInit
19484 * Purpose: Each DL sf stores the sfn and sf information of UL sf
19485 * in which it expects HQ ACK/NACK trans. It generates the information
19486 * based on k` in UL association set index table.
19488 * Invoked by: Scheduler
19490 * @param[in] RgSchCellCb* cell
19495 PRIVATE S16 rgSCHCmnDlKdashUlAscInit
19500 PRIVATE S16 rgSCHCmnDlKdashUlAscInit(cell)
19505 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19506 U8 maxDlSubfrms = cell->numDlSubfrms;
19512 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19513 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19514 [RGSCH_NUM_SUB_FRAMES-1];
19517 TRC2(rgSCHCmnDlKdashUlAscInit);
19519 /* Generate ACK/NACK offset information for each DL subframe in a radio frame
19520 * Calculate this information based on K` in UL Association Set table */
19521 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19523 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19524 RG_SCH_TDD_UL_SUBFRAME)
19526 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19530 calcSfNum = (sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum] + \
19531 RGSCH_NUM_SUB_FRAMES) % RGSCH_NUM_SUB_FRAMES;
19532 calcSfnOffset = sfNum - rgSchTddUlAscIdxKDashTbl[ulDlCfgIdx-1][sfNum];
19533 if(calcSfnOffset < 0)
19535 calcSfnOffset = RGSCH_CEIL(-calcSfnOffset, RGSCH_NUM_SUB_FRAMES);
19542 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19546 else if((ulSubfrmInfo.switchPoints == 2) &&
19547 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19549 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19553 dlIdx = calcSfNum - maxUlSubfrms;
19556 cell->subFrms[dlIdx]->ulAscInfo.subframe = sfNum;
19557 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset = calcSfnOffset;
19559 /* set dlIdx for which ulAscInfo is updated */
19560 dlPres = dlPres | (1 << dlIdx);
19561 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19564 /* Set Invalid information for which ulAscInfo is not present */
19566 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19569 /* If dlPres is 0, ulAscInfo is not present in that DL index */
19570 if(! ((dlPres >> sfCount)&0x01))
19572 cell->subFrms[sfCount]->ulAscInfo.sfnOffset =
19573 RGSCH_INVALID_INFO;
19574 cell->subFrms[sfCount]->ulAscInfo.subframe =
19575 RGSCH_INVALID_INFO;
19579 /* DL subframes in the subsequent radio frames are initialized
19580 * with the previous radio frames */
19581 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms; dlIdx < maxDlSubfrms;
19585 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19586 cell->subFrms[dlIdx]->ulAscInfo.subframe =
19587 cell->subFrms[sfNum]->ulAscInfo.subframe;
19588 cell->subFrms[dlIdx]->ulAscInfo.sfnOffset =
19589 cell->subFrms[sfNum]->ulAscInfo.sfnOffset;
19596 * @brief This function initialises the 'Np' value for 'p'
19600 * Function: rgSCHCmnDlNpValInit
19601 * Purpose: To initialise the 'Np' value for each 'p'. It is used
19602 * to find the mapping between nCCE and 'p' and used in
19603 * HARQ ACK/NACK reception.
19605 * Invoked by: Scheduler
19607 * @param[in] RgSchCellCb* cell
19612 PRIVATE S16 rgSCHCmnDlNpValInit
19617 PRIVATE S16 rgSCHCmnDlNpValInit(cell)
19623 TRC2(rgSCHCmnDlNpValInit);
19625 /* Always Np is 0 for p=0 */
19626 cell->rgSchTddNpValTbl[0] = 0;
19628 for(idx=1; idx < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; idx++)
19630 np = cell->bwCfg.dlTotalBw * (idx * RG_SCH_CMN_NUM_SUBCAR - 4);
19631 cell->rgSchTddNpValTbl[idx] = (U8) (np/36);
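/* Sketch of the resulting table for a 50-PRB (10 MHz) cell, assuming
 * RG_SCH_CMN_NUM_SUBCAR is 12 subcarriers per RB:
 *   Np(0) = 0
 *   Np(1) = 50*(12*1 - 4)/36 = 400/36  = 11
 *   Np(2) = 50*(12*2 - 4)/36 = 1000/36 = 27
 *   Np(3) = 50*(12*3 - 4)/36 = 1600/36 = 44
 * A PDCCH whose first CCE index lies between Np(p) and Np(p+1)-1 then maps
 * to 'p' when deriving the HARQ ACK/NACK resource (see
 * rgSCHCmnGetPValFrmCCE below). */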
19638 * @brief This function handles the creation of RACH preamble
19639 * list to queue the preambles and process at the scheduled
19644 * Function: rgSCHCmnDlCreateRachPrmLst
19645 * Purpose: To create RACH preamble list based on RA window size.
19646 * It is used to queue the preambles and process it at the
19649 * Invoked by: Scheduler
19651 * @param[in] RgSchCellCb* cell
19656 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst
19661 PRIVATE S16 rgSCHCmnDlCreateRachPrmLst(cell)
19669 TRC2(rgSCHCmnDlCreateRachPrmLst);
19671 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19673 lstSize = raArrSz * RGSCH_MAX_RA_RNTI_PER_SUBFRM * RGSCH_NUM_SUB_FRAMES;
19675 cell->raInfo.maxRaSize = raArrSz;
19676 ret = rgSCHUtlAllocSBuf(cell->instIdx,
19677 (Data **)(&cell->raInfo.raReqLst), (Size)(lstSize * sizeof(CmLListCp)));
19683 cell->raInfo.lstSize = lstSize;
19690 * @brief This function handles the initialization of RACH Response
19691 * information at each DL subframe.
19695 * Function: rgSCHCmnDlRachInfoInit
19696 * Purpose: Each DL subframe stores the sfn and subframe information of
19697 * possible RACH response allowed for UL subframes. It generates
19698 * the information based on PRACH configuration.
19700 * Invoked by: Scheduler
19702 * @param[in] RgSchCellCb* cell
19707 PRIVATE S16 rgSCHCmnDlRachInfoInit
19712 PRIVATE S16 rgSCHCmnDlRachInfoInit(cell)
19717 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19720 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19721 [RGSCH_NUM_SUB_FRAMES-1];
19723 RgSchTddRachRspLst rachRspLst[3][RGSCH_NUM_SUB_FRAMES];
19731 RgSchTddRachDelInfo *delInfo;
19735 TRC2(rgSCHCmnDlRachInfoInit);
19737 cmMemset((U8 *)rachRspLst, 0, sizeof(rachRspLst));
19739 RG_SCH_CMN_CALC_RARSPLST_SIZE(cell, raArrSz);
19741 /* Include Special subframes */
19742 maxUlSubfrms = maxUlSubfrms + \
19743 rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx].switchPoints;
19744 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19746 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] ==
19747 RG_SCH_TDD_DL_SUBFRAME)
19749 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19753 startWin = (sfNum + RG_SCH_CMN_RARSP_WAIT_PRD + \
19754 ((RgSchCmnCell *)cell->sc.sch)->dl.numRaSubFrms);
19755 endWin = (startWin + cell->rachCfg.raWinSize - 1);
19757 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][startWin%RGSCH_NUM_SUB_FRAMES];
19758 /* Find the next DL subframe starting from Subframe 0 */
19759 if((startSubfrmIdx % RGSCH_NUM_SUB_FRAMES) == 0)
19761 startWin = RGSCH_CEIL(startWin, RGSCH_NUM_SUB_FRAMES);
19762 startWin = startWin * RGSCH_NUM_SUB_FRAMES;
19766 rgSchTddLowDlSubfrmIdxTbl[ulDlCfgIdx][endWin%RGSCH_NUM_SUB_FRAMES];
19767 endWin = (endWin/RGSCH_NUM_SUB_FRAMES) * RGSCH_NUM_SUB_FRAMES \
19769 if(startWin > endWin)
19773 /* Find all the possible RACH Response transmission
19774 * time within the RA window size */
19775 startSubfrmIdx = startWin%RGSCH_NUM_SUB_FRAMES;
19776 for(sfnIdx = startWin/RGSCH_NUM_SUB_FRAMES;
19777 sfnIdx <= endWin/RGSCH_NUM_SUB_FRAMES; sfnIdx++)
19779 if(sfnIdx == endWin/RGSCH_NUM_SUB_FRAMES)
19781 endSubfrmIdx = endWin%RGSCH_NUM_SUB_FRAMES;
19785 endSubfrmIdx = RGSCH_NUM_SUB_FRAMES-1;
19788 /* Find all the possible RACH Response transmission
19789 * time within radio frame */
19790 for(subfrmIdx = startSubfrmIdx;
19791 subfrmIdx <= endSubfrmIdx; subfrmIdx++)
19793 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][subfrmIdx] ==
19794 RG_SCH_TDD_UL_SUBFRAME)
19798 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
19799 /* Find the next DL subframe starting from Subframe 0 */
19800 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
19804 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx], subfrmIdx);
19806 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
19807 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset = sfnIdx;
19808 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].subframe[numSubfrms]
19810 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms++;
19812 startSubfrmIdx = RG_SCH_CMN_SUBFRM_0;
19814 /* Update the subframes to be deleted at this subframe */
19815 /* Get the subframe after the end of RA window size */
19818 sfnOffset = endWin/RGSCH_NUM_SUB_FRAMES;
19821 sfnOffset += raArrSz;
19823 sfnIdx = (endWin/RGSCH_NUM_SUB_FRAMES) % raArrSz;
19825 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx],endSubfrmIdx-1);
19826 if((endSubfrmIdx == RGSCH_NUM_SUB_FRAMES) ||
19827 (rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx] ==
19828 RGSCH_NUM_SUB_FRAMES))
19831 rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][RG_SCH_CMN_SUBFRM_0];
19835 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][endSubfrmIdx];
19838 delInfo = &rachRspLst[sfnIdx][subfrmIdx].delInfo;
19839 delInfo->sfnOffset = sfnOffset;
19840 delInfo->subframe[delInfo->numSubfrms] = sfNum;
19841 delInfo->numSubfrms++;
19843 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19846 ret = rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz);
19856 * @brief This function handles the initialization of PHICH information
19857 * for each DL subframe based on PHICH table.
19861 * Function: rgSCHCmnDlPhichOffsetInit
19862 * Purpose: Each DL subf stores the sfn and subf information of UL subframe
19863 * for which it transmits PHICH in this subframe. It generates the information
19864 * based on PHICH table.
19866 * Invoked by: Scheduler
19868 * @param[in] RgSchCellCb* cell
19873 PRIVATE S16 rgSCHCmnDlPhichOffsetInit
19878 PRIVATE S16 rgSCHCmnDlPhichOffsetInit(cell)
19883 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
19884 U8 maxDlSubfrms = cell->numDlSubfrms;
19891 RgSchTddSubfrmInfo ulSubfrmInfo = rgSchTddMaxUlSubfrmTbl[ulDlCfgIdx];
19892 U8 maxUlSubfrms = rgSchTddNumUlSubfrmTbl[ulDlCfgIdx]\
19893 [RGSCH_NUM_SUB_FRAMES-1];
19895 TRC2(rgSCHCmnDlPhichOffsetInit);
19897 /* Generate PHICH offset information for each DL subframe in a radio frame
19898 * Calculate this information based on K in PHICH table */
19899 for (sfCount = 0, sfNum = 0; sfCount < maxUlSubfrms; sfCount++)
19901 while(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][sfNum] !=
19902 RG_SCH_TDD_UL_SUBFRAME)
19904 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19908 calcSfNum = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) % \
19909 RGSCH_NUM_SUB_FRAMES;
19910 calcSfnOffset = (rgSchTddKPhichTbl[ulDlCfgIdx][sfNum] + sfNum) / \
19911 RGSCH_NUM_SUB_FRAMES;
19913 if(calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_1)
19917 else if((ulSubfrmInfo.switchPoints == 2) &&
19918 (calcSfNum <= RG_SCH_CMN_SPL_SUBFRM_6))
19920 dlIdx = calcSfNum - ulSubfrmInfo.numFrmHf1;
19924 dlIdx = calcSfNum - maxUlSubfrms;
19927 cell->subFrms[dlIdx]->phichOffInfo.subframe = sfNum;
19928 cell->subFrms[dlIdx]->phichOffInfo.numSubfrms = 1;
19930 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset = calcSfnOffset;
19932 /* set dlIdx for which phich offset is updated */
19933 dlPres = dlPres | (1 << dlIdx);
19934 sfNum = (sfNum+1) % RGSCH_NUM_SUB_FRAMES;
19937 /* Set Invalid information for which phich offset is not present */
19939 sfCount < rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19942 /* If dlPres is 0, phich offset is not present in that DL index */
19943 if(! ((dlPres >> sfCount)&0x01))
19945 cell->subFrms[sfCount]->phichOffInfo.sfnOffset =
19946 RGSCH_INVALID_INFO;
19947 cell->subFrms[sfCount]->phichOffInfo.subframe =
19948 RGSCH_INVALID_INFO;
19949 cell->subFrms[sfCount]->phichOffInfo.numSubfrms = 0;
19953 /* DL subframes in the subsequent radio frames are
19954 * initialized with the previous radio frames */
19955 for(dlIdx = RGSCH_NUM_SUB_FRAMES - maxUlSubfrms;
19956 dlIdx < maxDlSubfrms; dlIdx++)
19959 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1];
19961 cell->subFrms[dlIdx]->phichOffInfo.subframe =
19962 cell->subFrms[sfNum]->phichOffInfo.subframe;
19964 cell->subFrms[dlIdx]->phichOffInfo.sfnOffset =
19965 cell->subFrms[sfNum]->phichOffInfo.sfnOffset;
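/* Illustrative example with an assumed K: if rgSchTddKPhichTbl gave K = 4
 * for UL subframe 7, then calcSfNum = (4+7) % 10 = 1 and
 * calcSfnOffset = (4+7) / 10 = 1, i.e. DL subframe 1 of the next radio
 * frame carries the PHICH for a PUSCH sent in that UL subframe. */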
19972 * @brief Update of scheduler variables per TTI.
19976 * Function: rgSCHCmnUpdVars
19977 * Purpose: Update of scheduler variables per TTI.
19979 * @param[in] RgSchCellCb *cell
19984 PUBLIC Void rgSCHCmnUpdVars
19989 PUBLIC Void rgSCHCmnUpdVars(cell)
19993 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
19994 CmLteTimingInfo timeInfo;
19997 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
20000 TRC2(rgSCHCmnUpdVars);
20002 /* ccpu00132654-ADD- Initializing all the indices in every subframe*/
20003 rgSCHCmnInitVars(cell);
20005 idx = (cell->crntTime.slot + TFU_ULCNTRL_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20006 /* Calculate the UL scheduling subframe idx based on the
20008 if(rgSchTddPuschTxKTbl[ulDlCfgIdx][idx] != 0)
20010 /* PUSCH transmission is based on offset from DL
20011 * PDCCH scheduling */
20012 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20013 ulSubframe = rgSchTddPuschTxKTbl[ulDlCfgIdx][timeInfo.subframe];
20014 /* Add the DCI-0 to PUSCH time to get the time of UL subframe */
20015 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, ulSubframe);
20017 cellUl->schdTti = timeInfo.sfn * 10 + timeInfo.subframe;
20019 /* Fetch the corresponding UL subframe Idx in UL sf array */
20020 cellUl->schdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20021 /* Fetch the corresponding UL Harq Proc ID */
20022 cellUl->schdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
20023 cellUl->schdTime = timeInfo;
20025 Mval = rgSchTddPhichMValTbl[ulDlCfgIdx][idx];
20028 /* Fetch the tx time for DL HIDCI-0 */
20029 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo, TFU_ULCNTRL_DLDELTA);
20030 /* Fetch the corresponding n-k tx time of PUSCH */
20031 cellUl->hqFdbkIdx[0] = rgSCHCmnGetPhichUlSfIdx(&timeInfo, cell);
20032 /* Retx will happen according to the Pusch k table */
20033 cellUl->reTxIdx[0] = cellUl->schdIdx;
20035 if(ulDlCfgIdx == 0)
20037 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[0] */
20038 cellUl->reTxIdx[0] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20039 cellUl->hqFdbkIdx[0]);
20042 /* At Idx 1 store the UL SF adjacent(left) to the UL SF
20044 cellUl->hqFdbkIdx[1] = (cellUl->hqFdbkIdx[0]-1 +
20045 cellUl->numUlSubfrms) % cellUl->numUlSubfrms;
20046 /* Calculate the ReTxIdx corresponding to hqFdbkIdx[1] */
20047 cellUl->reTxIdx[1] = rgSchUtlCfg0ReTxIdx(cell,timeInfo,
20048 cellUl->hqFdbkIdx[1]);
20053 idx = (cell->crntTime.slot + TFU_RECPREQ_DLDELTA) % RGSCH_NUM_SUB_FRAMES;
20054 if (rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] == RG_SCH_TDD_UL_SUBFRAME)
20056 RGSCHCMNADDTOCRNTTIME(cell->crntTime, timeInfo, TFU_RECPREQ_DLDELTA)
20057 cellUl->rcpReqIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20059 idx = (cell->crntTime.slot+RG_SCH_CMN_DL_DELTA) % RGSCH_NUM_SUB_FRAMES;
20061 /*[ccpu00134666]-MOD-Modify the check to schedule the RAR in
20062 special subframe */
20063 if(rgSchTddUlDlSubfrmTbl[ulDlCfgIdx][idx] != RG_SCH_TDD_UL_SUBFRAME)
20065 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
20066 msg3Subfrm = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20067 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, msg3Subfrm);
20068 cellUl->msg3SchdIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20069 cellUl->msg3SchdHqProcIdx = rgSCHCmnGetUlHqProcIdx(&timeInfo, cell);
20072 if(!rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][idx])
20074 cellUl->spsUlRsrvIdx = RGSCH_INVALID_INFO;
20078 /* introduce some reuse with above code? */
20080 RGSCHCMNADDTOCRNTTIME(cell->crntTime,timeInfo,RG_SCH_CMN_DL_DELTA)
20081 //offst = rgSchTddMsg3SubfrmTbl[ulDlCfgIdx][timeInfo.subframe];
20082 offst = rgSchTddSpsUlRsrvTbl[ulDlCfgIdx][timeInfo.subframe];
20083 RGSCHCMNADDTOCRNTTIME(timeInfo, timeInfo, offst);
20084 cellUl->spsUlRsrvIdx = rgSCHCmnGetUlSfIdx(&timeInfo, cell);
20085 /* The harq proc continues to be accessed and used the same delta before
20086 * actual data occurrence, and hence use the same idx */
20087 cellUl->spsUlRsrvHqProcIdx = cellUl->schdHqProcIdx;
20091 /* RACHO: update cmn sched specific RACH variables,
20092 * mainly the prachMaskIndex */
20093 rgSCHCmnUpdRachParam(cell);
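/* Illustrative timing example with assumed values (TFU_ULCNTRL_DLDELTA = 2
 * and a PUSCH k of 4 from rgSchTddPuschTxKTbl): for crntTime = {sfn 20,
 * slot 5} the DCI-0 goes out at {20, 7} and the granted PUSCH lands at
 * {sfn 21, subframe 1}; schdTti is then 21*10 + 1 = 211, and schdIdx /
 * schdHqProcIdx are derived from that UL subframe. The real deltas and k
 * values depend on the build and on ulDlCfgIdx. */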
20099 * @brief To get 'p' value from nCCE.
20103 * Function: rgSCHCmnGetPValFrmCCE
20104 * Purpose: Gets 'p' value for HARQ ACK/NACK reception from CCE.
20106 * @param[in] RgSchCellCb *cell
20107 * @param[in] U8 cce
20112 PUBLIC U8 rgSCHCmnGetPValFrmCCE
20118 PUBLIC U8 rgSCHCmnGetPValFrmCCE(cell, cce)
20124 TRC2(rgSCHCmnGetPValFrmCCE);
20126 for(i=1; i < RGSCH_TDD_MAX_P_PLUS_ONE_VAL; i++)
20128 if(cce < cell->rgSchTddNpValTbl[i])
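/* Example using the 50-PRB sketch above (table {0, 11, 27, 44}): for
 * cce = 15 the loop finds 15 < 27 at i = 2, so the derived 'p' is 1, i.e.
 * the largest p with Np(p) <= cce. The numbers are illustrative only. */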
20137 /***********************************************************
20139 * Func : rgSCHCmnUlAdapRetx
20141 * Desc : Adaptive retransmission for an allocation.
20149 **********************************************************/
20151 PRIVATE Void rgSCHCmnUlAdapRetx
20153 RgSchUlAlloc *alloc,
20154 RgSchUlHqProcCb *proc
20157 PRIVATE Void rgSCHCmnUlAdapRetx(alloc, proc)
20158 RgSchUlAlloc *alloc;
20159 RgSchUlHqProcCb *proc;
20162 TRC2(rgSCHCmnUlAdapRetx);
20164 rgSCHUhmRetx(proc, alloc);
20166 if (proc->rvIdx != 0)
20168 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[proc->rvIdx];
20173 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
20179 * @brief Scheduler invocation per TTI.
20183 * Function: rgSCHCmnHdlUlInactUes
20186 * Invoked by: Common Scheduler
20188 * @param[in] RgSchCellCb *cell
20192 PRIVATE Void rgSCHCmnHdlUlInactUes
20197 PRIVATE Void rgSCHCmnHdlUlInactUes(cell)
20201 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20202 CmLListCp ulInactvLst;
20203 TRC2(rgSCHCmnHdlUlInactUes);
20204 /* Get a List of Inactv UEs for UL*/
20205 cmLListInit(&ulInactvLst);
20207 /* Trigger Spfc Schedulers with Inactive UEs */
20208 rgSCHMeasGapANRepGetUlInactvUe (cell, &ulInactvLst);
20209 /* take care of this in UL retransmission */
20210 cellSch->apisUl->rgSCHUlInactvtUes(cell, &ulInactvLst);
20216 * @brief Scheduler invocation per TTI.
20220 * Function: rgSCHCmnHdlDlInactUes
20223 * Invoked by: Common Scheduler
20225 * @param[in] RgSchCellCb *cell
20229 PRIVATE Void rgSCHCmnHdlDlInactUes
20234 PRIVATE Void rgSCHCmnHdlDlInactUes(cell)
20238 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20239 CmLListCp dlInactvLst;
20240 TRC2(rgSCHCmnHdlDlInactUes);
20241 /* Get a List of Inactv UEs for DL */
20242 cmLListInit(&dlInactvLst);
20244 /* Trigger Spfc Schedulers with Inactive UEs */
20245 rgSCHMeasGapANRepGetDlInactvUe (cell, &dlInactvLst);
20247 cellSch->apisDl->rgSCHDlInactvtUes(cell, &dlInactvLst);
20251 /* RACHO: Rach handover functions start here */
20252 /***********************************************************
20254 * Func : rgSCHCmnUeIdleExdThrsld
20256 * Desc : RETURN ROK if UE has been idle more
20265 **********************************************************/
20267 PRIVATE S16 rgSCHCmnUeIdleExdThrsld
20273 PRIVATE S16 rgSCHCmnUeIdleExdThrsld(cell, ue)
20278 /* Time difference in subframes */
20279 U32 sfDiff = RGSCH_CALC_SF_DIFF(cell->crntTime, ue->ul.ulTransTime);
20281 TRC2(rgSCHCmnUeIdleExdThrsld);
20283 if (sfDiff > (U32)RG_SCH_CMN_UE_IDLE_THRSLD(ue))
20295 * @brief Scheduler processing for Ded Preambles on cell configuration.
20299 * Function : rgSCHCmnCfgRachDedPrm
20301 * This function does requisite initialisation
20302 * for RACH Ded Preambles.
20305 * @param[in] RgSchCellCb *cell
20309 PRIVATE Void rgSCHCmnCfgRachDedPrm
20314 PRIVATE Void rgSCHCmnCfgRachDedPrm(cell)
20318 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20319 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20322 TRC2(rgSCHCmnCfgRachDedPrm);
20324 if (cell->macPreambleSet.pres == NOTPRSNT)
20328 cellSch->rachCfg.numDedPrm = cell->macPreambleSet.size;
20329 cellSch->rachCfg.dedPrmStart = cell->macPreambleSet.start;
20330 /* Initialize handover List */
20331 cmLListInit(&cellSch->rachCfg.hoUeLst);
20332 /* Initialize pdcch Order List */
20333 cmLListInit(&cellSch->rachCfg.pdcchOdrLst);
20335 /* Initialize the rapId to UE mapping structure */
20336 for (cnt = 0; cnt<cellSch->rachCfg.numDedPrm; cnt++)
20338 cellSch->rachCfg.rapIdMap[cnt].rapId = cellSch->rachCfg.dedPrmStart + \
20340 cmLListInit(&cellSch->rachCfg.rapIdMap[cnt].assgndUes);
20342 /* Perform Prach Mask Idx, remDedPrm, applFrm initializations */
20343 /* Set remDedPrm as numDedPrm */
20344 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20345 /* Initialize applFrm */
20346 cellSch->rachCfg.prachMskIndx = 0;
20347 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_EVEN)
20349 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + \
20350 (cell->crntTime.sfn % 2)) % RGSCH_MAX_SFN;
20353 else if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ODD)
20355 if((cell->crntTime.sfn%2) == 0)
20357 cellSch->rachCfg.applFrm.sfn = (cell->crntTime.sfn + 1)\
20364 cellSch->rachCfg.applFrm.sfn = cell->crntTime.sfn;
20366 /* Initialize cellSch->rachCfg.applFrm as >= crntTime.
20367 * This is because of RGSCH_CALC_SF_DIFF logic */
20368 if (cellSch->rachCfg.applFrm.sfn == cell->crntTime.sfn)
20370 while (cellSch->rachCfg.prachMskIndx < cell->rachCfg.raOccasion.size)
20372 if (cell->crntTime.slot <\
20373 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx])
20377 cellSch->rachCfg.prachMskIndx++;
20379 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size)
20381 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20383 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) %\
20388 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) %\
20391 cellSch->rachCfg.prachMskIndx = 0;
20393 cellSch->rachCfg.applFrm.slot = \
20394 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20398 cellSch->rachCfg.applFrm.slot = \
20399 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20402 /* Note first param to this macro should always be the latest in time */
20403 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
20404 while (sfDiff <= gap)
20406 rgSCHCmnUpdNxtPrchMskIdx(cell);
20407 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, cell->crntTime);
20414 * @brief Updates the PRACH MASK INDEX.
20418 * Function: rgSCHCmnUpdNxtPrchMskIdx
20419 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20420 * CFG is always >= "n"+"DELTA", where "n" is the crntTime
20421 * of the cell. If not, applFrm is updated to the next avl
20422 * PRACH opportunity as per the PRACH Cfg Index configuration.
20425 * Invoked by: Common Scheduler
20427 * @param[in] RgSchCellCb *cell
20431 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx
20436 PRIVATE Void rgSCHCmnUpdNxtPrchMskIdx(cell)
20440 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20441 TRC2(rgSCHCmnUpdNxtPrchMskIdx);
20443 /* Determine the next prach mask Index */
20444 if (cellSch->rachCfg.prachMskIndx == cell->rachCfg.raOccasion.size - 1)
20446 /* PRACH within applFrm.sfn are done, go to next AVL sfn */
20447 cellSch->rachCfg.prachMskIndx = 0;
20448 if (cell->rachCfg.raOccasion.sfnEnum == RGR_SFN_ANY)
20450 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+1) % \
20453 else/* RGR_SFN_EVEN or RGR_SFN_ODD */
20455 cellSch->rachCfg.applFrm.sfn = (cellSch->rachCfg.applFrm.sfn+2) % \
20458 cellSch->rachCfg.applFrm.slot = cell->rachCfg.raOccasion.\
20461 else /* applFrm.sfn is still valid */
20463 cellSch->rachCfg.prachMskIndx += 1;
20464 if ( cellSch->rachCfg.prachMskIndx < RGR_MAX_SUBFRAME_NUM )
20466 cellSch->rachCfg.applFrm.slot = \
20467 cell->rachCfg.raOccasion.subFrameNum[cellSch->rachCfg.prachMskIndx];
20474 * @brief Updates the Ded preamble RACH parameters
20479 * Function: rgSCHCmnUpdRachParam
20480 * Purpose: Ensures the "applFrm" field of Cmn Sched RACH
20481 * CFG is always >= "n"+"6"+"DELTA", where "n" is the crntTime
20482 * of the cell. If not, applFrm is updated to the next avl
20483 * PRACH opportunity as per the PRACH Cfg Index configuration,
20484 * accordingly the "remDedPrm" is reset to "numDedPrm" and
20485 * "prachMskIdx" field is updated as per "applFrm".
20488 * Invoked by: Common Scheduler
20490 * @param[in] RgSchCellCb *cell
20494 PRIVATE Void rgSCHCmnUpdRachParam
20499 PRIVATE Void rgSCHCmnUpdRachParam(cell)
20504 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20505 U32 gap = RG_SCH_CMN_MIN_PRACH_OPPR_GAP;
20507 TRC2(rgSCHCmnUpdRachParam);
20509 if (cell->macPreambleSet.pres == NOTPRSNT)
20513 sfDiff = RGSCH_CALC_SF_DIFF(cellSch->rachCfg.applFrm, \
20517 /* applFrm is still a valid next PRACH Opportunity */
20520 rgSCHCmnUpdNxtPrchMskIdx(cell);
20521 /* Reset remDedPrm as numDedPrm */
20522 cellSch->rachCfg.remDedPrm = cellSch->rachCfg.numDedPrm;
20528 * @brief Dedicated Preamble allocation function.
20532 * Function: rgSCHCmnAllocPOParam
20533 * Purpose: Allocate pdcch, rapId and PrachMskIdx.
20534 * Set mapping of UE with the allocated rapId.
20536 * Invoked by: Common Scheduler
20538 * @param[in] RgSchCellCb *cell
20539 * @param[in] RgSchDlSf *dlSf
20540 * @param[in] RgSchUeCb *ue
20541 * @param[out] RgSchPdcch **pdcch
20542 * @param[out] U8 *rapId
20543 * @param[out] U8 *prachMskIdx
20547 PRIVATE S16 rgSCHCmnAllocPOParam
20552 RgSchPdcch **pdcch,
20557 PRIVATE S16 rgSCHCmnAllocPOParam(cell, dlSf, ue, pdcch, rapId, prachMskIdx)
20561 RgSchPdcch **pdcch;
20567 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20568 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20570 TRC2(rgSCHCmnAllocPOParam);
20572 if (cell->macPreambleSet.pres == PRSNT_NODEF)
20574 if (cellSch->rachCfg.remDedPrm == 0)
20578 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20579 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
20583 /* The stored prachMskIdx is the index of PRACH Opportunities in
20584 * raOccasions.subframes[].
20585 * Converting the same to the actual PRACHMskIdx to be transmitted. */
20586 *prachMskIdx = cellSch->rachCfg.prachMskIndx + 1;
20587 /* Distribution starts from dedPrmStart till dedPrmStart + numDedPrm */
20588 *rapId = cellSch->rachCfg.dedPrmStart +
20589 cellSch->rachCfg.numDedPrm - cellSch->rachCfg.remDedPrm;
20590 cellSch->rachCfg.remDedPrm--;
20591 /* Map UE with the allocated RapId */
20592 ueDl->rachInfo.asgnOppr = cellSch->rachCfg.applFrm;
20593 RGSCH_ARRAY_BOUND_CHECK_WITH_POS_IDX(cell->instIdx, cellSch->rachCfg.rapIdMap, (*rapId - cellSch->rachCfg.dedPrmStart));
20594 cmLListAdd2Tail(&cellSch->rachCfg.rapIdMap[*rapId - cellSch->rachCfg.dedPrmStart].assgndUes,
20595 &ueDl->rachInfo.rapIdLnk);
20596 ueDl->rachInfo.rapIdLnk.node = (PTR)ue;
20597 ueDl->rachInfo.poRapId = *rapId;
20599 else /* if dedicated preambles not configured */
20601 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
20602 if ((*pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE)) == NULLP)
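/* Illustrative distribution with an assumed configuration (dedPrmStart = 52,
 * numDedPrm = 12): the first PDCCH order in a PRACH opportunity gets
 * rapId = 52 + 12 - 12 = 52, the next 53, and so on until remDedPrm reaches
 * zero; later UEs then wait for the next opportunity (see
 * rgSCHCmnUpdRachParam). The prachMskIdx sent to the UE is the 1-based
 * opportunity index within applFrm, as computed above. */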
20614 * @brief Downlink Scheduling Handler.
20618 * Function: rgSCHCmnGenPdcchOrder
20619 * Purpose: For each UE in PO Q, grab a PDCCH,
20620 * get an available ded RapId and fill PDCCH
20621 * with PO information.
20623 * Invoked by: Common Scheduler
20625 * @param[in] RgSchCellCb *cell
20626 * @param[in] RgSchDlSf *dlSf
20630 PRIVATE Void rgSCHCmnGenPdcchOrder
20636 PRIVATE Void rgSCHCmnGenPdcchOrder(cell, dlSf)
20641 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20642 CmLList *node = cellSch->rachCfg.pdcchOdrLst.first;
20646 RgSchPdcch *pdcch = NULLP;
20648 TRC2(rgSCHCmnGenPdcchOrder);
20652 ue = (RgSchUeCb *)node->node;
20654 /* Skip sending for this subframe if the UE is measuring or inactive in UL due
20655 * to MeasGap, or inactive due to DRX
20657 if ((ue->measGapCb.isMeasuring == TRUE) ||
20658 (ue->ul.ulInactvMask & RG_MEASGAP_INACTIVE) ||
20659 (ue->isDrxEnabled &&
20660 ue->dl.dlInactvMask & RG_DRX_INACTIVE)
20665 if (rgSCHCmnAllocPOParam(cell, dlSf, ue, &pdcch, &rapId,\
20666 &prachMskIdx) != ROK)
20668 /* No more rapIds left for the valid next avl Opportunity.
20669 * Unsatisfied UEs here would be given a chance, when the
20670 * prach Mask Index changes as per rachUpd every TTI */
20672 /* PDCCH can also be ordered with rapId=0, prachMskIdx=0
20673 * so that UE triggers a RACH procedure with non-dedicated preamble.
20674 * But the implementation here does not do this. Instead, the "break"
20675 * here implies, that PDCCH Odr always given with valid rapId!=0,
20676 * prachMskIdx!=0 if dedicated preambles are configured.
20677 * If not configured, then trigger a PO with rapId=0,prchMskIdx=0*/
20680 /* Fill pdcch with pdcch odr information */
20681 rgSCHCmnFillPdcchOdr2Sf(cell, ue, pdcch, rapId, prachMskIdx);
20682 /* Remove this UE from the PDCCH ORDER QUEUE */
20683 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20684 /* Reset UE's power state */
20685 rgSCHPwrUeReset(cell, ue);
20692 * @brief This function add UE to PdcchOdr Q if not already present.
20696 * Function: rgSCHCmnDlAdd2PdcchOdrQ
20699 * Invoked by: CMN Scheduler
20701 * @param[in] RgSchCellCb* cell
20702 * @param[in] RgSchUeCb* ue
20707 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ
20713 PRIVATE Void rgSCHCmnDlAdd2PdcchOdrQ(cell, ue)
20718 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20719 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20721 TRC2(rgSCHCmnDlAdd2PdcchOdrQ);
20723 if (ueDl->rachInfo.poLnk.node == NULLP)
20725 cmLListAdd2Tail(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20726 ueDl->rachInfo.poLnk.node = (PTR)ue;
20733 * @brief This function removes the UE from the PdcchOdr Q if present.
20737 * Function: rgSCHCmnDlRmvFrmPdcchOdrQ
20740 * Invoked by: CMN Scheduler
20742 * @param[in] RgSchCellCb* cell
20743 * @param[in] RgSchUeCb* ue
20748 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ
20754 PRIVATE Void rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue)
20759 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20760 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20762 TRC2(rgSCHCmnDlRmvFrmPdcchOdrQ);
20764 cmLListDelFrm(&cellSch->rachCfg.pdcchOdrLst, &ueDl->rachInfo.poLnk);
20765 ueDl->rachInfo.poLnk.node = NULLP;
20770 * @brief Fill pdcch with PDCCH order information.
20774 * Function: rgSCHCmnFillPdcchOdr2Sf
20775 * Purpose: Fill PDCCH with PDCCH order information,
20777 * Invoked by: Common Scheduler
20779 * @param[in] RgSchUeCb *ue
20780 * @param[in] RgSchPdcch *pdcch
20781 * @param[in] U8 rapId
20782 * @param[in] U8 prachMskIdx
20786 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf
20795 PRIVATE Void rgSCHCmnFillPdcchOdr2Sf(ue, pdcch, rapId, prachMskIdx)
20803 RgSchUeACqiCb *acqiCb = RG_SCH_CMN_GET_ACQICB(ue,cell);
20805 TRC2(rgSCHCmnFillPdcchOdr2Sf);
20807 pdcch->rnti = ue->ueId;
20808 pdcch->dci.dciFormat = TFU_DCI_FORMAT_1A;
20809 pdcch->dci.u.format1aInfo.isPdcchOrder = TRUE;
20810 pdcch->dci.u.format1aInfo.t.pdcchOrder.preambleIdx = rapId;
20811 pdcch->dci.u.format1aInfo.t.pdcchOrder.prachMaskIdx = prachMskIdx;
20813 /* Request for APer CQI immediately after PDCCH Order */
20814 /* CR ccpu00144525 */
20816 if(ue->dl.ueDlCqiCfg.aprdCqiCfg.pres)
20818 ue->dl.reqForCqi = RG_SCH_APCQI_SERVING_CC;
20819 acqiCb->aCqiTrigWt = 0;
20828 * @brief UE deletion for scheduler.
20832 * Function : rgSCHCmnDelRachInfo
20834 * This function deletes the RACH-related scheduler information
20835 * pertaining to a UE.
20837 * @param[in] RgSchCellCb *cell
20838 * @param[in] RgSchUeCb *ue
20842 PRIVATE Void rgSCHCmnDelRachInfo
20848 PRIVATE Void rgSCHCmnDelRachInfo(cell, ue)
20853 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
20854 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20857 TRC2(rgSCHCmnDelRachInfo);
20859 if (ueDl->rachInfo.poLnk.node)
20861 rgSCHCmnDlRmvFrmPdcchOdrQ(cell, ue);
20863 if (ueDl->rachInfo.hoLnk.node)
20865 cmLListDelFrm(&cellSch->rachCfg.hoUeLst, &ueDl->rachInfo.hoLnk);
20866 ueDl->rachInfo.hoLnk.node = NULLP;
20868 if (ueDl->rachInfo.rapIdLnk.node)
20870 rapIdIdx = ueDl->rachInfo.poRapId - cellSch->rachCfg.dedPrmStart;
20871 cmLListDelFrm(&cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes,
20872 &ueDl->rachInfo.rapIdLnk);
20873 ueDl->rachInfo.rapIdLnk.node = NULLP;
20879 * @brief This function retrieves the ue which has sent this raReq
20880 * and it allocates grant for UEs undergoing (for which RAR
20881 * is being generated) HandOver/PdcchOrder.
20886 * Function: rgSCHCmnHdlHoPo
20887 * Purpose: This function retrieves the ue which has sent this raReq
20888 * and it allocates grant for UEs undergoing (for which RAR
20889 * is being generated) HandOver/PdcchOrder.
20891 * Invoked by: Common Scheduler
20893 * @param[in] RgSchCellCb *cell
20894 * @param[out] CmLListCp *raRspLst
20895 * @param[in] RgSchRaReqInfo *raReq
20900 PRIVATE Void rgSCHCmnHdlHoPo
20903 CmLListCp *raRspLst,
20904 RgSchRaReqInfo *raReq
20907 PRIVATE Void rgSCHCmnHdlHoPo(cell, raRspLst, raReq)
20909 CmLListCp *raRspLst;
20910 RgSchRaReqInfo *raReq;
20913 RgSchUeCb *ue = raReq->ue;
20914 TRC2(rgSCHCmnHdlHoPo);
20916 if ( ue->isDrxEnabled )
20918 rgSCHDrxDedRa(cell,ue);
20920 rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq);
20925 * @brief This function retrieves the UE which has sent this raReq
20926 * for handover case.
20931 * Function: rgSCHCmnGetHoUe
20932 * Purpose: This function retrieves the UE which has sent this raReq
20933 * for handover case.
20935 * Invoked by: Common Scheduler
20937 * @param[in] RgSchCellCb *cell
20938 * @param[in] RgSchRaReqInfo *raReq
20939 * @return RgSchUeCb*
20943 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe
20949 PUBLIC RgSchUeCb* rgSCHCmnGetHoUe(cell, rapId)
20954 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20958 RgSchCmnDlUe *ueDl;
20959 TRC2(rgSCHCmnGetHoUe);
20961 ueLst = &cellSch->rachCfg.hoUeLst;
20962 node = ueLst->first;
20965 ue = (RgSchUeCb *)node->node;
20967 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
20968 if (ueDl->rachInfo.hoRapId == rapId)
20977 PRIVATE Void rgSCHCmnDelDedPreamble
20983 PRIVATE Void rgSCHCmnDelDedPreamble(cell, preambleId)
20988 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
20992 RgSchCmnDlUe *ueDl;
20993 TRC2(rgSCHCmnDelDedPreamble);
20995 ueLst = &cellSch->rachCfg.hoUeLst;
20996 node = ueLst->first;
20999 ue = (RgSchUeCb *)node->node;
21001 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
21002 if (ueDl->rachInfo.hoRapId == preambleId)
21004 cmLListDelFrm(ueLst, &ueDl->rachInfo.hoLnk);
21005 ueDl->rachInfo.hoLnk.node = (PTR)NULLP;
21011 * @brief This function retrieves the UE which has sent this raReq
21012 * for PDCCh Order case.
21017 * Function: rgSCHCmnGetPoUe
21018 * Purpose: This function retrieves the UE which has sent this raReq
21019 * for PDCCH Order case.
21021 * Invoked by: Common Scheduler
21023 * @param[in] RgSchCellCb *cell
21024 * @param[in] RgSchRaReqInfo *raReq
21025 * @return RgSchUeCb*
21029 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe
21033 CmLteTimingInfo timingInfo
21036 PUBLIC RgSchUeCb* rgSCHCmnGetPoUe(cell, rapId, timingInfo)
21039 CmLteTimingInfo timingInfo;
21042 RgSchCmnCell *cellSch = (RgSchCmnCell *)(cell->sc.sch);
21046 RgSchCmnDlUe *ueDl;
21048 TRC2(rgSCHCmnGetPoUe);
21050 rapIdIdx = rapId -cellSch->rachCfg.dedPrmStart;
21051 ueLst = &cellSch->rachCfg.rapIdMap[rapIdIdx].assgndUes;
21052 node = ueLst->first;
21055 ue = (RgSchUeCb *)node->node;
21057 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
21058 /* Remove UEs irrespective.
21059 * Old UE associations are removed.*/
21060 cmLListDelFrm(ueLst, &ueDl->rachInfo.rapIdLnk);
21061 ueDl->rachInfo.rapIdLnk.node = (PTR)NULLP;
21062 if (RGSCH_TIMEINFO_SAME(ueDl->rachInfo.asgnOppr, timingInfo))
21073 * @brief This function returns the valid UL cqi for a given UE.
21077 * Function: rgSCHCmnUlGetCqi
21078 * Purpose: This function returns the "valid UL cqi" for a given UE
21079 * based on UE category
21081 * Invoked by: Scheduler
21083 * @param[in] RgSchUeCb *ue
21084 * @param[in] U8 ueCtgy
21088 PUBLIC U8 rgSCHCmnUlGetCqi
21092 CmLteUeCategory ueCtgy
21095 PUBLIC U8 rgSCHCmnUlGetCqi(cell, ue, ueCtgy)
21098 CmLteUeCategory ueCtgy;
21101 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21104 TRC2(rgSCHCmnUlGetCqi);
21106 cqi = ueUl->maxUlCqi;
21108 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21109 (ueUl->validUlCqi > ueUl->maxUlCqi)))
21111 cqi = ueUl->validUlCqi;
21114 if (!((ueCtgy != CM_LTE_UE_CAT_5) &&
21115 (ueUl->crntUlCqi[0] > ueUl->maxUlCqi )))
21117 cqi = ueUl->crntUlCqi[0];
21121 }/* End of rgSCHCmnUlGetCqi */
21123 /***********************************************************
21125 * Func : rgSCHCmnUlRbAllocForPoHoUe
21127 * Desc : Do uplink RB allocation for a HO/PO UE.
21131 * Notes: Note that as of now, for retx, maxRb
21132 * is not considered. Alternatives, such
21133 * as dropping retx if it crosses maxRb
21134 * could be considered.
21138 **********************************************************/
21140 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe
21148 PRIVATE S16 rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, maxRb)
21155 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21156 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21157 U8 sbSize = cellUl->sbSize;
21158 U32 maxBits = ue->ul.maxBytesPerUePerTti*8;
21160 RgSchUlAlloc *alloc;
21170 RgSchUlHqProcCb *proc = &ueUl->hqEnt.hqProcCb[cellUl->msg3SchdHqProcIdx];
21171 CmLteUeCategory ueCtg = (CmLteUeCategory)(RG_SCH_CMN_GET_UE_CTGY(ue));
21173 TRC2(rgSCHCmnUlRbAllocForPoHoUe);
21174 if ((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
21178 /*MS_WORKAROUND for HO ccpu00121116*/
21179 cqi = rgSCHCmnUlGetCqi(cell, ue, ueCtg);
21180 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend], cqi);
21181 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21182 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs,ueCtg);
21183 while(iMcs > RG_SCH_CMN_MAX_MSG3_IMCS)
21186 iTbs = rgSchCmnUlCqiToTbsTbl[(U8)cell->isCpUlExtend][cqi];
21187 iMcs = rgSCHCmnUlGetIMcsFrmITbs(iTbs, ueCtg);
21189 /* Filling the modorder in the grant structure*/
21190 RG_SCH_UL_MCS_TO_MODODR(iMcs,modOdr);
21191 if (!cell->isCpUlExtend)
21193 eff = rgSchCmnNorUlEff[0][iTbs];
21197 eff = rgSchCmnExtUlEff[0][iTbs];
21200 bits = ueUl->alloc.reqBytes * 8;
21202 #if (ERRCLASS & ERRCLS_DEBUG)
21209 if (bits < rgSCHCmnUlMinTbBitsForITbs(cellUl, iTbs))
21212 nPrb = numSb * sbSize;
21216 if (bits > maxBits)
21219 nPrb = bits * 1024 / eff / RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl);
21224 numSb = nPrb / sbSize;
21228 /*ccpu00128775:MOD-Change to get upper threshold nPrb*/
21229 nPrb = RGSCH_CEIL((RGSCH_CEIL(bits * 1024, eff)),
21230 RG_SCH_CMN_UL_NUM_RE_PER_RB(cellUl));
21235 numSb = RGSCH_DIV_ROUND(nPrb, sbSize);
21240 alloc = rgSCHCmnUlSbAlloc(sf, (U8)RGSCH_MIN(numSb, cellUl->maxSbPerUe),\
21242 if (alloc == NULLP)
21244 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
21245 "rgSCHCmnUlRbAllocForPoHoUe(): Could not get UlAlloc");
21248 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
21250 /* Filling the modorder in the grant structure start*/
21251 alloc->grnt.modOdr = (TfuModScheme) modOdr;
21252 alloc->grnt.iMcs = iMcs;
21253 alloc->grnt.iMcsCrnt = iMcsCrnt;
21254 alloc->grnt.hop = 0;
21255 /* Fix for ccpu00123915*/
21256 alloc->forMsg3 = TRUE;
21257 alloc->hqProc = proc;
21258 alloc->hqProc->ulSfIdx = cellUl->msg3SchdIdx;
21260 alloc->rnti = ue->ueId;
21261 /* updating initNumRbs in case of HO */
21263 ue->initNumRbs = alloc->grnt.numRb;
21265 ueUl->alloc.alloc = alloc;
21266 iTbs = rgSCHCmnUlGetITbsFrmIMcs(iMcs);
21267 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[0], iTbs);
21268 alloc->grnt.datSz = rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1] / 8;
21269 /* MS_WORKAROUND for HO ccpu00121124*/
21270 /*[Adi temp change] Need to fil modOdr */
21271 RG_SCH_UL_MCS_TO_MODODR(alloc->grnt.iMcsCrnt,alloc->grnt.modOdr);
21272 rgSCHUhmNewTx(proc, ueUl->hqEnt.maxHqRetx, alloc);
21273 /* No grant attr recorded now */
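/* Rough sizing sketch with made-up numbers: if bits = 1000, eff = 512
 * (about 0.5 bit per RE, since the efficiency table is scaled by 1024) and
 * RG_SCH_CMN_UL_NUM_RE_PER_RB() is 144, then roughly
 * ceil(1000*1024/512) = 2000 REs are needed, i.e. nPrb = ceil(2000/144) = 14
 * PRBs, which is then converted to whole subbands of sbSize PRBs. */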
21278 * @brief This function allocates grant for UEs undergoing (for which RAR
21279 * is being generated) HandOver/PdcchOrder.
21284 * Function: rgSCHCmnAllocPoHoGrnt
21285 * Purpose: This function allocates grant for UEs undergoing (for which RAR
21286 * is being generated) HandOver/PdcchOrder.
21288 * Invoked by: Common Scheduler
21290 * @param[in] RgSchCellCb *cell
21291 * @param[out] CmLListCp *raRspLst,
21292 * @param[in] RgSchUeCb *ue
21293 * @param[in] RgSchRaReqInfo *raReq
21298 PRIVATE Void rgSCHCmnAllocPoHoGrnt
21301 CmLListCp *raRspLst,
21303 RgSchRaReqInfo *raReq
21306 PRIVATE Void rgSCHCmnAllocPoHoGrnt(cell, raRspLst, ue, raReq)
21308 CmLListCp *raRspLst;
21310 RgSchRaReqInfo *raReq;
21313 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21314 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(ue,cell);
21316 RgSchUlSf *sf = &cellUl->ulSfArr[cellUl->msg3SchdIdx];
21318 TRC2(rgSCHCmnAllocPoHoGrnt);
21320 /* Clearing previous allocs if any*/
21321 rgSCHCmnUlUeDelAllocs(cell, ue);
21322 /* Fix : syed allocs are limited */
21323 if (*sf->allocCountRef >= cellUl->maxAllocPerUlSf)
21327 ueUl->alloc.reqBytes = RG_SCH_MIN_GRNT_HOPO;
21328 if (rgSCHCmnUlRbAllocForPoHoUe(cell, sf, ue, RGSCH_MAX_UL_RB) != ROK)
21333 /* Fill grant information */
21334 grnt = &ueUl->alloc.alloc->grnt;
21339 RLOG_ARG1(L_ERROR,DBG_INSTID,cell->instIdx, "Failed to get"
21340 "the grant for HO/PDCCH Order. CRNTI:%d",ue->ueId);
21343 ue->ul.rarGrnt.rapId = raReq->raReq.rapId;
21344 ue->ul.rarGrnt.hop = grnt->hop;
21345 ue->ul.rarGrnt.rbStart = grnt->rbStart;
21346 ue->ul.rarGrnt.numRb = grnt->numRb;
21347 ue->ul.rarGrnt.tpc = grnt->tpc;
21348 ue->ul.rarGrnt.iMcsCrnt = grnt->iMcsCrnt;
21349 ue->ul.rarGrnt.ta.pres = TRUE;
21350 ue->ul.rarGrnt.ta.val = raReq->raReq.ta;
21351 ue->ul.rarGrnt.datSz = grnt->datSz;
21352 if((sf->numACqiCount < RG_SCH_MAX_ACQI_PER_ULSF) && (RG_SCH_APCQI_NO != ue->dl.reqForCqi))
21356 /* Send two bits cqireq field if more than one cells are configured else one*/
21357 for (idx = 1;idx < CM_LTE_MAX_CELLS;idx++)
21359 if (ue->cellInfo[idx] != NULLP)
21361 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
21365 if (idx == CM_LTE_MAX_CELLS)
21368 ue->ul.rarGrnt.cqiReqBit = ue->dl.reqForCqi;
21370 ue->dl.reqForCqi = RG_SCH_APCQI_NO;
21371 sf->numACqiCount++;
21375 ue->ul.rarGrnt.cqiReqBit = 0;
21377 /* Attach Ho/Po allocation to RAR Rsp cont free Lst */
21378 cmLListAdd2Tail(raRspLst, &ue->ul.rarGrnt.raRspLnk);
21379 ue->ul.rarGrnt.raRspLnk.node = (PTR)ue;
21385 * @brief This is a utility function to set the fields in
21386 * a UL harq proc which is identified for non-adaptive retx
21390 * Function: rgSCHCmnUlNonadapRetx
21391 * Purpose: Sets the fields in UL Harq proc for non-adaptive retx
21393 * @param[in] RgSchCmnUlCell *cellUl
21394 * @param[out] RgSchUlAlloc *alloc
21395 * @param[in] U8 idx
21401 PRIVATE Void rgSCHCmnUlNonadapRetx
21403 RgSchCmnUlCell *cellUl,
21404 RgSchUlAlloc *alloc,
21408 PRIVATE Void rgSCHCmnUlNonadapRetx(cellUl, alloc, idx)
21409 RgSchCmnUlCell *cellUl;
21410 RgSchUlAlloc *alloc;
21414 TRC2(rgSCHCmnUlNonadapRetx);
21415 rgSCHUhmRetx(alloc->hqProc, alloc);
21417 /* Update alloc to retx */
21418 alloc->hqProc->isRetx = TRUE;
21419 alloc->hqProc->ulSfIdx = cellUl->reTxIdx[idx];
21421 if (alloc->hqProc->rvIdx != 0)
21423 alloc->grnt.iMcsCrnt = rgSchCmnUlRvIdxToIMcsTbl[alloc->hqProc->rvIdx];
21427 alloc->grnt.iMcsCrnt = alloc->grnt.iMcs;
21429 alloc->grnt.isRtx = TRUE;
21430 alloc->pdcch = NULLP;
21434 * @brief Check if 2 allocs overlap
21438 * Function : rgSCHCmnUlAllocsOvrLap
21440 * - Return TRUE if alloc1 and alloc2 overlap.
21442 * @param[in] RgSchUlAlloc *alloc1
21443 * @param[in] RgSchUlAlloc *alloc2
21447 PRIVATE Bool rgSCHCmnUlAllocsOvrLap
21449 RgSchUlAlloc *alloc1,
21450 RgSchUlAlloc *alloc2
21453 PRIVATE Bool rgSCHCmnUlAllocsOvrLap(alloc1, alloc2)
21454 RgSchUlAlloc *alloc1;
21455 RgSchUlAlloc *alloc2;
21459 TRC2(rgSCHCmnUlAllocsOvrLap);
21461 if (((alloc1->sbStart >= alloc2->sbStart) &&
21462 (alloc1->sbStart <= alloc2->sbStart + alloc2->numSb-1)) ||
21463 ((alloc2->sbStart >= alloc1->sbStart) &&
21464 (alloc2->sbStart <= alloc1->sbStart + alloc1->numSb-1)))
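/* Example: alloc1 spanning subbands 3..5 and alloc2 spanning 5..7 overlap
 * (subband 5 lies in both ranges), whereas 3..4 and 5..7 do not; the check
 * is symmetric in its two arguments. */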
21471 * @brief Copy allocation Info from src to dst.
21475 * Function : rgSCHCmnUlCpyAllocInfo
21477 * - Copy allocation Info from src to dst.
21479 * @param[in] RgSchUlAlloc *srcAlloc
21480 * @param[in] RgSchUlAlloc *dstAlloc
21484 PRIVATE Void rgSCHCmnUlCpyAllocInfo
21487 RgSchUlAlloc *srcAlloc,
21488 RgSchUlAlloc *dstAlloc
21491 PRIVATE Void rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc)
21493 RgSchUlAlloc *srcAlloc;
21494 RgSchUlAlloc *dstAlloc;
21497 RgSchCmnUlUe *ueUl;
21498 TRC2(rgSCHCmnUlCpyAllocInfo);
21500 dstAlloc->grnt = srcAlloc->grnt;
21501 dstAlloc->hqProc = srcAlloc->hqProc;
21502 /* Fix : syed During UE context release, hqProc->alloc
21503 * was pointing to srcAlloc instead of dstAlloc and
21504 * freeing from incorrect sf->allocDb was
21505 * corrupting the list. */
21506 /* In case of an SPS occasion, the allocation is done in advance and
21507 the HARQ proc is linked at a later time. Hence the hqProc
21508 pointer in alloc shall be NULL */
21510 if (dstAlloc->hqProc)
21513 dstAlloc->hqProc->alloc = dstAlloc;
21515 dstAlloc->ue = srcAlloc->ue;
21516 dstAlloc->rnti = srcAlloc->rnti;
21517 dstAlloc->forMsg3 = srcAlloc->forMsg3;
21518 dstAlloc->raCb = srcAlloc->raCb;
21519 dstAlloc->pdcch = srcAlloc->pdcch;
21520 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
21523 ueUl = RG_SCH_CMN_GET_UL_UE(dstAlloc->ue,cell);
21524 ueUl->alloc.alloc = dstAlloc;
21526 if (dstAlloc->ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
21528 if((dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc != NULLP)
21529 && (dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc == srcAlloc))
21531 dstAlloc->ue->ul.ulSpsInfo.ulSpsSchdInfo.crntAlloc = dstAlloc;
21540 * @brief Update TX and RETX subframe's allocation
21545 * Function : rgSCHCmnUlInsAllocFrmNewSf2OldSf
21547 * - Release all preassigned allocations of newSf and merge
21549 * - If an alloc of newSf collides with one or more allocs of oldSf
21550 * - mark all such allocs of oldSf for Adaptive Retx.
21551 * - Swap the alloc and hole DB references of oldSf and newSf.
21553 * @param[in] RgSchCellCb *cell
21554 * @param[in] RgSchUlSf *newSf
21555 * @param[in] RgSchUlSf *oldSf
21556 * @param[in] RgSchUlAlloc *srcAlloc
21560 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf
21565 RgSchUlAlloc *srcAlloc
21568 PRIVATE Void rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, srcAlloc)
21572 RgSchUlAlloc *srcAlloc;
21575 RgSchUlAlloc *alloc, *dstAlloc, *nxtAlloc;
21577 /* MS_WORKAROUND ccpu00120827 */
21578 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
21580 TRC2(rgSCHCmnUlInsAllocFrmNewSf2OldSf);
21582 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21586 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
21587 /* If there is an overlap between alloc and srcAlloc
21588 * then alloc is marked for Adaptive retx and it is released
21590 if (rgSCHCmnUlAllocsOvrLap(alloc, srcAlloc) == TRUE)
21592 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21593 rgSCHUtlUlAllocRls(oldSf, alloc);
21595 /* No further allocs spanning the srcAlloc subbands */
21596 if (srcAlloc->sbStart + srcAlloc->numSb - 1 <= alloc->sbStart)
21600 } while ((alloc = nxtAlloc) != NULLP);
21604    /* After freeing all the colliding allocs, request an allocation
21605     * specifying the start and numSb within txSf. This function should
21606     * always return positively with a non-NULL dstAlloc */
21606 /* MS_WORKAROUND ccpu00120827 */
21607 remAllocs = schCmnCell->ul.maxAllocPerUlSf - *oldSf->allocCountRef;
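   /* Illustrative sketch (hypothetical values): with maxAllocPerUlSf = 8 and
    * *oldSf->allocCountRef = 8, remAllocs becomes 0, so the block below first
    * frees an existing RETX alloc from oldSf to make room for the merged
    * new-TX (Msg3) alloc, which is given higher priority. */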
21610 /* Fix : If oldSf already has max Allocs then release the
21611 * old RETX alloc to make space for new alloc of newSf.
21612 * newSf allocs(i.e new Msg3s) are given higher priority
21613 * over retx allocs. */
21614 if ((alloc = rgSCHUtlUlAllocFirst(oldSf)) != NULLP)
21618 nxtAlloc = rgSCHUtlUlAllocNxt(oldSf, alloc);
21619 if (!alloc->mrgdNewTxAlloc)
21621 /* If alloc is for RETX */
21622               /* TODO: In this case, and also when choosing an
21623                * alloc for ADAP RETX, we need to send an ACK for
21624                * the corresponding alloc on PHICH */
21625 #ifndef EMTC_ENABLE
21626 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc);
21628 rgSCHCmnUlFreeAllocation(cell, oldSf, alloc,FALSE);
21632 }while((alloc = nxtAlloc) != NULLP);
21635 dstAlloc = rgSCHUtlUlGetSpfcAlloc(oldSf, srcAlloc->sbStart, srcAlloc->numSb);
21637 /* This should never happen */
21638 if (dstAlloc == NULLP)
21640 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"CRNTI:%d "
21641 "rgSCHUtlUlGetSpfcAlloc failed in rgSCHCmnUlInsAllocFrmNewSf2OldSf",
21646 /* Copy the srcAlloc's state information in to dstAlloc */
21647 rgSCHCmnUlCpyAllocInfo(cell, srcAlloc, dstAlloc);
21648 /* Set new Tx merged Alloc Flag to TRUE, indicating that this
21649 * alloc shall not be processed for non-adaptive retransmission */
21650 dstAlloc->mrgdNewTxAlloc = TRUE;
21654 * @brief Merge all allocations of newSf to oldSf.
21658 * Function : rgSCHCmnUlMergeSfAllocs
21660 * - Merge all allocations of newSf to oldSf.
21661 * - If newSf's alloc collides with oldSf's alloc
21662 * then oldSf's alloc is marked for adaptive Retx
21663 * and is released from oldSf to create space for
21666 * @param[in] RgSchCellCb *cell
21667 * @param[in] RgSchUlSf *oldSf
21668 * @param[in] RgSchUlSf *newSf
21672 PRIVATE Void rgSCHCmnUlMergeSfAllocs
21679 PRIVATE Void rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf)
21685 RgSchUlAlloc *alloc, *nxtAlloc;
21686 TRC2(rgSCHCmnUlMergeSfAllocs);
21689 /* Merge each alloc of newSf in to oldSf
21690 * and release it from newSf */
21691 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21695 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21696 rgSCHCmnUlInsAllocFrmNewSf2OldSf(cell, newSf, oldSf, alloc);
21697 rgSCHUtlUlAllocRls(newSf, alloc);
21698 } while((alloc = nxtAlloc) != NULLP);
21703 * @brief Swap Hole/Alloc DB context of newSf and oldSf.
21707 * Function : rgSCHCmnUlSwapSfAllocs
21709 * - Swap Hole/Alloc DB context of newSf and oldSf.
21711 * @param[in] RgSchCellCb *cell
21712 * @param[in] RgSchUlSf *oldSf
21713 * @param[in] RgSchUlSf *newSf
21717 PRIVATE Void rgSCHCmnUlSwapSfAllocs
21724 PRIVATE Void rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf)
21730 RgSchUlAllocDb *tempAllocDb = newSf->allocDb;
21731 RgSchUlHoleDb *tempHoleDb = newSf->holeDb;
21732 U8 tempAvailSbs = newSf->availSubbands;
21734 TRC2(rgSCHCmnUlSwapSfAllocs);
21737 newSf->allocDb = oldSf->allocDb;
21738 newSf->holeDb = oldSf->holeDb;
21739 newSf->availSubbands = oldSf->availSubbands;
21741 oldSf->allocDb = tempAllocDb;
21742 oldSf->holeDb = tempHoleDb;
21743 oldSf->availSubbands = tempAvailSbs;
21745 /* Fix ccpu00120610*/
21746 newSf->allocCountRef = &newSf->allocDb->count;
21747 oldSf->allocCountRef = &oldSf->allocDb->count;
21751 * @brief Perform non-adaptive RETX for non-colliding allocs.
21755 * Function : rgSCHCmnUlPrcNonAdptRetx
21757 * - Perform non-adaptive RETX for non-colliding allocs.
21759 * @param[in] RgSchCellCb *cell
21760 * @param[in] RgSchUlSf *newSf
21761 * @param[in] U8 idx
21765 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx
21772 PRIVATE Void rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx)
21778 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21779 RgSchUlAlloc *alloc, *nxtAlloc;
21780 TRC2(rgSCHCmnUlPrcNonAdptRetx);
21782 /* perform non-adaptive retx allocation(adjustment) */
21783 if ((alloc = rgSCHUtlUlAllocFirst(newSf)) != NULLP)
21787 nxtAlloc = rgSCHUtlUlAllocNxt(newSf, alloc);
21788 /* A merged new TX alloc, reset the state and skip */
21789 if (alloc->mrgdNewTxAlloc)
21791 alloc->mrgdNewTxAlloc = FALSE;
21796 rgSCHCmnUlNonadapRetx(cellUl, alloc, idx);
21798 } while((alloc = nxtAlloc) != NULLP);
21804 * @brief Update TX and RETX subframe's allocation
21809 * Function : rgSCHCmnUlPrfmSfMerge
21811 * - Release all preassigned allocations of newSf and merge
21813 * - If alloc of newSf collide with one or more allocs of oldSf
21814 * - mark all such allocs of oldSf for Adaptive Retx.
21815 * - Swap the alloc and hole DB references of oldSf and newSf.
21816 * - The allocs which did not collide with pre-assigned msg3
21817 * allocs are marked for non-adaptive RETX.
21819 * @param[in] RgSchCellCb *cell
21820 * @param[in] RgSchUlSf *oldSf
21821 * @param[in] RgSchUlSf *newSf
21822 * @param[in] U8 idx
21826 PRIVATE Void rgSCHCmnUlPrfmSfMerge
21834 PRIVATE Void rgSCHCmnUlPrfmSfMerge(cell, oldSf, newSf, idx)
21841 TRC2(rgSCHCmnUlPrfmSfMerge);
21842 /* Preassigned resources for msg3 in newSf.
21843 * Hence do adaptive retx for all NACKED TXs */
21844 rgSCHCmnUlMergeSfAllocs(cell, oldSf, newSf);
21845 /* swap alloc and hole DBs of oldSf and newSf. */
21846 rgSCHCmnUlSwapSfAllocs(cell, oldSf, newSf);
21847 /* Here newSf has the resultant merged allocs context */
21848 /* Perform non-adaptive RETX for non-colliding allocs */
21849 rgSCHCmnUlPrcNonAdptRetx(cell, newSf, idx);
21855 * @brief Update TX and RETX subframe's allocation
21860 * Function : rgSCHCmnUlRmvCmpltdAllocs
21862  *     - Free all transmissions which are ACKed
21863  *       OR for which MAX retransmissions have
21867 * @param[in] RgSchCellCb *cell,
21868 * @param[in] RgSchUlSf *sf
21872 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs
21878 PRIVATE Void rgSCHCmnUlRmvCmpltdAllocs(cell, sf)
21883 RgSchUlAlloc *alloc, *nxtAlloc;
21884 TRC2(rgSCHCmnUlRmvCmpltdAllocs);
21886 if ((alloc = rgSCHUtlUlAllocFirst(sf)) == NULLP)
21892 nxtAlloc = rgSCHUtlUlAllocNxt(sf, alloc);
21894 printf("rgSCHCmnUlRmvCmpltdAllocs:time(%d %d) alloc->hqProc->remTx %d hqProcId(%d) \n",cell->crntTime.sfn,cell->crntTime.slot,alloc->hqProc->remTx, alloc->grnt.hqProcId);
21896 alloc->hqProc->rcvdCrcInd = TRUE;
21897 if ((alloc->hqProc->rcvdCrcInd) || (alloc->hqProc->remTx == 0))
21900 /* SR_RACH_STATS : MSG 3 MAX RETX FAIL*/
21901 if ((alloc->forMsg3 == TRUE) && (alloc->hqProc->remTx == 0))
21903 rgNumMsg3FailMaxRetx++;
21905 cell->tenbStats->sch.msg3Fail++;
21909 #ifdef MAC_SCH_STATS
21910 if(alloc->ue != NULLP)
21912 /* access from ulHarqProc*/
21913 RgSchUeCb *ueCb = alloc->ue;
21914 RgSchCmnUe *cmnUe = (RgSchCmnUe*)ueCb->sch;
21915 RgSchCmnUlUe *ulUe = &(cmnUe->ul);
21916 U8 cqi = ulUe->crntUlCqi[0];
21917 U16 numUlRetx = ueCb->ul.hqEnt.maxHqRetx - alloc->hqProc->remTx;
21919 hqRetxStats.ulCqiStat[(cqi - 1)].mcs = alloc->grnt.iMcs;
21924 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1++;
21927 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2++;
21930 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3++;
21933 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4++;
21936 hqRetxStats.ulCqiStat[(cqi - 1)].totalTx = \
21937 hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_1 + \
21938 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_2 * 2) + \
21939 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_3 * 3) + \
21940 (hqRetxStats.ulCqiStat[(cqi - 1)].numOfHQ_4 * 4);
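         /* Illustrative sketch (hypothetical counters): if numOfHQ_1 = 10,
          * numOfHQ_2 = 4, numOfHQ_3 = 2 and numOfHQ_4 = 1, then
          * totalTx = 10 + (4 * 2) + (2 * 3) + (1 * 4) = 28 transmissions,
          * i.e. each bucket is weighted by the number of HARQ transmissions
          * it represents. */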
21943 #endif /*MAC_SCH_STATS*/
21944 rgSCHCmnUlFreeAllocation(cell, sf, alloc);
21946 /*ccpu00106104 MOD added check for AckNackRep */
21947 /*added check for acknack so that adaptive retx considers ue
21948 inactivity due to ack nack repetition*/
21949 else if((alloc->ue != NULLP) && (TRUE != alloc->forMsg3))
21951 rgSCHCmnUlUpdAllocRetx(cell, alloc);
21952 rgSCHUtlUlAllocRls(sf, alloc);
21954 } while ((alloc = nxtAlloc) != NULLP);
21960 * @brief Update an uplink subframe.
21964 * Function : rgSCHCmnRlsUlSf
21966 * For each allocation
21967 * - if no more tx needed
21968 * - Release allocation
21970 * - Perform retransmission
21972 * @param[in] RgSchUlSf *sf
21973 * @param[in] U8 idx
21977 PUBLIC Void rgSCHCmnRlsUlSf
21983 PUBLIC Void rgSCHCmnRlsUlSf(cell, idx)
21988 TRC2(rgSCHCmnRlsUlSf);
21990 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
21992 if (cellUl->hqFdbkIdx[idx] != RGSCH_INVALID_INFO)
21994 RgSchUlSf *oldSf = &cellUl->ulSfArr[cellUl->hqFdbkIdx[idx]];
21996 /* Initialize the reTxLst of UL HqProcs for RETX subframe */
21997 if (rgSCHUtlUlAllocFirst(oldSf) == NULLP)
22001 /* Release all completed TX allocs from sf */
22002 rgSCHCmnUlRmvCmpltdAllocs(cell, oldSf);
22004 oldSf->numACqiCount = 0;
22010 * @brief Handle uplink allocation for retransmission.
22014 * Function : rgSCHCmnUlUpdAllocRetx
22016 * - Perform adaptive retransmission
22018 * @param[in] RgSchUlSf *sf
22019 * @param[in] RgSchUlAlloc *alloc
22023 PRIVATE Void rgSCHCmnUlUpdAllocRetx
22026 RgSchUlAlloc *alloc
22029 PRIVATE Void rgSCHCmnUlUpdAllocRetx(cell, alloc)
22031 RgSchUlAlloc *alloc;
22034 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
22036 TRC2(rgSCHCmnUlUpdAllocRetx);
22038 alloc->hqProc->reTxAlloc.rnti = alloc->rnti;
22039 alloc->hqProc->reTxAlloc.numSb = alloc->numSb;
22040 alloc->hqProc->reTxAlloc.iMcs = alloc->grnt.iMcs;
22042 alloc->hqProc->reTxAlloc.dciFrmt = alloc->grnt.dciFrmt;
22043 alloc->hqProc->reTxAlloc.numLyr = alloc->grnt.numLyr;
22044 alloc->hqProc->reTxAlloc.vrbgStart = alloc->grnt.vrbgStart;
22045 alloc->hqProc->reTxAlloc.numVrbg = alloc->grnt.numVrbg;
22046 alloc->hqProc->reTxAlloc.modOdr = alloc->grnt.modOdr;
22048 //iTbs = rgSCHCmnUlGetITbsFrmIMcs(alloc->grnt.iMcs);
22049 //iTbs = alloc->grnt.iMcs;
22050 //RGSCH_ARRAY_BOUND_CHECK( 0, rgTbSzTbl[0], iTbs);
22051 alloc->hqProc->reTxAlloc.tbSz = alloc->grnt.datSz;
22052 //rgTbSzTbl[0][iTbs][alloc->grnt.numRb-1]/8;
22053 alloc->hqProc->reTxAlloc.ue = alloc->ue;
22054 alloc->hqProc->reTxAlloc.forMsg3 = alloc->forMsg3;
22055 alloc->hqProc->reTxAlloc.raCb = alloc->raCb;
22057 /* Set as retransmission is pending */
22058 alloc->hqProc->isRetx = TRUE;
22059 alloc->hqProc->alloc = NULLP;
22060 alloc->hqProc->ulSfIdx = RGSCH_INVALID_INFO;
22062 printf("Adding Harq Proc Id in the retx list hqProcId %d \n",alloc->grnt.hqProcId);
22064 cmLListAdd2Tail(&cmnUlCell->reTxLst, &alloc->hqProc->reTxLnk);
22065 alloc->hqProc->reTxLnk.node = (PTR)alloc->hqProc;
22070 * @brief Attempts allocation for msg3s for which ADAP retransmissions
22075 * Function : rgSCHCmnUlAdapRetxAlloc
22077 * Attempts allocation for msg3s for which ADAP retransmissions
22080 * @param[in] RgSchCellCb *cell
22081 * @param[in] RgSchUlSf *sf
22082 * @param[in] RgSchUlHqProcCb *proc;
22083 * @param[in] RgSchUlHole *hole;
22087 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc
22091 RgSchUlHqProcCb *proc,
22095 PRIVATE Bool rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole)
22098 RgSchUlHqProcCb *proc;
22102 U8 numSb = proc->reTxAlloc.numSb;
22103 U8 iMcs = proc->reTxAlloc.iMcs;
22104 CmLteTimingInfo frm = cell->crntTime;
22105 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22108 RgSchUlAlloc *alloc;
22109 TRC2(rgSCHCmnUlAdapRetxAlloc);
22111 /* Fetch PDCCH for msg3 */
22112 /* ccpu00116293 - Correcting relation between UL subframe and DL subframe based on RG_UL_DELTA*/
22113 /* Introduced timing delta for UL control */
22114 RGSCH_INCR_SUB_FRAME(frm, TFU_ULCNTRL_DLDELTA);
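   /* Illustrative sketch (hypothetical timing): if cell->crntTime is
    * {sfn = 100, subframe = 2} and TFU_ULCNTRL_DLDELTA is 2, the PDCCH for
    * this adaptive retransmission is fetched from the DL subframe at
    * {sfn = 100, subframe = 4}. */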
22115 dlSf = rgSCHUtlSubFrmGet(cell, frm);
22116 pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
22117 if (pdcch == NULLP)
22122 /* Fetch UL Alloc for msg3 */
22123 if (numSb <= hole->num)
22125 alloc = rgSCHUtlUlAllocGetHole(sf, numSb, hole);
22130 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22131 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
22132 "UL Alloc fail for msg3 retx for rnti: %d\n",
22133 proc->reTxAlloc.rnti);
22137 rgSCHCmnUlAllocFillRbInfo(cell, sf, alloc);
22138 alloc->grnt.iMcs = iMcs;
22139 alloc->grnt.datSz = proc->reTxAlloc.tbSz;
22142 //RG_SCH_UL_MCS_TO_MODODR(iMcs, alloc->grnt.modOdr);
22144 /* Fill UL Alloc for msg3 */
22145 /* RACHO : setting nDmrs to 0 and UlDelaybit to 0*/
22146 alloc->grnt.nDmrs = 0;
22147 alloc->grnt.hop = 0;
22148 alloc->grnt.delayBit = 0;
22149 alloc->grnt.isRtx = TRUE;
22150 proc->ulSfIdx = cellUl->schdIdx;
22152 proc->schdTime = cellUl->schdTime;
22153 alloc->grnt.hqProcId = proc->procId;
22154 alloc->grnt.dciFrmt = proc->reTxAlloc.dciFrmt;
22155 alloc->grnt.numLyr = proc->reTxAlloc.numLyr;
22156 alloc->grnt.vrbgStart = proc->reTxAlloc.vrbgStart;
22157 alloc->grnt.numVrbg = proc->reTxAlloc.numVrbg;
22158 alloc->grnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG, alloc->grnt.vrbgStart, alloc->grnt.numVrbg);
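      /* Assumption (illustrative only): rgSCHCmnCalcRiv() is taken here to
       * follow the conventional resource indication value (RIV) encoding,
       * i.e. for N VRB groups, start S and length L:
       * RIV = N * (L - 1) + S when (L - 1) <= N / 2. With hypothetical
       * values N = 25, S = 3, L = 4 this gives RIV = 25 * 3 + 3 = 78.
       * The exact encoding is defined by rgSCHCmnCalcRiv() itself. */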
22159 alloc->grnt.modOdr = proc->reTxAlloc.modOdr;
22161 /* TODO : Hardcoding these as of now */
22162 alloc->grnt.hop = 0;
22163 alloc->grnt.SCID = 0;
22164 alloc->grnt.xPUSCHRange = MAX_5GTF_XPUSCH_RANGE;
22165 alloc->grnt.PMI = 0;
22166 alloc->grnt.uciOnxPUSCH = 0;
22168 alloc->rnti = proc->reTxAlloc.rnti;
22169 /* Fix : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
22170 alloc->ue = proc->reTxAlloc.ue;
22171 alloc->pdcch = pdcch;
22172 alloc->forMsg3 = proc->reTxAlloc.forMsg3;
22173 alloc->raCb = proc->reTxAlloc.raCb;
22174 alloc->hqProc = proc;
22175 alloc->isAdaptive = TRUE;
22177 sf->totPrb += alloc->grnt.numRb;
22179 /* FIX : syed HandIn Ue has forMsg3 and ue Set, but no RaCb */
22182 alloc->raCb->msg3Grnt= alloc->grnt;
22184 /* To the crntTime, add the time at which UE will
22185 * actually send MSG3 */
22186 alloc->raCb->msg3AllocTime = cell->crntTime;
22187 RGSCH_INCR_SUB_FRAME(alloc->raCb->msg3AllocTime, RG_SCH_CMN_MIN_RETXMSG3_RECP_INTRVL);
22189 alloc->raCb->msg3AllocTime = cellUl->schdTime;
22191 rgSCHCmnUlAdapRetx(alloc, proc);
22192 /* Fill PDCCH with alloc info */
22193 pdcch->rnti = alloc->rnti;
22194 pdcch->dci.dciFormat = TFU_DCI_FORMAT_0;
22195 pdcch->dci.u.format0Info.hoppingEnbld = alloc->grnt.hop;
22196 pdcch->dci.u.format0Info.rbStart = alloc->grnt.rbStart;
22197 pdcch->dci.u.format0Info.numRb = alloc->grnt.numRb;
22198 pdcch->dci.u.format0Info.mcs = alloc->grnt.iMcsCrnt;
22199 pdcch->dci.u.format0Info.ndi = alloc->hqProc->ndi;
22200 pdcch->dci.u.format0Info.nDmrs = alloc->grnt.nDmrs;
22201 pdcch->dci.u.format0Info.tpcCmd = alloc->grnt.tpc;
22205 /* ulIdx setting for cfg 0 shall be appropriately fixed thru ccpu00109015 */
22206 pdcch->dci.u.format0Info.ulIdx = RG_SCH_ULIDX_MSB;
22207 pdcch->dci.u.format0Info.dai = RG_SCH_MAX_DAI_IDX;
22210 pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_0];
22214 RgSchCmnUlUe *ueUl = RG_SCH_CMN_GET_UL_UE(alloc->ue,cell);
22216 alloc->ue->initNumRbs = (alloc->grnt.numVrbg * MAX_5GTF_VRBG_SIZE);
22219 ue->ul.nPrb = alloc->grnt.numRb;
22221 ueUl->alloc.alloc = alloc;
22222 /* FIx: Removed the call to rgSCHCmnUlAdapRetx */
22223 rgSCHCmnUlUeFillAllocInfo(cell, alloc->ue);
22224 /* Setting csireq as false for Adaptive Retx*/
22225 ueUl->alloc.alloc->pdcch->dci.u.format0Info.cqiReq = RG_SCH_APCQI_NO;
22226 pdcch->dciNumOfBits = alloc->ue->dciSize.cmnSize[TFU_DCI_FORMAT_0];
22228 /* Reset as retransmission is done */
22229 proc->isRetx = FALSE;
22231 else /* Intg fix */
22233 rgSCHUtlPdcchPut(cell, &dlSf->pdcchInfo, pdcch);
22234 RLOG_ARG1(L_DEBUG,DBG_CELLID,cell->cellId,
22235 "Num SB not suffiecient for adap retx for rnti: %d",
22236 proc->reTxAlloc.rnti);
22242 /* Fix: syed Adaptive Msg3 Retx crash. */
22244 * @brief Releases all Adaptive Retx HqProcs which failed for
22245  *        allocations in this scheduling occasion.
22249 * Function : rgSCHCmnUlSfRlsRetxProcs
22252 * @param[in] RgSchCellCb *cell
22253 * @param[in] RgSchUlSf *sf
22258 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs
22264 PRIVATE Void rgSCHCmnUlSfRlsRetxProcs(cell, sf)
22271 RgSchUlHqProcCb *proc;
22272 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22274 TRC2(rgSCHCmnUlSfRlsRetxProcs);
22276 cp = &(cellUl->reTxLst);
22280 proc = (RgSchUlHqProcCb *)node->node;
22282 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
22283 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22284 proc->reTxLnk.node = (PTR)NULLP;
22291 * @brief Attempts allocation for UEs for which retransmissions
22296 * Function : rgSCHCmnUlSfReTxAllocs
22298 * Attempts allocation for UEs for which retransmissions
22301 * @param[in] RgSchCellCb *cell
22302 * @param[in] RgSchUlSf *sf
22306 PRIVATE Void rgSCHCmnUlSfReTxAllocs
22312 PRIVATE Void rgSCHCmnUlSfReTxAllocs(cell, sf)
22319 RgSchUlHqProcCb *proc;
22322 RgSchCmnCell *schCmnCell = (RgSchCmnCell *)(cell->sc.sch);
22323 RgSchCmnUlCell *cellUl = RG_SCH_CMN_GET_UL_CELL(cell);
22324 TRC2(rgSCHCmnUlSfReTxAllocs);
22326 cp = &(cellUl->reTxLst);
22330 proc = (RgSchUlHqProcCb *)node->node;
22331 ue = proc->reTxAlloc.ue;
22333 /*ccpu00106104 MOD added check for AckNackRep */
22334 /*added check for acknack so that adaptive retx considers ue
22335 inactivity due to ack nack repetition*/
22336 if((ue != NULLP) &&
22337 ((ue->measGapCb.isMeasuring == TRUE)||
22338 (ue->ackNakRepCb.isAckNakRep == TRUE)))
22342 /* Fix for ccpu00123917: Check if maximum allocs per UL sf have been exhausted */
22343 if (((hole = rgSCHUtlUlHoleFirst(sf)) == NULLP)
22344 || (sf->allocDb->count == schCmnCell->ul.maxAllocPerUlSf))
22346 /* No more UL BW then return */
22349 /* perform adaptive retx for UE's */
22350 if (rgSCHCmnUlAdapRetxAlloc(cell, sf, proc, hole) == FALSE)
22354 /* ccpu00137834 : Deleting reTxLnk from the respective reTxLst */
22355 cmLListDelFrm(&cellUl->reTxLst, &proc->reTxLnk);
22356 /* Fix: syed Adaptive Msg3 Retx crash. */
22357 proc->reTxLnk.node = (PTR)NULLP;
22363 * @brief Handles RB allocation for downlink.
22367 * Function : rgSCHCmnDlRbAlloc
22369 * Invoking Module Processing:
22370 * - This function is invoked for DL RB allocation
22372 * Processing Steps:
22373 * - If cell is frequency selecive,
22374 * - Call rgSCHDlfsAllocRb().
22376 * - Call rgSCHCmnNonDlfsRbAlloc().
22378 * @param[in] RgSchCellCb *cell
22379 * @param[in] RgSchDlRbAllocInfo *allocInfo
22384 PRIVATE Void rgSCHCmnDlRbAlloc
22387 RgSchCmnDlRbAllocInfo *allocInfo
22390 PRIVATE Void rgSCHCmnDlRbAlloc(cell, allocInfo)
22392 RgSchCmnDlRbAllocInfo *allocInfo;
22395 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
22396 TRC2(rgSCHCmnDlRbAlloc);
22398 if (cellSch->dl.isDlFreqSel)
22400 printf("5GTF_ERROR DLFS SCH Enabled\n");
22401 cellSch->apisDlfs->rgSCHDlfsAllocRb(cell, allocInfo);
22405 rgSCHCmnNonDlfsRbAlloc(cell, allocInfo);
22413 * @brief Determines number of RBGs and RBG subset sizes for the given DL
22414 * bandwidth and rbgSize
22417 * Function : rgSCHCmnDlGetRbgInfo
22420 * Processing Steps:
22421 * - Fill-up rbgInfo data structure for given DL bandwidth and rbgSize
22423 * @param[in] U8 dlTotalBw
22424 * @param[in] U8 dlSubsetBw
22425 * @param[in] U8 maxRaType1SubsetBw
22426 * @param[in] U8 rbgSize
22427 * @param[out] RgSchBwRbgInfo *rbgInfo
22431 PUBLIC Void rgSCHCmnDlGetRbgInfo
22435 U8 maxRaType1SubsetBw,
22437 RgSchBwRbgInfo *rbgInfo
22440 PUBLIC Void rgSCHCmnDlGetRbgInfo(dlTotalBw, dlSubsetBw, maxRaType1SubsetBw,
22444 U8 maxRaType1SubsetBw;
22446 RgSchBwRbgInfo *rbgInfo;
22449 #ifdef RGSCH_SPS_UNUSED
22451 U8 lastRbgIdx = ((dlTotalBw + rbgSize - 1)/rbgSize) - 1;
22452 U8 currRbgSize = rbgSize;
22453 U8 subsetSizeIdx = 0;
22454 U8 subsetSize[RG_SCH_NUM_RATYPE1_SUBSETS] = {0};
22455 U8 lastRbgSize = rbgSize - (dlTotalBw - ((dlTotalBw/rbgSize) * rbgSize));
22456 U8 numRaType1Rbgs = (maxRaType1SubsetBw + rbgSize - 1)/rbgSize;
22459 /* Compute maximum number of SPS RBGs for the cell */
22460 rbgInfo->numRbgs = ((dlSubsetBw + rbgSize - 1)/rbgSize);
22462 #ifdef RGSCH_SPS_UNUSED
22463 /* Distribute RBGs across subsets except last RBG */
22464 for (;idx < numRaType1Rbgs - 1; ++idx)
22466 subsetSize[subsetSizeIdx] += currRbgSize;
22467 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22470 /* Computation for last RBG */
22471 if (idx == lastRbgIdx)
22473 currRbgSize = lastRbgSize;
22475 subsetSize[subsetSizeIdx] += currRbgSize;
22476 subsetSizeIdx = (subsetSizeIdx + 1) % rbgSize;
22479 /* Update the computed sizes */
22480 #ifdef RGSCH_SPS_UNUSED
22481 rbgInfo->lastRbgSize = currRbgSize;
22483 rbgInfo->lastRbgSize = rbgSize -
22484 (dlSubsetBw - ((dlSubsetBw/rbgSize) * rbgSize));
22485 #ifdef RGSCH_SPS_UNUSED
22486 cmMemcpy((U8 *)rbgInfo->rbgSubsetSize, (U8 *) subsetSize, 4 * sizeof(U8));
22488 rbgInfo->numRbs = (rbgInfo->numRbgs * rbgSize > dlTotalBw) ?
22489 dlTotalBw:(rbgInfo->numRbgs * rbgSize);
22490 rbgInfo->rbgSize = rbgSize;
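   /* Illustrative sketch (hypothetical values): with dlSubsetBw = 25 and
    * rbgSize = 2, numRbgs = (25 + 1) / 2 = 13, and the last RBG then holds
    * the single remaining RB; numRbs is capped at dlTotalBw if
    * 13 * 2 exceeds it. The computed sizes are consumed by the RA type
    * 0/1/2 allocation routines below. */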
22494 * @brief Handles RB allocation for Resource allocation type 0
22498 * Function : rgSCHCmnDlRaType0Alloc
22500 * Invoking Module Processing:
22501 * - This function is invoked for DL RB allocation for resource allocation
22504 * Processing Steps:
22505 * - Determine the available positions in the rbgMask.
22506 * - Allocate RBGs in the available positions.
22507 * - Update RA Type 0, RA Type 1 and RA type 2 masks.
22509 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22510 * @param[in] U8 rbsReq
22511 * @param[in] RgSchBwRbgInfo *rbgInfo
22512 * @param[out] U8 *numAllocRbs
22513 * @param[out] RgSchDlSfAllocInfo *resAllocInfo
22514 * @param[in] Bool isPartialAlloc
22520 PUBLIC U8 rgSCHCmnDlRaType0Alloc
22522 RgSchDlSfAllocInfo *allocedInfo,
22524 RgSchBwRbgInfo *rbgInfo,
22526 RgSchDlSfAllocInfo *resAllocInfo,
22527 Bool isPartialAlloc
22530 PUBLIC U8 rgSCHCmnDlRaType0Alloc(allocedInfo, rbsReq, rbgInfo,
22531 numAllocRbs, resAllocInfo, isPartialAlloc)
22532 RgSchDlSfAllocInfo *allocedInfo;
22534 RgSchBwRbgInfo *rbgInfo;
22536 RgSchDlSfAllocInfo *resAllocInfo;
22537 Bool isPartialAlloc;
22540    /* Note: This function attempts only full allocation */
22541 U32 remNumRbs, rbgPosInRbgMask, ueRaType2Mask;
22542 U8 type2MaskIdx, cnt, rbIdx;
22544 U8 bestNumAvailRbs = 0;
22546 U8 numAllocRbgs = 0;
22547 U8 rbgSize = rbgInfo->rbgSize;
22548 U32 *rbgMask = &(resAllocInfo->raType0Mask);
22549 #ifdef RGSCH_SPS_UNUSED
22552 U32 *raType1Mask = resAllocInfo->raType1Mask;
22553 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22555 U32 *raType2Mask = resAllocInfo->raType2Mask;
22557 U32 allocedMask = allocedInfo->raType0Mask;
22559 maskSize = rbgInfo->numRbgs;
22562 RG_SCH_CMN_DL_COUNT_ONES(allocedMask, maskSize, &usedRbs);
22563 if (maskSize == usedRbs)
22565 /* All RBGs are allocated, including the last one */
22570 remNumRbs = (maskSize - usedRbs - 1) * rbgSize; /* vamsee: removed minus 1 */
22572 /* If last RBG is available, add last RBG size */
22573 if (!(allocedMask & (1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(maskSize - 1))))
22575 remNumRbs += rbgInfo->lastRbgSize;
22579 /* If complete allocation is needed, check if total requested RBs are available else
22580 * check the best available RBs */
22581 if (!isPartialAlloc)
22583 if (remNumRbs >= rbsReq)
22585 bestNumAvailRbs = rbsReq;
22590 bestNumAvailRbs = remNumRbs > rbsReq ? rbsReq : remNumRbs;
22593 /* Allocate for bestNumAvailRbs */
22594 if (bestNumAvailRbs)
22596 for (rbg = 0; rbg < maskSize - 1; ++rbg)
22598 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22599 if (!(allocedMask & rbgPosInRbgMask))
22601 /* Update RBG mask */
22602 *rbgMask |= rbgPosInRbgMask;
22604 /* Compute RB index of the first RB of the RBG allocated */
22605 rbIdx = rbg * rbgSize;
22607 for (cnt = 0; cnt < rbgSize; ++cnt)
22609 #ifdef RGSCH_SPS_UNUSED
22610 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22612 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22613 #ifdef RGSCH_SPS_UNUSED
22614 /* Update RBG mask for RA type 1 */
22615 raType1Mask[rbgSubset] |= ueRaType1Mask;
22616 raType1UsedRbs[rbgSubset]++;
22618 /* Update RA type 2 mask */
22619 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22622 *numAllocRbs += rbgSize;
22623 remNumRbs -= rbgSize;
22625 if (*numAllocRbs >= bestNumAvailRbs)
22631 /* If last RBG available and allocation is not completed, allocate
22633 if (*numAllocRbs < bestNumAvailRbs)
22635 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22636 *rbgMask |= rbgPosInRbgMask;
22637 *numAllocRbs += rbgInfo->lastRbgSize;
22639 /* Compute RB index of the first RB of the last RBG */
22640 rbIdx = ((rbgInfo->numRbgs - 1 ) * rbgSize ); /* removed minus 1 vamsee */
22642 for (cnt = 0; cnt < rbgInfo->lastRbgSize; ++cnt)
22644 #ifdef RGSCH_SPS_UNUSED
22645 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22647 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22648 #ifdef RGSCH_SPS_UNUSED
22649 /* Update RBG mask for RA type 1 */
22650 raType1Mask[rbgSubset] |= ueRaType1Mask;
22651 raType1UsedRbs[rbgSubset]++;
22653 /* Update RA type 2 mask */
22654 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22657 remNumRbs -= rbgInfo->lastRbgSize;
22660 /* Note: this should complete allocation, not checking for the
22664 return (numAllocRbgs);
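   /* Usage sketch (hypothetical values): for a 20-RBG mask with RBGs 0 and 1
    * already allocated and rbsReq = 6 with rbgSize = 3, the loop above picks
    * the first two free RBGs (2 and 3), sets their bits in raType0Mask,
    * updates the RA type 2 mask for RBs 6..11 and returns numAllocRbgs = 2
    * with *numAllocRbs = 6. */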
22667 #ifdef RGSCH_SPS_UNUSED
22669 * @brief Handles RB allocation for Resource allocation type 1
22673 * Function : rgSCHCmnDlRaType1Alloc
22675 * Invoking Module Processing:
22676 * - This function is invoked for DL RB allocation for resource allocation
22679 * Processing Steps:
22680 * - Determine the available positions in the subsets.
22681 * - Allocate RB in the available subset.
22682 * - Update RA Type1, RA type 0 and RA type 2 masks.
22684 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22685 * @param[in] U8 rbsReq
22686 * @param[in] RgSchBwRbgInfo *rbgInfo
22687 * @param[in] U8 startRbgSubset
22688 * @param[in] U8 *allocRbgSubset
22689 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22690 * @param[in] Bool isPartialAlloc
22693 * Number of allocated RBs
22697 PUBLIC U8 rgSCHCmnDlRaType1Alloc
22699 RgSchDlSfAllocInfo *allocedInfo,
22701 RgSchBwRbgInfo *rbgInfo,
22703 U8 *allocRbgSubset,
22704 RgSchDlSfAllocInfo *resAllocInfo,
22705 Bool isPartialAlloc
22708 PUBLIC U8 rgSCHCmnDlRaType1Alloc(allocedInfo, rbsReq,rbgInfo,startRbgSubset,
22709 allocRbgSubset, resAllocInfo, isPartialAlloc)
22710 RgSchDlSfAllocInfo *allocedInfo;
22712 RgSchBwRbgInfo *rbgInfo;
22714 U8 *allocRbgSubset;
22715 RgSchDlSfAllocInfo *resAllocInfo;
22716 Bool isPartialAlloc;
22719    /* Note: This function attempts only full allocation */
22720 U8 *rbgSubsetSzArr;
22721 U8 type2MaskIdx, subsetIdx, rbIdx, rbInSubset, rbgInSubset;
22722 U8 offset, rbg, maskSize, bestSubsetIdx;
22724 U8 bestNumAvailRbs = 0;
22725 U8 numAllocRbs = 0;
22726 U32 ueRaType2Mask, ueRaType0Mask, rbPosInSubset;
22727 U32 remNumRbs, allocedMask;
22729 U8 rbgSize = rbgInfo->rbgSize;
22730 U8 rbgSubset = startRbgSubset;
22731 U32 *rbgMask = &resAllocInfo->raType0Mask;
22732 U32 *raType1Mask = resAllocInfo->raType1Mask;
22733 U32 *raType2Mask = resAllocInfo->raType2Mask;
22734 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22735 U32 *allocMask = allocedInfo->raType1Mask;
22737 /* Initialize the subset size Array */
22738 rbgSubsetSzArr = rbgInfo->rbgSubsetSize;
22740 /* Perform allocation for RA type 1 */
22741 for (subsetIdx = 0;subsetIdx < rbgSize; ++subsetIdx)
22743 allocedMask = allocMask[rbgSubset];
22744 maskSize = rbgSubsetSzArr[rbgSubset];
22746 /* Determine number of available RBs in the subset */
22747 usedRbs = allocedInfo->raType1UsedRbs[subsetIdx];
22748 remNumRbs = maskSize - usedRbs;
22750 if (remNumRbs >= rbsReq)
22752 bestNumAvailRbs = rbsReq;
22753 bestSubsetIdx = rbgSubset;
22756 else if (isPartialAlloc && (remNumRbs > bestNumAvailRbs))
22758 bestNumAvailRbs = remNumRbs;
22759 bestSubsetIdx = rbgSubset;
22762 rbgSubset = (rbgSubset + 1) % rbgSize;
22763 } /* End of for (each rbgsubset) */
22765 if (bestNumAvailRbs)
22767 /* Initialize alloced mask and subsetSize depending on the RBG
22768 * subset of allocation */
22770 maskSize = rbgSubsetSzArr[bestSubsetIdx];
22771 allocedMask = allocMask[bestSubsetIdx];
22772 RG_SCH_CMN_DL_GET_START_POS(allocedMask, maskSize,
22774 for (; startIdx < rbgSize; ++startIdx, ++startPos)
22776 for (rbInSubset = startPos; rbInSubset < maskSize;
22777 rbInSubset = rbInSubset + rbgSize)
22779 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
22780 if (!(allocedMask & rbPosInSubset))
22782 raType1Mask[bestSubsetIdx] |= rbPosInSubset;
22783 raType1UsedRbs[bestSubsetIdx]++;
22785 /* Compute RB index value for the RB being allocated */
22786 rbgInSubset = rbInSubset /rbgSize;
22787 offset = rbInSubset % rbgSize;
22788 rbg = (rbgInSubset * rbgSize) + bestSubsetIdx;
22789 rbIdx = (rbg * rbgSize) + offset;
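            /* Illustrative sketch (hypothetical values): with rbgSize = 3,
             * bestSubsetIdx = 1 and rbInSubset = 4: rbgInSubset = 4 / 3 = 1,
             * offset = 4 % 3 = 1, rbg = 1 * 3 + 1 = 4, so
             * rbIdx = 4 * 3 + 1 = 13, i.e. the 2nd RB of the 5th RBG, which
             * belongs to subset 1. */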
22791 /* Update RBG mask for RA type 0 allocation */
22792 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22793 *rbgMask |= ueRaType0Mask;
22795 /* Update RA type 2 mask */
22796 ueRaType2Mask = rgSCHCmnGetRaType2Mask(rbIdx, &type2MaskIdx);
22797 raType2Mask[type2MaskIdx] |= ueRaType2Mask;
22799 /* Update the counters */
22802 if (numAllocRbs == bestNumAvailRbs)
22807 } /* End of for (each position in the subset mask) */
22808 if (numAllocRbs == bestNumAvailRbs)
22812 } /* End of for startIdx = 0 to rbgSize */
22814 *allocRbgSubset = bestSubsetIdx;
22815 } /* End of if (bestNumAvailRbs) */
22817 return (numAllocRbs);
22821 * @brief Handles RB allocation for Resource allocation type 2
22825 * Function : rgSCHCmnDlRaType2Alloc
22827 * Invoking Module Processing:
22828 * - This function is invoked for DL RB allocation for resource allocation
22831 * Processing Steps:
22832 * - Determine the available positions in the mask
22833  *     - Allocate best-fit consecutive RBs.
22834 * - Update RA Type2, RA type 1 and RA type 0 masks.
22836 * @param[in] RgSchDlSfAllocInfo *allocedInfo
22837 * @param[in] U8 rbsReq
22838 * @param[in] RgSchBwRbgInfo *rbgInfo
22839 * @param[out] U8 *rbStart
22840 * @param[out] rgSchDlSfAllocInfo *resAllocInfo
22841 * @param[in] Bool isPartialAlloc
22844 * Number of allocated RBs
22848 PUBLIC U8 rgSCHCmnDlRaType2Alloc
22850 RgSchDlSfAllocInfo *allocedInfo,
22852 RgSchBwRbgInfo *rbgInfo,
22854 RgSchDlSfAllocInfo *resAllocInfo,
22855 Bool isPartialAlloc
22858 PUBLIC U8 rgSCHCmnDlRaType2Alloc(allocedInfo, rbsReq, rbgInfo, rbStart,
22859 resAllocInfo, isPartialAlloc)
22860 RgSchDlSfAllocInfo *allocedInfo;
22862 RgSchBwRbgInfo *rbgInfo;
22864 RgSchDlSfAllocInfo *resAllocInfo;
22865 Bool isPartialAlloc;
22868 U8 numAllocRbs = 0;
22870 U8 rbgSize = rbgInfo->rbgSize;
22871 U32 *rbgMask = &resAllocInfo->raType0Mask;
22872 #ifdef RGSCH_SPS_UNUSED
22873 U32 *raType1Mask = resAllocInfo->raType1Mask;
22875 U32 *raType2Mask = resAllocInfo->raType2Mask;
22876 #ifdef RGSCH_SPS_UNUSED
22877 U32 *raType1UsedRbs = resAllocInfo->raType1UsedRbs;
22879 U32 *allocedMask = allocedInfo->raType2Mask;
22881    /* Note: This function attempts only full allocation */
22882 rgSCHCmnDlGetBestFitHole(allocedMask, rbgInfo->numRbs,
22883 raType2Mask, rbsReq, rbStart, &numAllocRbs, isPartialAlloc);
22886 /* Update the allocation in RA type 0 and RA type 1 masks */
22887 U8 rbCnt = numAllocRbs;
22888 #ifdef RGSCH_SPS_UNUSED
22897 /* Update RBG mask for RA type 0 allocation */
22898 ueRaType0Mask = rgSCHCmnGetRaType0Mask(rbIdx, rbgSize);
22899 *rbgMask |= ueRaType0Mask;
22901 #ifdef RGSCH_SPS_UNUSED
22902 /* Update RBG mask for RA type 1 */
22903 ueRaType1Mask = rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, &rbgSubset);
22904 raType1Mask[rbgSubset] |= ueRaType1Mask;
22905 raType1UsedRbs[rbgSubset]++;
22907 /* Update the counters */
22913 return (numAllocRbs);
22917 * @brief Determines RA type 0 mask from given RB index.
22921 * Function : rgSCHCmnGetRaType0Mask
22924 * Processing Steps:
22925 * - Determine RA Type 0 mask for given rbIdex and rbg size.
22927 * @param[in] U8 rbIdx
22928 * @param[in] U8 rbgSize
22929 * @return U32 RA type 0 mask
22932 PRIVATE U32 rgSCHCmnGetRaType0Mask
22938 PRIVATE U32 rgSCHCmnGetRaType0Mask(rbIdx, rbgSize)
22944 U32 rbgPosInRbgMask = 0;
22946 rbg = rbIdx/rbgSize;
22947 rbgPosInRbgMask = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbg);
22949 return (rbgPosInRbgMask);
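   /* Illustrative sketch (hypothetical values): rbIdx = 10 with rbgSize = 3
    * maps to rbg = 10 / 3 = 3, so the returned mask has only the bit for
    * RBG 3 set (bit position taken from RG_SCH_CMN_DL_GET_POS_FRM_LSB). */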
22952 #ifdef RGSCH_SPS_UNUSED
22954 * @brief Determines RA type 1 mask from given RB index.
22958 * Function : rgSCHCmnGetRaType1Mask
22961 * Processing Steps:
22962 * - Determine RA Type 1 mask for given rbIdex and rbg size.
22964 * @param[in] U8 rbIdx
22965 * @param[in] U8 rbgSize
22966 * @param[out] U8 *type1Subset
22967 * @return U32 RA type 1 mask
22970 PRIVATE U32 rgSCHCmnGetRaType1Mask
22977 PRIVATE U32 rgSCHCmnGetRaType1Mask(rbIdx, rbgSize, type1Subset)
22983 U8 rbg, rbgSubset, rbgInSubset, offset, rbInSubset;
22986 rbg = rbIdx/rbgSize;
22987 rbgSubset = rbg % rbgSize;
22988 rbgInSubset = rbg/rbgSize;
22989 offset = rbIdx % rbgSize;
22990 rbInSubset = rbgInSubset * rbgSize + offset;
22991 rbPosInSubset = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbInSubset);
22993 *type1Subset = rbgSubset;
22994 return (rbPosInSubset);
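   /* Illustrative sketch (hypothetical values): rbIdx = 10, rbgSize = 3:
    * rbg = 3, rbgSubset = 3 % 3 = 0, rbgInSubset = 3 / 3 = 1,
    * offset = 10 % 3 = 1, rbInSubset = 1 * 3 + 1 = 4; the function reports
    * subset 0 and returns a mask with the bit for position 4 of that
    * subset set. */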
22996 #endif /* RGSCH_SPS_UNUSED */
22998 * @brief Determines RA type 2 mask from given RB index.
23002 * Function : rgSCHCmnGetRaType2Mask
23005 * Processing Steps:
23006 * - Determine RA Type 2 mask for given rbIdx and rbg size.
23008 * @param[in] U8 rbIdx
23009 * @param[out] U8 *maskIdx
23010 * @return U32 RA type 2 mask
23013 PRIVATE U32 rgSCHCmnGetRaType2Mask
23019 PRIVATE U32 rgSCHCmnGetRaType2Mask(rbIdx, maskIdx)
23026 *maskIdx = rbIdx / 32;
23027 rbPosInType2 = 1 << RG_SCH_CMN_DL_GET_POS_FRM_LSB(rbIdx % 32);
23029 return (rbPosInType2);
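   /* Illustrative sketch (hypothetical value): rbIdx = 37 falls in the
    * second 32-bit word (*maskIdx = 37 / 32 = 1) and the returned mask has
    * the bit for position 37 % 32 = 5 of that word set. */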
23033 * @brief Performs resource allocation for a non-SPS UE in SPS bandwidth
23037 * Function : rgSCHCmnAllocUeInSpsBw
23040 * Processing Steps:
23041 * - Determine allocation for the UE.
23042 * - Use resource allocation type 0, 1 and 2 for allocation
23043 * within maximum SPS bandwidth.
23045 * @param[in] RgSchDlSf *dlSf
23046 * @param[in] RgSchCellCb *cell
23047 * @param[in] RgSchUeCb *ue
23048 * @param[in] RgSchDlRbAlloc *rbAllocInfo
23049 * @param[in] Bool isPartialAlloc
23055 PUBLIC Bool rgSCHCmnAllocUeInSpsBw
23060 RgSchDlRbAlloc *rbAllocInfo,
23061 Bool isPartialAlloc
23064 PUBLIC Bool rgSCHCmnAllocUeInSpsBw(dlSf, cell, ue, rbAllocInfo, isPartialAlloc)
23068 RgSchDlRbAlloc *rbAllocInfo;
23069 Bool isPartialAlloc;
23072 U8 rbgSize = cell->rbgSize;
23073 U8 numAllocRbs = 0;
23074 U8 numAllocRbgs = 0;
23076 U8 idx, noLyr, iTbs;
23077 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
23078 RgSchDlSfAllocInfo *dlSfAlloc = &rbAllocInfo->dlSf->dlSfAllocInfo;
23079 RgSchBwRbgInfo *spsRbgInfo = &cell->spsBwRbgInfo;
23081 /* SPS_FIX : Check if this Hq proc is scheduled */
23082 if ((0 == rbAllocInfo->tbInfo[0].schdlngForTb) &&
23083 (0 == rbAllocInfo->tbInfo[1].schdlngForTb))
23088    /* Check if the requirement can be accommodated in SPS BW */
23089 if (dlSf->spsAllocdBw == spsRbgInfo->numRbs)
23091 /* SPS Bandwidth has been exhausted: no further allocations possible */
23094 if (!isPartialAlloc)
23096 if((dlSf->spsAllocdBw + rbAllocInfo->rbsReq) > spsRbgInfo->numRbs)
23102 /* Perform allocation for RA type 0 if rbsReq is multiple of RBG size (also
23103 * if RBG size = 1) */
23104 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23106 rbAllocInfo->rbsReq += (rbgSize - rbAllocInfo->rbsReq % rbgSize);
23107 numAllocRbgs = rgSCHCmnDlRaType0Alloc(dlSfAlloc,
23108 rbAllocInfo->rbsReq, spsRbgInfo, &numAllocRbs,
23109 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23111 #ifdef RGSCH_SPS_UNUSED
23112 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23114 /* If no RBS could be allocated, attempt RA TYPE 1 */
23116 numAllocRbs = rgSCHCmnDlRaType1Alloc(dlSfAlloc,
23117 rbAllocInfo->rbsReq, spsRbgInfo, (U8)dlSfAlloc->nxtRbgSubset,
23118 &rbAllocInfo->allocInfo.raType1.rbgSubset,
23119 &rbAllocInfo->resAllocInfo, isPartialAlloc);
23123 dlSfAlloc->nxtRbgSubset =
23124 (rbAllocInfo->allocInfo.raType1.rbgSubset + 1 ) % rbgSize;
23128 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23130 numAllocRbs = rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23131 rbAllocInfo->rbsReq, spsRbgInfo,
23132 &rbStart, &rbAllocInfo->resAllocInfo, isPartialAlloc);
23139 if (!(rbAllocInfo->pdcch =
23140 rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi,\
23141 rbAllocInfo->dciFormat, FALSE)))
23143 /* Note: Returning TRUE since PDCCH might be available for another UE */
23147 /* Update Tb info for each scheduled TB */
23148 iTbs = rbAllocInfo->tbInfo[0].iTbs;
23149 noLyr = rbAllocInfo->tbInfo[0].noLyr;
23150 rbAllocInfo->tbInfo[0].bytesAlloc =
23151 rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;
23153 if (rbAllocInfo->tbInfo[1].schdlngForTb)
23155 iTbs = rbAllocInfo->tbInfo[1].iTbs;
23156 noLyr = rbAllocInfo->tbInfo[1].noLyr;
23157 rbAllocInfo->tbInfo[1].bytesAlloc =
23158          rgTbSzTbl[noLyr - 1][iTbs][numAllocRbs - 1]/8;
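      /* Illustrative sketch (hypothetical values): with noLyr = 1,
       * iTbs = 10 and numAllocRbs = 6, bytesAlloc = rgTbSzTbl[0][10][5] / 8,
       * i.e. the transport-block-size table entry (in bits) for that
       * iTbs/PRB pair converted to bytes. */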
23161 /* Update rbAllocInfo with the allocation information */
23162 if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23164 rbAllocInfo->allocInfo.raType0.dlAllocBitMask =
23165 rbAllocInfo->resAllocInfo.raType0Mask;
23166 rbAllocInfo->allocInfo.raType0.numDlAlloc = numAllocRbgs;
23168 #ifdef RGSCH_SPS_UNUSED
23169 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE1)
23171 rbAllocInfo->allocInfo.raType1.dlAllocBitMask =
23172 rbAllocInfo->resAllocInfo.raType1Mask[rbAllocInfo->allocInfo.raType1.rbgSubset];
23173 rbAllocInfo->allocInfo.raType1.numDlAlloc = numAllocRbs;
23174 rbAllocInfo->allocInfo.raType1.shift = 0;
23177 else if (rbAllocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23179 rbAllocInfo->allocInfo.raType2.isLocal = TRUE;
23180 rbAllocInfo->allocInfo.raType2.rbStart = rbStart;
23181 rbAllocInfo->allocInfo.raType2.numRb = numAllocRbs;
23184 rbAllocInfo->rbsAlloc = numAllocRbs;
23185 rbAllocInfo->tbInfo[0].schdlngForTb = TRUE;
23187 /* Update allocation masks for RA types 0, 1 and 2 in DL SF */
23189 /* Update type 0 allocation mask */
23190 dlSfAlloc->raType0Mask |= rbAllocInfo->resAllocInfo.raType0Mask;
23191 #ifdef RGSCH_SPS_UNUSED
23192 /* Update type 1 allocation masks */
23193 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
23195 dlSfAlloc->raType1Mask[idx] |= rbAllocInfo->resAllocInfo.raType1Mask[idx];
23196 dlSfAlloc->raType1UsedRbs[idx] +=
23197 rbAllocInfo->resAllocInfo.raType1UsedRbs[idx];
23200 /* Update type 2 allocation masks */
23201 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
23203 dlSfAlloc->raType2Mask[idx] |= rbAllocInfo->resAllocInfo.raType2Mask[idx];
23206 dlSf->spsAllocdBw += numAllocRbs;
23210 /***********************************************************
23212 * Func : rgSCHCmnDlGetBestFitHole
23215 * Desc : Converts the best fit hole into allocation and returns the
23216 * allocation information.
23226 **********************************************************/
23228 PRIVATE Void rgSCHCmnDlGetBestFitHole
23232 U32 *crntAllocMask,
23236 Bool isPartialAlloc
23239 PRIVATE Void rgSCHCmnDlGetBestFitHole (allocMask, numMaskRbs,
23240 crntAllocMask, rbsReq, allocStart, allocNumRbs, isPartialAlloc)
23243 U32 *crntAllocMask;
23247 Bool isPartialAlloc;
23250 U8 maskSz = (numMaskRbs + 31)/32;
23251 U8 maxMaskPos = (numMaskRbs % 32);
23252 U8 maskIdx, maskPos;
23253 U8 numAvailRbs = 0;
23254 U8 bestAvailNumRbs = 0;
23255 S8 bestStartPos = -1;
23257 U32 tmpMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23258 U32 bestMask[RG_SCH_NUM_RATYPE2_32BIT_MASK] = {0};
23260 *allocNumRbs = numAvailRbs;
23263 for (maskIdx = 0; maskIdx < maskSz; ++maskIdx)
23266 if (maskIdx == (maskSz - 1))
23268 if (numMaskRbs % 32)
23270 maxMaskPos = numMaskRbs % 32;
23273 for (maskPos = 0; maskPos < maxMaskPos; ++maskPos)
23275 if (!(allocMask[maskIdx] & (1 << (31 - maskPos))))
23277 tmpMask[maskIdx] |= (1 << (31 - maskPos));
23278 if (startPos == -1)
23280 startPos = maskIdx * 32 + maskPos;
23283 if (numAvailRbs == rbsReq)
23285 *allocStart = (U8)startPos;
23286 *allocNumRbs = rbsReq;
23292 if (numAvailRbs > bestAvailNumRbs)
23294 bestAvailNumRbs = numAvailRbs;
23295 bestStartPos = startPos;
23296 cmMemcpy((U8 *)bestMask, (U8 *) tmpMask, 4 * sizeof(U32));
23300 cmMemset((U8 *)tmpMask, 0, 4 * sizeof(U32));
23303 if (*allocNumRbs == rbsReq)
23309 if (*allocNumRbs == rbsReq)
23311 /* Convert the hole into allocation */
23312 cmMemcpy((U8 *)crntAllocMask, (U8 *) tmpMask, 4 * sizeof(U32));
23317 if (bestAvailNumRbs && isPartialAlloc)
23319 /* Partial allocation could have been done */
23320 *allocStart = (U8)bestStartPos;
23321 *allocNumRbs = bestAvailNumRbs;
23322 /* Convert the hole into allocation */
23323 cmMemcpy((U8 *)crntAllocMask, (U8 *) bestMask, 4 * sizeof(U32));
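      /* Usage sketch (hypothetical values): with numMaskRbs = 50, rbsReq = 4
       * and allocMask showing RBs 0..9 and 12..49 as used, the scan above
       * finds the hole 10..11 (2 RBs). For a full-allocation request this is
       * rejected, but with isPartialAlloc = TRUE the hole is converted into
       * an allocation with *allocStart = 10 and *allocNumRbs = 2. */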
23329 #endif /* LTEMAC_SPS */
23331 /***************************************************************************
23333 * NON-DLFS Allocation functions
23335 * *************************************************************************/
23339 * @brief Function to find out code rate
23343 * Function : rgSCHCmnFindCodeRate
23345 * Processing Steps:
23347 * @param[in] RgSchCellCb *cell
23348 * @param[in] RgSchDlSf *dlSf
23349 * @param[in,out] RgSchDlRbAlloc *allocInfo
23354 PRIVATE Void rgSCHCmnFindCodeRate
23358 RgSchDlRbAlloc *allocInfo,
23362 PRIVATE Void rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,idx)
23365 RgSchDlRbAlloc *allocInfo;
23374 /* Adjust the iMcs and the bytes allocated with respect to the adjusted
23375    RBs - here we derive the iMcs by finding the largest TB size that does
23376    not exceed the originally allocated bytes. */
23378 * @brief Adjust IMCS according to tbSize and ITBS
23382 * Function : rgSCHCmnNonDlfsPbchTbImcsAdj
23384 * Processing Steps:
23385 * - Adjust Imcs according to tbSize and ITBS.
23387 * @param[in,out] RgSchDlRbAlloc *allocInfo
23388 * @param[in] U8 *idx
23392 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj
23395 RgSchDlRbAlloc *allocInfo,
23400 PRIVATE Void rgSCHCmnNonDlfsPbchTbImcsAdj(cell,allocInfo, idx, rbsReq)
23402 RgSchDlRbAlloc *allocInfo;
23412 RgSchDlSf *dlSf = allocInfo->dlSf;
23414 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
23415 noLyrs = allocInfo->tbInfo[idx].noLyr;
23417 if((allocInfo->raType == RG_SCH_CMN_RA_TYPE0))
23419 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + dlSf->lstRbgDfct), cell->rbgSize);
23420 noRbs = (noRbgs * cell->rbgSize) - dlSf->lstRbgDfct;
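      /* Illustrative sketch (hypothetical values): rbsReq = 7, lstRbgDfct = 1,
       * rbgSize = 3: noRbgs = RGSCH_CEIL(8, 3) = 3 and noRbs = 3 * 3 - 1 = 8,
       * i.e. the request is expressed in whole RBGs and then reduced by the
       * last-RBG deficit. */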
23424 noRbs = allocInfo->rbsReq;
23427    /* This check helps when tbs is zero and a reduction in MCS is not possible */
23428 if (allocInfo->rbsReq == 0 )
23432 origBytesReq = rgTbSzTbl[noLyrs - 1][tbs][rbsReq - 1]/8;
23434    /* Find the iTbs & iMcs by identifying the largest TB size
23435       that does not exceed the originally allocated bytes. */
23438 if(((rgTbSzTbl[noLyrs - 1][0][noRbs - 1])/8) < origBytesReq)
23440 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyrs - 1], tbs);
23441 while(((rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1])/8) > origBytesReq)
23450 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][noRbs - 1]/8;
23451 allocInfo->tbInfo[idx].iTbs = tbs;
23452 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23457 /* Added function to adjust TB size */
23459  * @brief Function to adjust the TB size for subframes 0 and 5 when
23460  *        RB alloc adjustment by adding the extra required RBs was not possible
23464 * Function : rgSCHCmnNonDlfsPbchTbSizeAdj
23466 * Processing Steps:
23468 * @param[in,out] RgSchDlRbAlloc *allocInfo
23469 * @param[in] U8 numOvrlapgPbchRb
23470 * @param[in] U8 idx
23471 * @param[in] U8 pbchSsRsSym
23475 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj
23477 RgSchDlRbAlloc *allocInfo,
23478 U8 numOvrlapgPbchRb,
23484 PRIVATE Void rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,idx,bytesReq)
23485 RgSchDlRbAlloc *allocInfo;
23486 U8 numOvrlapgPbchRb;
23492 U32 reducedTbs = 0;
23496 noLyrs = allocInfo->tbInfo[idx].noLyr;
23498 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[idx].imcs, tbs);
23500 reducedTbs = bytesReq - (((U32)numOvrlapgPbchRb * (U32)pbchSsRsSym * 6)/8);
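   /* Illustrative sketch (hypothetical values): bytesReq = 1000,
    * numOvrlapgPbchRb = 4 and pbchSsRsSym = 60 give
    * reducedTbs = 1000 - ((4 * 60 * 6) / 8) = 1000 - 180 = 820 bytes; the
    * loop below then walks the TBS table down until the TB size fits within
    * this reduced budget. */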
23502    /* Find the iTbs & iMcs by identifying the largest TB size
23503       that fits within the reduced size, accounting for the bits
23504       reserved for PBCH/PSS/SSS */
23505 if(((rgTbSzTbl[noLyrs - 1][0][allocInfo->rbsReq - 1])/8) < reducedTbs)
23507 while(((rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1])/8) > reducedTbs)
23516 allocInfo->tbInfo[idx].bytesReq = rgTbSzTbl[noLyrs - 1][tbs][allocInfo->rbsReq - 1]/8;
23517 allocInfo->tbInfo[idx].iTbs = tbs;
23518 RG_SCH_CMN_DL_TBS_TO_MCS(tbs,allocInfo->tbInfo[idx].imcs);
23523 /* Added this function to find num of ovrlapping PBCH rb*/
23525 * @brief Function to find out how many additional rbs are available
23526 * in the entire bw which can be allocated to a UE
23529 * Function : rgSCHCmnFindNumAddtlRbsAvl
23531 * Processing Steps:
23532  *     - Calculates the number of additional RBs available
23534 * @param[in] RgSchCellCb *cell
23535 * @param[in] RgSchDlSf *dlSf
23536 * @param[in,out] RgSchDlRbAlloc *allocInfo
23537 * @param[out] U8 addtlRbsAvl
23541 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl
23545 RgSchDlRbAlloc *allocInfo
23548 PRIVATE U8 rgSCHCmnFindNumAddtlRbsAvl(cell,dlSf,allocInfo)
23551 RgSchDlRbAlloc *allocInfo;
23554 U8 addtlRbsAvl = 0;
23556 TRC2(rgSCHCmnFindNumAddtlRbsAvl)
23558 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
23560 addtlRbsAvl = (((dlSf->type0End - dlSf->type2End + 1)*\
23561 cell->rbgSize) - dlSf->lstRbgDfct) - allocInfo->rbsReq;
23563 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
23565 addtlRbsAvl = (dlSf->bw - dlSf->bwAlloced) - allocInfo->rbsReq;
23568 return (addtlRbsAvl);
23571 /* Added this function to find num of ovrlapping PBCH rb*/
23573 * @brief Function to find out how many of the requested RBs are
23574 * falling in the center 6 RBs of the downlink bandwidth.
23577 * Function : rgSCHCmnFindNumPbchOvrlapRbs
23579 * Processing Steps:
23580 * - Calculates number of overlapping rbs
23582 * @param[in] RgSchCellCb *cell
23583 * @param[in] RgSchDlSf *dlSf
23584 * @param[in,out] RgSchDlRbAlloc *allocInfo
23585 * @param[out] U8* numOvrlapgPbchRb
23589 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs
23593 RgSchDlRbAlloc *allocInfo,
23594 U8 *numOvrlapgPbchRb
23597 PRIVATE Void rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,numOvrlapgPbchRb)
23600 RgSchDlRbAlloc *allocInfo;
23601 U8 *numOvrlapgPbchRb;
23604 *numOvrlapgPbchRb = 0;
23605 TRC2(rgSCHCmnFindNumPbchOvrlapRbs)
23606    /* Find whether we have already crossed the start boundary of the 6 PBCH RBs;
23607     * if so, find the number of RBs of this allocation that
23608     * overlap with them. */
23609 if(dlSf->bwAlloced <= (cell->pbchRbStart))
23611       /* We have not crossed the start boundary of the PBCH RBs. Now determine
23612        * how many PBCH RBs would overlap with this allocation
23613        * if it is taken. */
23614 /* Find out the overlapping RBs in the centre 6 RBs */
23615 if((dlSf->bwAlloced + allocInfo->rbsReq) > cell->pbchRbStart)
23617 *numOvrlapgPbchRb = (dlSf->bwAlloced + allocInfo->rbsReq) - (cell->pbchRbStart);
23618 if(*numOvrlapgPbchRb > 6)
23619 *numOvrlapgPbchRb = 6;
23622 else if ((dlSf->bwAlloced > (cell->pbchRbStart)) &&
23623 (dlSf->bwAlloced < (cell->pbchRbEnd)))
23625       /* We have already crossed the start boundary of the PBCH RBs. We need to
23626        * find how many RBs of this allocation
23627        * will overlap with the PBCH RBs. */
23628 /* Find out the overlapping RBs in the centre 6 RBs */
23629 if(dlSf->bwAlloced + allocInfo->rbsReq < (cell->pbchRbEnd))
23631          /* Even if we take this allocation we do not cross the
23632           * end boundary of the 6 PBCH RBs. */
23633 *numOvrlapgPbchRb = allocInfo->rbsReq;
23637 /*If we take this allocation then we are crossing the
23638 * end boundary of PBCH 6 RBs.*/
23639 *numOvrlapgPbchRb = (cell->pbchRbEnd) - dlSf->bwAlloced;
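   /* Illustrative sketch (hypothetical values): with the centre 6 RBs spanning
    * pbchRbStart = 22 .. pbchRbEnd = 28, bwAlloced = 20 and rbsReq = 5, the
    * first branch applies and numOvrlapgPbchRb = (20 + 5) - 22 = 3; with
    * bwAlloced = 24 and rbsReq = 10 the allocation crosses pbchRbEnd, so
    * numOvrlapgPbchRb = 28 - 24 = 4. */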
23646 * @brief Performs RB allocation adjustment if the requested RBs are
23647 * falling in the center 6 RBs of the downlink bandwidth.
23650 * Function : rgSCHCmnNonDlfsPbchRbAllocAdj
23652 * Processing Steps:
23653 * - Allocate consecutively available RBs.
23655 * @param[in] RgSchCellCb *cell
23656 * @param[in,out] RgSchDlRbAlloc *allocInfo
23657 * @param[in] U8 pbchSsRsSym
23661 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj
23664 RgSchDlRbAlloc *allocInfo,
23669 PRIVATE Void rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo,pbchSsRsSym)
23671 RgSchDlRbAlloc *allocInfo;
23676 RgSchDlSf *dlSf = allocInfo->dlSf;
23677 U8 numOvrlapgPbchRb = 0;
23678 U8 numOvrlapgAdtlPbchRb = 0;
23680 U8 addtlRbsReq = 0;
23681 U8 moreAddtlRbsReq = 0;
23682 U8 addtlRbsAdd = 0;
23683 U8 moreAddtlRbsAdd = 0;
23691 TRC2(rgSCHCmnNonDlfsPbchRbAllocAdj);
23694 origRbsReq = allocInfo->rbsReq;
23695 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23697 totSym = (cell->isCpDlExtend) ? RGSCH_TOT_NUM_SYM_EXTCP : RGSCH_TOT_NUM_SYM_NORCP;
23699 /* Additional RBs are allocated by considering the loss due to
23700 the reserved symbols for CFICH, PBCH, PSS, SSS and cell specific RS */
23702 divResult = (numOvrlapgPbchRb * pbchSsRsSym)/totSym;
23703 if((numOvrlapgPbchRb * pbchSsRsSym) % totSym)
23707 addtlRbsReq = divResult;
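   /* Illustrative sketch (hypothetical values): numOvrlapgPbchRb = 3,
    * pbchSsRsSym = 60 and totSym = 168 give (3 * 60) / 168 = 1 with a
    * non-zero remainder, so divResult is rounded up and addtlRbsReq = 2,
    * i.e. the resource loss inside the PBCH region is rounded up to whole
    * RBs. */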
23709 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, addtlRbsReq, addtlRbsAdd)
23711    /* The RBs now required are the originally requested RBs plus these additional
23712     * RBs to make up for PSS/SSS/PBCH. */
23713 allocInfo->rbsReq = allocInfo->rbsReq + addtlRbsAdd;
23715    /* Check whether the additional RBs we have taken also fall
23716     * within the PBCH RB range; if so, we also need to account for
23717     * PSS/SSS/PBCH for these additional RBs. */
23718 if(addtlRbsAdd && ((dlSf->bwAlloced + allocInfo->rbsReq - addtlRbsAdd) < (cell->pbchRbEnd)))
23720 if((dlSf->bwAlloced + allocInfo->rbsReq) <= (cell->pbchRbEnd))
23722       /* With the additional RBs taken into account, we are not crossing the
23723        * PBCH RB end boundary. Thus here we only need to account for the
23724        * overlapping PBCH RBs for these additional RBs. */
23725 divResult = (addtlRbsAdd * pbchSsRsSym)/totSym;
23726 if((addtlRbsAdd * pbchSsRsSym) % totSym)
23731 moreAddtlRbsReq = divResult;
23733 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23735 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
23740       /* Here we have crossed the PBCH RB end boundary, thus we need to take
23741        * into account the overlapping RBs for the additional RBs, which will be
23742        * a subset of addtlRbs. */
23743 numOvrlapgAdtlPbchRb = (cell->pbchRbEnd) - ((dlSf->bwAlloced + allocInfo->rbsReq) - addtlRbsAdd);
23745 divResult = (numOvrlapgAdtlPbchRb * pbchSsRsSym)/totSym;
23746 if((numOvrlapgAdtlPbchRb * pbchSsRsSym) % totSym)
23751 moreAddtlRbsReq = divResult;
23753 RG_SCH_CMN_UPD_RBS_TO_ADD(cell, dlSf, allocInfo, moreAddtlRbsReq, moreAddtlRbsAdd)
23755 allocInfo->rbsReq = allocInfo->rbsReq + moreAddtlRbsAdd;
23758 if (isBcchPcch == TRUE)
23763 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23766 /* This case might be for Imcs value 6 and NPrb = 1 case - Not
23767 Adjusting either RBs or Imcs or Bytes Allocated */
23768 allocInfo->rbsReq = allocInfo->rbsReq - addtlRbsAdd - moreAddtlRbsAdd;
23770 else if(tbs && ((0 == addtlRbsAdd) && (moreAddtlRbsAdd == 0)))
23772       /* When the entire bandwidth is already occupied
23773        * and there is no room to add additional RBs, then in order to decrease the
23774        * code rate we reduce the TB size: the currently computed
23775        * TB size is reduced by the number of bytes that PBCH/PSS/SSS would occupy in
23776        * the overlapping RBs, and the nearest TB size below this deduced value is chosen. */
23778 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23780 noLyr = allocInfo->tbInfo[0].noLyr;
23781 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgTbSzTbl[noLyr - 1], tbs);
23782 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23784 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,bytesReq);
23786 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23788 noLyr = allocInfo->tbInfo[1].noLyr;
23789 bytesReq = rgTbSzTbl[noLyr - 1][tbs][allocInfo->rbsReq - 1]/8;
23790 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,bytesReq);
23794 else if(tbs && ((addtlRbsAdd != addtlRbsReq) ||
23795 (addtlRbsAdd && (moreAddtlRbsReq != moreAddtlRbsAdd))))
23797       /* When we were not able to add the required number of
23798        * additional RBs, we adjust the iMcs based on the originally requested RBs.
23799        * This compensates for the few extra RBs we did add; in order
23800        * to compensate for the RBs we could not add, we again do the TB size adjustment. */
23802 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23804 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23806 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23809 rgSCHCmnFindNumPbchOvrlapRbs(cell,dlSf,allocInfo,&numOvrlapgPbchRb);
23810 numOvrlapgPbchRb = numOvrlapgPbchRb - (addtlRbsAdd + moreAddtlRbsAdd);
23812 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,0,allocInfo->tbInfo[0].bytesReq);
23814 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23816 rgSCHCmnNonDlfsPbchTbSizeAdj(allocInfo,numOvrlapgPbchRb,pbchSsRsSym,1,allocInfo->tbInfo[1].bytesReq);
23822       /* We hit this code when we were able to add the required additional RBs,
23823        * hence we adjust the iMcs based on the originally requested RBs. */
23825 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 0 , origRbsReq);
23827 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
23829 rgSCHCmnNonDlfsPbchTbImcsAdj(cell, allocInfo, 1 , origRbsReq);
23834 } /* end of rgSCHCmnNonDlfsPbchRbAllocAdj */
23838 * @brief Performs RB allocation for frequency non-selective cell.
23842 * Function : rgSCHCmnNonDlfsCmnRbAlloc
23844 * Processing Steps:
23845 * - Allocate consecutively available RBs for BCCH/PCCH/RAR.
23847 * @param[in] RgSchCellCb *cell
23848 * @param[in, out] RgSchDlRbAlloc *allocInfo
23854 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc
23857 RgSchDlRbAlloc *allocInfo
23860 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAlloc(cell, allocInfo)
23862 RgSchDlRbAlloc *allocInfo;
23868 U8 pbchSsRsSym = 0;
23871 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
23873 RgSchDlSf *dlSf = allocInfo->dlSf;
23876 U8 spsRbsAlloc = 0;
23877 RgSchDlSfAllocInfo *dlSfAlloc = &allocInfo->dlSf->dlSfAllocInfo;
23879 TRC2(rgSCHCmnNonDlfsCmnRbAlloc);
23881 allocInfo->tbInfo[0].noLyr = 1;
23884    /* Note: Initialize the masks to 0; this might not be needed since allocInfo
23885     * is initialized to 0 at the beginning of allocation */
23886 allocInfo->resAllocInfo.raType0Mask = 0;
23887 cmMemset((U8*)allocInfo->resAllocInfo.raType1Mask, 0,
23888 RG_SCH_NUM_RATYPE1_32BIT_MASK * sizeof (U32));
23889 cmMemset((U8*)allocInfo->resAllocInfo.raType2Mask, 0,
23890 RG_SCH_NUM_RATYPE2_32BIT_MASK * sizeof (U32));
23892 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
23893 (dlSf->bwAlloced == dlSf->bw))
23895 if(dlSf->bwAlloced == dlSf->bw)
23901 if (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced))
23904 if ((allocInfo->tbInfo[0].imcs < 29) && (dlSf->bwAlloced < dlSf->bw))
23906 if(allocInfo->tbInfo[0].imcs < 29)
23909 /* set the remaining RBs for the requested UE */
23910 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
23911 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
23912 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[0][tbs][allocInfo->rbsReq - 1]/8;
23917 /* Attempt RA Type 2 allocation in SPS Bandwidth */
23918 if (dlSf->spsAllocdBw < cell->spsBwRbgInfo.numRbs)
23921 rgSCHCmnDlRaType2Alloc(dlSfAlloc,
23922 allocInfo->rbsReq, &cell->spsBwRbgInfo, &rbStart,
23923 &allocInfo->resAllocInfo, FALSE);
23924 /* rbsAlloc assignment moved from line 16671 to here to avoid
23925 * compilation error. Recheck */
23926 dlSf->spsAllocdBw += spsRbsAlloc;
23929 #endif /* LTEMAC_SPS */
23937 /* Update allocation information */
23938 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
23939 if (allocInfo->pdcch == NULLP)
23943 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
23944 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
23945 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
23946 allocInfo->allocInfo.raType2.isLocal = TRUE;
23950 allocInfo->allocInfo.raType2.rbStart = rbStart;
23951 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
23952 allocInfo->rbsAlloc = allocInfo->rbsReq;
23963 if(!(dlSf->sfNum == 5))
23965 /* case for subframes 1 to 9 except 5 */
23967 allocInfo->allocInfo.raType2.rbStart = rbStart;
23969 /*Fix for ccpu00123918*/
23970 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
23975 pbchFrame = 1; /* case for subframe 5 */
23976 /* In subframe 5, symbols are reserved for PSS and SSS and CFICH
23977 and Cell Specific Reference Signals */
23978 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PSS_SSS_SYM) *
23979 RGSCH_NUM_SC_IN_RB + cell->numCellRSPerSf);
23985 /* In subframe 0, symbols are reserved for PSS, SSS, PBCH, PCFICH
23986 and Cell Specific Reference signals */
23987 pbchSsRsSym = (((cellDl->currCfi) + RGSCH_NUM_PBCH_SYM +
23988 RGSCH_NUM_PSS_SSS_SYM) * RGSCH_NUM_SC_IN_RB +
23989 cell->numCellRSPerSf);
23990 } /* end of outer else */
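/* Explanatory note (hedged, values hypothetical): pbchSsRsSym appears to
 * approximate the resource elements per RB taken by the reserved symbols,
 * computed as (reserved OFDM symbols) * (subcarriers per RB) plus the
 * cell-specific RS REs. For instance, with CFI = 2, RGSCH_NUM_PBCH_SYM = 4,
 * RGSCH_NUM_PSS_SSS_SYM = 2 and 12 subcarriers per RB, subframe 0 would
 * reserve (2 + 4 + 2) * 12 = 96 REs plus numCellRSPerSf; the exact constants
 * depend on the build configuration. */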
23993 (((dlSf->bwAlloced + allocInfo->rbsReq) - cell->pbchRbStart) > 0)&&
23994 (dlSf->bwAlloced < cell->pbchRbEnd))
23996 if(allocInfo->tbInfo[0].imcs < 29)
23998 rgSCHCmnNonDlfsPbchRbAllocAdj(cell, allocInfo, pbchSsRsSym, TRUE);
24010 /*Fix for ccpu00123918*/
24011 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24012 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24013 allocInfo->rbsAlloc = allocInfo->rbsReq;
24015 /* LTE_ADV_FLAG_REMOVED_START */
24017 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
24019 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
24020 allocInfo->allocInfo.raType2.rbStart, \
24021 allocInfo->allocInfo.raType2.numRb);
24026 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
24027 allocInfo->allocInfo.raType2.rbStart, \
24028 allocInfo->allocInfo.raType2.numRb);
24034 /* LTE_ADV_FLAG_REMOVED_END */
24035 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24042 /* Update type 0, 1 and 2 masks */
24043 dlSfAlloc->raType0Mask |= allocInfo->resAllocInfo.raType0Mask;
24044 #ifdef RGSCH_SPS_UNUSED
24045 for (idx = 0; idx < RG_SCH_NUM_RATYPE1_32BIT_MASK; ++idx)
24047 dlSfAlloc->raType1Mask[idx] |=
24048 allocInfo->resAllocInfo.raType1Mask[idx];
24049 dlSfAlloc->raType1UsedRbs[idx] +=
24050 allocInfo->resAllocInfo.raType1UsedRbs[idx];
24053 for (idx = 0; idx < RG_SCH_NUM_RATYPE2_32BIT_MASK; ++idx)
24055 dlSfAlloc->raType2Mask[idx] |=
24056 allocInfo->resAllocInfo.raType2Mask[idx];
24066 * @brief Performs RB allocation for frequency non-selective cell.
24070 * Function : rgSCHCmnNonDlfsCmnRbAllocRar
24072 * Processing Steps:
24073 * - Allocate consecutively available RBs for BCCH/PCCH/RAR.
24075 * @param[in] RgSchCellCb *cell
24076 * @param[in, out] RgSchDlRbAlloc *allocInfo
24082 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAllocRar
24085 RgSchDlRbAlloc *allocInfo
24088 PRIVATE S16 rgSCHCmnNonDlfsCmnRbAllocRar(cell, allocInfo)
24090 RgSchDlRbAlloc *allocInfo;
24093 RgSchDlSf *dlSf = allocInfo->dlSf;
24094 TRC2(rgSCHCmnNonDlfsCmnRbAllocRar);
24097 if(dlSf->bwAlloced == dlSf->bw)
24102 allocInfo->tbInfo[0].noLyr = 1;
24104 /* Update allocation information */
24105 allocInfo->pdcch = rgSCHCmnCmnPdcchAlloc(cell, dlSf);
24106 if (allocInfo->pdcch == NULLP)
24110 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
24111 allocInfo->pdcch->dciNumOfBits = cell->dciSize.size[TFU_DCI_FORMAT_1A];
24112 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
24113 allocInfo->allocInfo.raType2.isLocal = TRUE;
24115 /*Fix for ccpu00123918*/
24116 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
24117 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
24118 allocInfo->rbsAlloc = allocInfo->rbsReq;
24120 /* LTE_ADV_FLAG_REMOVED_END */
24121 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24124 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, NULLP, dlSf, 13, TFU_DCI_FORMAT_B1, FALSE);
24125 if (allocInfo->pdcch == NULLP)
24129 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
24130 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
24132 printf("5GTF_ERROR vrbg allocated > 25\n");
24136 allocInfo->tbInfo[0].cmnGrnt.vrbgStart = beamInfo->vrbgStart;
24137 allocInfo->tbInfo[0].cmnGrnt.numVrbg = allocInfo->vrbgReq;
24139 /* Update allocation information */
24140 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
24142 allocInfo->tbInfo[0].cmnGrnt.xPDSCHRange = 1;
24143 allocInfo->tbInfo[0].cmnGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
24144 allocInfo->tbInfo[0].cmnGrnt.vrbgStart, allocInfo->tbInfo[0].cmnGrnt.numVrbg);
24146 allocInfo->tbInfo[0].cmnGrnt.rbStrt = (allocInfo->tbInfo[0].cmnGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
24147 allocInfo->tbInfo[0].cmnGrnt.numRb = (allocInfo->tbInfo[0].cmnGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
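/* Illustrative sketch (not compiled): assuming rgSCHCmnCalcRiv() follows the
 * standard LTE type-2 resource indication value (RIV) encoding, the mapping
 * from (start VRBG, number of VRBGs) to rbAssign would look roughly like the
 * helper below. The helper name and plain C types are hypothetical and only
 * meant to clarify the grant encoding above. */
#if 0
static unsigned int exampleCalcRiv(unsigned int nVrbg,  /* total VRBGs, e.g. MAX_5GTF_VRBG */
                                   unsigned int start,  /* first allocated VRBG */
                                   unsigned int len)    /* number of allocated VRBGs */
{
   /* RIV = N*(L-1) + S            when (L-1) <= floor(N/2)
    * RIV = N*(N-L+1) + (N-1-S)    otherwise */
   if ((len - 1) <= (nVrbg / 2))
   {
      return (nVrbg * (len - 1)) + start;
   }
   return (nVrbg * (nVrbg - len + 1)) + (nVrbg - 1 - start);
}
#endif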
24149 beamInfo->vrbgStart += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24150 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].cmnGrnt.numVrbg;
24151 allocInfo->tbInfo[0].cmnGrnt.rv = 0;
24152 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
24155 printf("\n[%s],allocInfo->tbInfo[0].bytesAlloc:%u,vrbgReq:%u\n",
24156 __func__,allocInfo->tbInfo[0].bytesAlloc,allocInfo->vrbgReq);
24162 /* LTE_ADV_FLAG_REMOVED_START */
24165 * @brief To check if DL BW is available for non-DLFS allocation.
24169 * Function : rgSCHCmnNonDlfsSFRBwAvlbl
24171 * Processing Steps:
24172 * - Determine availability based on RA Type.
24174 * @param[in] RgSchCellCb *cell
24175 * @param[in] RgSchDlSf *dlSf
24176 * @param[in] RgSchDlRbAlloc *allocInfo
24184 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl
24187 RgSchSFRPoolInfo **sfrpoolInfo,
24189 RgSchDlRbAlloc *allocInfo,
24193 PRIVATE Bool rgSCHCmnNonDlfsSFRBwAvlbl(cell, sfrpoolInfo, dlSf, allocInfo, isUeCellEdge)
24195 RgSchSFRPoolInfo **sfrpoolInfo;
24197 RgSchDlRbAlloc *allocInfo;
24205 RgSchSFRPoolInfo *sfrPool;
24206 RgSchSFRPoolInfo *sfrCEPool;
24210 RgSchSFRPoolInfo *poolWithMaxAvlblBw = NULLP;
24212 U32 addtnlPRBs = 0;
24214 if (dlSf->bw <= dlSf->bwAlloced)
24216 RLOG_ARG2(L_ERROR,DBG_CELLID,cell->cellId,
24217 "BW is fully allocated for subframe (%d) CRNTI:%d", dlSf->sfNum,allocInfo->rnti);
24221 if (dlSf->sfrTotalPoolInfo.ccBwFull == TRUE)
24223 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24224 "BW is fully allocated for CC Pool CRNTI:%d",allocInfo->rnti);
24228 if ((dlSf->sfrTotalPoolInfo.ceBwFull == TRUE) && (isUeCellEdge))
24230 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,
24231 "BW is fully allocated for CE Pool CRNTI:%d",allocInfo->rnti);
24235 /* We first check whether the scheduled UE is cell edge or cell centre and accordingly check the available
24236 bandwidth in its pool. If the cell centre UE doesn't have BW available in its pool, then it will check
24237 Bw availability in cell edge pool but the other way around is NOT possible. */
24240 l = &dlSf->sfrTotalPoolInfo.cePool;
24244 l = &dlSf->sfrTotalPoolInfo.ccPool;
24247 n = cmLListFirst(l);
24251 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24253 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24255 /* MS_FIX for ccpu00123919 : Number of RBs in case of RETX should be same as that of initial transmission. */
24256 if(allocInfo->tbInfo[0].tbCb->txCntr)
24258 /* If RB assignment is being done for RETX: if reqRbs is a multiple of rbgSize then ignore lstRbgDfct. If reqRbs is
24259  * not a multiple of rbgSize then check if lstRbgDfct exists */
24260 if (allocInfo->rbsReq % cell->rbgSize == 0)
24262 if ((sfrPool->type2End == dlSf->type2End) && dlSf->lstRbgDfct)
24264 /* In this scenario we are wasting the last RBG for this dlSf */
24265 sfrPool->type0End--;
24266 sfrPool->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24268 dlSf->lstRbgDfct = 0;
24270 /*ABHINAV To check if these variables need to be taken care of*/
24272 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24277 if (dlSf->lstRbgDfct)
24279 /* Check if type0 allocation can cater to this RETX requirement */
24280 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24286 if (sfrPool->type2End != dlSf->type2End) /*Search again for some pool which has the END RBG of the BandWidth*/
24294 /* cannot allocate same number of required RBs */
24300 /*rg002.301 ccpu00120391 MOD condition is modified appropriately to find if rbsReq is less than available RBs*/
24301 if(allocInfo->rbsReq <= (((sfrPool->type0End - sfrPool->type2End + 1)*\
24302 cell->rbgSize) - dlSf->lstRbgDfct))
24304 *sfrpoolInfo = sfrPool;
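/* Worked example (hedged, values hypothetical): the contiguous space still
 * usable for allocation in this pool is
 * (type0End - type2End + 1) * rbgSize - lstRbgDfct. With type0End = 20,
 * type2End = 5, rbgSize = 3 and lstRbgDfct = 1 that is 16 * 3 - 1 = 47 RBs,
 * so a retransmission asking for up to 47 RBs can be served from this pool. */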
24309 if (sfrPool->bw <= sfrPool->bwAlloced + cell->rbgSize)
24311 n = cmLListNext(l);
24312 /* If the ue is cell centre then it will simply check the memory available in next pool.
24313 But if there are no more memory pools available, then cell centre Ue will try to look for memory in cell edge pool */
24315 if((!isUeCellEdge) && (!n->node))
24317 l = &dlSf->sfrTotalPoolInfo.cePool;
24318 n = cmLListFirst(l);
24324 /* MS_FIX: Number of RBs in case of RETX should be same as that of initial transmission */
24325 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24327 /*rg002.301 ccpu00120391 MOD setting the remaining RBs for the requested UE*/
24328 allocInfo->rbsReq = (((sfrPool->type0End - sfrPool->type2End + 1)*\
24329 cell->rbgSize) - dlSf->lstRbgDfct);
24330 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24331 noLyrs = allocInfo->tbInfo[0].noLyr;
24332 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24333 *sfrpoolInfo = sfrPool;
24338 n = cmLListNext(l);
24340 /* If the ue is cell centre then it will simply check the memory available in next pool.
24341 But if there are no more memory pools available, then cell centre Ue will try to look for memory in cell edge pool */
24342 if((!isUeCellEdge) && (!n->node))
24344 l = &dlSf->sfrTotalPoolInfo.cePool;
24345 n = cmLListFirst(l);
24354 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24356 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24357 /* This is a case where a UE was CC and had more RBs allocated than present in the CE pool.
24358 If this UE becomes CE while a retx is ongoing, then the CE BW is not sufficient for the retx */
24359 if ((isUeCellEdge) &&
24360 (allocInfo->tbInfo[0].tbCb->txCntr != 0))
24362 if(allocInfo->rbsReq > (sfrPool->bw - sfrPool->bwAlloced))
24364 /* Adjust CE BW such that Retx alloc is successful */
24365 /* Check if merging CE with adjacent CC pool will be sufficient to process Retx */
24367 /* If no Type 0 allocations are made from this pool */
24368 if (sfrPool->type0End == (((sfrPool->poolendRB + 1) / cell->rbgSize) - 1))
24370 if (sfrPool->adjCCPool &&
24371 (sfrPool->adjCCPool->type2Start == sfrPool->poolendRB + 1) &&
24372 (allocInfo->rbsReq <= ((sfrPool->bw - sfrPool->bwAlloced) +
24373 ((sfrPool->adjCCPool->bw - sfrPool->adjCCPool->bwAlloced)))))
24375 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24377 /* Adjusting CE Pool Info */
24378 sfrPool->bw += addtnlPRBs;
24379 sfrPool->type0End = ((sfrPool->poolendRB + addtnlPRBs + 1) /
24380 cell->rbgSize) - 1;
24382 /* Adjusting CC Pool Info */
24383 sfrPool->adjCCPool->type2Start += addtnlPRBs;
24384 sfrPool->adjCCPool->type2End = RGSCH_CEIL(sfrPool->adjCCPool->type2Start,
24386 sfrPool->adjCCPool->bw -= addtnlPRBs;
24387 *sfrpoolInfo = sfrPool;
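/* Note (hedged, numbers hypothetical): e.g. if the retx needs 10 RBs but the
 * CE pool has only 6 free, addtnlPRBs = 4 RBs are borrowed from the adjacent
 * CC pool: the CE pool bw and type0End grow by those 4 RBs while the CC
 * pool's type2Start advances and its bw shrinks by the same amount, keeping
 * the overall subframe bandwidth accounting consistent. */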
24394 /* Check if CC pool is one of the following:
24395 * 1. |CE| + |CC "CCPool2Exists" = TRUE|
24396 * 2. |CC "CCPool2Exists" = FALSE| + |CE| + |CC "CCPool2Exists" = TRUE|
24398 if(TRUE == sfrPool->CCPool2Exists)
24400 l1 = &dlSf->sfrTotalPoolInfo.cePool;
24401 n1 = cmLListFirst(l1);
24402 sfrCEPool = (RgSchSFRPoolInfo*)(n1->node);
24403 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced))
24405 *sfrpoolInfo = sfrCEPool;
24408 else if(allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24410 *sfrpoolInfo = sfrPool;
24413 /* Check if CE and CC boundary has unallocated prbs */
24414 else if ((sfrPool->poolstartRB == sfrPool->type2Start) &&
24415 (sfrCEPool->type0End == ((sfrCEPool->poolendRB + 1) / cell->rbgSize) - 1))
24417 if(allocInfo->rbsReq <= (sfrCEPool->bw - sfrCEPool->bwAlloced) +
24418 (sfrPool->bw - sfrPool->bwAlloced))
24420 /* Checking if BW can be allocated partly from CE pool and partly
24423 addtnlPRBs = allocInfo->rbsReq - (sfrPool->bw - sfrPool->bwAlloced);
24424 /* Updating CE and CC type2 parameters based on the RBs allocated
24425 * from these pools*/
24426 sfrPool->type2Start -= addtnlPRBs;
24427 sfrPool->type2End = RGSCH_CEIL(sfrPool->type2Start, cell->rbgSize);
24428 sfrPool->bw += addtnlPRBs;
24429 if (addtnlPRBs == (sfrCEPool->bw - sfrCEPool->bwAlloced))
24431 sfrCEPool->bwAlloced = sfrCEPool->bw;
24432 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24436 sfrCEPool->bw -= addtnlPRBs;
24437 sfrCEPool->type0End = ((sfrCEPool->poolendRB + 1 - addtnlPRBs) / cell->rbgSize) - 1;
24439 *sfrpoolInfo = sfrPool;
24442 else if ( bwAvlbl <
24443 ((sfrCEPool->bw - sfrCEPool->bwAlloced) +
24444 (sfrPool->bw - sfrPool->bwAlloced)))
24446 /* All the Prbs from CE BW shall be allocated */
24447 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24449 sfrPool->type2Start = sfrCEPool->type2Start;
24450 sfrPool->bw += sfrCEPool->bw - sfrCEPool->bwAlloced;
24451 sfrCEPool->type2Start = sfrCEPool->poolendRB + 1;
24452 sfrCEPool->bwAlloced = sfrCEPool->bw;
24453 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24455 /* set the remaining RBs for the requested UE */
24456 allocInfo->rbsReq = (sfrPool->bw - sfrPool->bwAlloced);
24457 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24458 noLyrs = allocInfo->tbInfo[0].noLyr;
24459 allocInfo->tbInfo[0].bytesReq =
24460 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24461 *sfrpoolInfo = sfrPool;
24472 /* Checking if no. of RBs required can be allocated from
24474 * 1. If available return the SFR pool.
24475 * 2. Else update the RBs required parameter based on the
24476 * BW available in the pool
24477 * 3. Return FALSE if no B/W is available.
24479 if (allocInfo->rbsReq <= (sfrPool->bw - sfrPool->bwAlloced))
24481 *sfrpoolInfo = sfrPool;
24486 if(allocInfo->tbInfo[0].tbCb->txCntr == 0)
24488 if (bwAvlbl < sfrPool->bw - sfrPool->bwAlloced)
24492 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24494 bwAvlbl = sfrPool->bw - sfrPool->bwAlloced;
24495 poolWithMaxAvlblBw = sfrPool;
24497 n = cmLListNext(l);
24499 if ((isUeCellEdge == FALSE) && (n == NULLP))
24501 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24503 l = &dlSf->sfrTotalPoolInfo.cePool;
24504 n = cmLListFirst(l);
24514 dlSf->sfrTotalPoolInfo.ceBwFull = TRUE;
24518 dlSf->sfrTotalPoolInfo.ccBwFull = TRUE;
24524 /* set the remaining RBs for the requested UE */
24525 allocInfo->rbsReq = poolWithMaxAvlblBw->bw -
24526 poolWithMaxAvlblBw->bwAlloced;
24527 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24528 noLyrs = allocInfo->tbInfo[0].noLyr;
24529 allocInfo->tbInfo[0].bytesReq =
24530 rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24531 *sfrpoolInfo = poolWithMaxAvlblBw;
24538 n = cmLListNext(l);
24540 if ((isUeCellEdge == FALSE) && (n == NULLP))
24542 if(l != &dlSf->sfrTotalPoolInfo.cePool)
24544 l = &dlSf->sfrTotalPoolInfo.cePool;
24545 n = cmLListFirst(l);
24561 #endif /* end of ifndef LTE_TDD*/
24562 /* LTE_ADV_FLAG_REMOVED_END */
24565 * @brief To check if DL BW is available for non-DLFS allocation.
24569 * Function : rgSCHCmnNonDlfsBwAvlbl
24571 * Processing Steps:
24572 * - Determine availability based on RA Type.
24574 * @param[in] RgSchCellCb *cell
24575 * @param[in] RgSchDlSf *dlSf
24576 * @param[in] RgSchDlRbAlloc *allocInfo
24584 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl
24588 RgSchDlRbAlloc *allocInfo
24591 PRIVATE Bool rgSCHCmnNonDlfsBwAvlbl(cell, dlSf, allocInfo)
24594 RgSchDlRbAlloc *allocInfo;
24599 U8 ignoredDfctRbg = FALSE;
24601 TRC2(rgSCHCmnNonDlfsBwAvlbl);
24602 if (dlSf->bw <= dlSf->bwAlloced)
24604 RLOG_ARG3(L_DEBUG,DBG_CELLID,cell->cellId, "(%d:%d)FAILED CRNTI:%d",
24605 dlSf->bw, dlSf->bwAlloced,allocInfo->rnti);
24608 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
24610 /* Fix for ccpu00123919 : Number of RBs in case of RETX should be same as
24611 * that of initial transmission. */
24612 if(allocInfo->tbInfo[0].tbCb->txCntr)
24614 /* If RB assignment is being done for RETX. Then if reqRbs are
24615 * a multiple of rbgSize then ignore lstRbgDfct. If reqRbs is
24616 * not a multiple of rbgSize then check if lsgRbgDfct exists */
24617 if (allocInfo->rbsReq % cell->rbgSize == 0)
24619 if (dlSf->lstRbgDfct)
24621 /* In this scenario we are wasting the last RBG for this dlSf */
24624 dlSf->bwAlloced += (cell->rbgSize - dlSf->lstRbgDfct);
24625 /* Fix: MUE_PERTTI_DL */
24626 dlSf->lstRbgDfct = 0;
24627 ignoredDfctRbg = TRUE;
24633 if (dlSf->lstRbgDfct)
24635 /* Check if type0 allocation can cater to this RETX requirement */
24636 if ((allocInfo->rbsReq % cell->rbgSize) != (cell->rbgSize - dlSf->lstRbgDfct))
24643 /* cannot allocate same number of required RBs */
24649 /* Condition is modified appropriately to find
24650 * if rbsReq is less than available RBs*/
24651 if(allocInfo->rbsReq <= (((dlSf->type0End - dlSf->type2End + 1)*\
24652 cell->rbgSize) - dlSf->lstRbgDfct))
24656 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24657 * allocation in TDD when requested RBs are more than available RBs*/
24660 /* MS_WORKAROUND for ccpu00122022 */
24661 if (dlSf->bw < dlSf->bwAlloced + cell->rbgSize)
24663 /* ccpu00132358- Re-assigning the values which were updated above
24664 * if it is RETX and Last RBG available*/
24665 if(ignoredDfctRbg == TRUE)
24668 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24669 dlSf->lstRbgDfct = 1;
24675 /* Fix: Number of RBs in case of RETX should be same as
24676 * that of initial transmission. */
24677 if(allocInfo->tbInfo[0].tbCb->txCntr == 0
24679 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24683 /* Setting the remaining RBs for the requested UE*/
24684 allocInfo->rbsReq = (((dlSf->type0End - dlSf->type2End + 1)*\
24685 cell->rbgSize) - dlSf->lstRbgDfct);
24686 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24687 noLyrs = allocInfo->tbInfo[0].noLyr;
24688 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24689 /* DwPts Scheduling Changes Start */
24691 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24693 allocInfo->tbInfo[0].bytesReq =
24694 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24697 /* DwPts Scheduling Changes End */
24701 /* ccpu00132358- Re-assigning the values which were updated above
24702 * if it is RETX and Last RBG available*/
24703 if(ignoredDfctRbg == TRUE)
24706 dlSf->bwAlloced -= (cell->rbgSize - dlSf->lstRbgDfct);
24707 dlSf->lstRbgDfct = 1;
24710 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "FAILED for CRNTI:%d",
24712 printf ("RB Alloc failed for LAA TB type 0\n");
24718 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
24720 if (allocInfo->rbsReq <= (dlSf->bw - dlSf->bwAlloced))
24724 /* ccpu00132358:MOD- Removing "ifndef LTE_TDD" for unblocking the RB
24725 * allocation in TDD when requested RBs are more than available RBs*/
24728 /* Fix: Number of RBs in case of RETX should be same as
24729 * that of initial transmission. */
24730 if((allocInfo->tbInfo[0].tbCb->txCntr == 0)
24732 && (FALSE == rgSCHLaaIsLaaTB(allocInfo))
24736 /* set the remaining RBs for the requested UE */
24737 allocInfo->rbsReq = dlSf->bw - dlSf->bwAlloced;
24738 RG_SCH_CMN_DL_MCS_TO_TBS(allocInfo->tbInfo[0].imcs, tbs);
24739 noLyrs = allocInfo->tbInfo[0].noLyr;
24740 allocInfo->tbInfo[0].bytesReq = rgTbSzTbl[noLyrs-1][tbs][allocInfo->rbsReq - 1]/8;
24741 /* DwPts Scheduling Changes Start */
24743 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
24745 allocInfo->tbInfo[0].bytesReq =
24746 rgTbSzTbl[noLyrs-1][tbs][RGSCH_MAX(allocInfo->rbsReq*3/4,1) - 1]/8;
24749 /* DwPts Scheduling Changes End */
24753 printf ("RB Alloc failed for LAA TB type 2\n");
24754 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24757 /* Fix: Number of RBs in case of RETX should be same as
24758 * that of initial transmission. */
24762 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"FAILED for CRNTI:%d",allocInfo->rnti);
24766 /* LTE_ADV_FLAG_REMOVED_START */
24769 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24773 * Function : rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24775 * Processing Steps:
24777 * @param[in] RgSchCellCb *cell
24778 * @param[in] RgSchDlSf *dlSf
24779 * @param[in] U8 rbStrt
24780 * @param[in] U8 numRb
24785 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc
24793 PUBLIC Void rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
24802 RgSchSFRPoolInfo *sfrPool;
24803 TRC2(rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc);
24805 l = &dlSf->sfrTotalPoolInfo.ccPool;
24807 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24808 dlSf->bwAlloced += numRb;
24809 dlSf->type2Start += numRb;
24810 n = cmLListFirst(l);
24814 sfrPool = (RgSchSFRPoolInfo*)(n->node);
24815 n = cmLListNext(l);
24817 /* If the pool contains some RBs allocated in this allocation, e.g. the pool is [30..50], pool->type2Start is 40 and dlSf->type2Start is 45, then update the variables in the pool */
24818 if((sfrPool->poolendRB >= dlSf->type2Start) && (sfrPool->type2Start < dlSf->type2Start))
24820 sfrPool->type2End = dlSf->type2End;
24821 sfrPool->bwAlloced = dlSf->type2Start - sfrPool->poolstartRB;
24822 sfrPool->type2Start = dlSf->type2Start;
24826 /* If the pool contains all RBs allocated in this allocation*/
24827 if(dlSf->type2Start > sfrPool->poolendRB)
24829 sfrPool->type2End = sfrPool->type0End + 1;
24830 sfrPool->bwAlloced = sfrPool->bw;
24831 sfrPool->type2Start = sfrPool->poolendRB + 1;
24836 if (l != &dlSf->sfrTotalPoolInfo.cePool)
24838 l = &dlSf->sfrTotalPoolInfo.cePool;
24839 n = cmLListFirst(l);
24849 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24853 * Function : rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24855 * Processing Steps:
24857 * @param[in] RgSchCellCb *cell
24858 * @param[in] RgSchDlSf *dlSf
24859 * @param[in] U8 rbStrt
24860 * @param[in] U8 numRb
24866 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc
24875 PRIVATE S16 rgSCHCmnNonDlfsUpdDSFRTyp2Alloc(cell, ue, dlSf, rbStrt, numRb)
24885 RgSchSFRPoolInfo *sfrCCPool1 = NULL;
24886 RgSchSFRPoolInfo *sfrCCPool2 = NULL;
24889 TRC2(rgSCHCmnNonDlfsUpdDSFRTyp2Alloc);
24890 /* Move the type2End pivot forward */
24893 l = &dlSf->sfrTotalPoolInfo.ccPool;
24894 n = cmLListFirst(l);
24897 sfrCCPool1 = (RgSchSFRPoolInfo*)(n->node);
24899 if (sfrCCPool1 == NULLP)
24901 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24902 "sfrCCPool1 is NULL for CRNTI:%d",ue->ueId);
24905 n = cmLListNext(l);
24908 sfrCCPool2 = (RgSchSFRPoolInfo*)(n->node);
24909 n = cmLListNext(l);
24911 if((sfrCCPool1) && (sfrCCPool2))
24913 /* Based on RNTP info, the CC user is assigned high power on a per-subframe basis */
24914 if(((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24915 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb)) ||
24916 ((dlSf->type2Start >= sfrCCPool2->pwrHiCCRange.startRb) &&
24917 (dlSf->type2Start + numRb < sfrCCPool2->pwrHiCCRange.endRb)))
24919 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24921 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24922 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24925 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24926 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
24933 if((dlSf->type2Start >= sfrCCPool1->pwrHiCCRange.startRb) &&
24934 (dlSf->type2Start + numRb < sfrCCPool1->pwrHiCCRange.endRb))
24936 ue->lteAdvUeCb.isCCUePHigh = TRUE;
24938 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
24939 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, dlSf->type2Start, numRb, dlSf->bw);
24942 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdDSFRTyp2Alloc():"
24943 "rgSCHCmnBuildRntpInfo() function returned RFAILED CRNTI:%d",ue->ueId);
24949 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24951 dlSf->bwAlloced += numRb;
24952 /*MS_FIX for ccpu00123918*/
24953 dlSf->type2Start += numRb;
24959 #endif /* end of ifndef LTE_TDD*/
24960 /* LTE_ADV_FLAG_REMOVED_END */
24962 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
24966 * Function : rgSCHCmnNonDlfsUpdTyp2Alloc
24968 * Processing Steps:
24970 * @param[in] RgSchCellCb *cell
24971 * @param[in] RgSchDlSf *dlSf
24972 * @param[in] U8 rbStrt
24973 * @param[in] U8 numRb
24978 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc
24986 PRIVATE Void rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, rbStrt, numRb)
24993 TRC2(rgSCHCmnNonDlfsUpdTyp2Alloc);
24994 /* Move the type2End pivot forward */
24995 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
24996 //#ifndef LTEMAC_SPS
24997 dlSf->bwAlloced += numRb;
24998 /*Fix for ccpu00123918*/
24999 dlSf->type2Start += numRb;
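/* Worked example (hedged, values hypothetical): for rbStrt = 10, numRb = 7
 * and rbgSize = 3, type2End becomes RGSCH_CEIL(17, 3) = 6 (an RBG index),
 * bwAlloced grows by 7 and type2Start advances from 10 to 17, so the next
 * type-2 allocation starts immediately after this one. */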
25005 * @brief To do DL allocation using TYPE0 RA.
25009 * Function : rgSCHCmnNonDlfsType0Alloc
25011 * Processing Steps:
25012 * - Perform TYPE0 allocation using the RBGs between
25013 * type0End and type2End.
25014 * - Build the allocation mask as per RBG positioning.
25015 * - Update the allocation parameters.
25017 * @param[in] RgSchCellCb *cell
25018 * @param[in] RgSchDlSf *dlSf
25019 * @param[in] RgSchDlRbAlloc *allocInfo
25025 PRIVATE Void rgSCHCmnNonDlfsType0Alloc
25029 RgSchDlRbAlloc *allocInfo,
25033 PRIVATE Void rgSCHCmnNonDlfsType0Alloc(cell, dlSf, allocInfo, dlUe)
25036 RgSchDlRbAlloc *allocInfo;
25040 U32 dlAllocMsk = 0;
25041 U8 rbgFiller = dlSf->lstRbgDfct;
25042 U8 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25043 //U8 noRbgs = (allocInfo->rbsReq + rbgFiller)/ cell->rbgSize;
25047 U32 tb1BytesAlloc = 0;
25048 U32 tb2BytesAlloc = 0;
25049 RgSchCmnDlUe *dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25051 TRC2(rgSCHCmnNonDlfsType0Alloc);
25052 //if(noRbgs == 0) noRbgs = 1; /* Not required as ceilling is used above*/
25054 /* Fix for ccpu00123919*/
25055 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
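/* Worked example (hedged, values hypothetical): with rbsReq = 10,
 * rbgFiller = lstRbgDfct = 2 and rbgSize = 3, noRbgs = RGSCH_CEIL(12, 3) = 4
 * RBGs and noRbs = 4 * 3 - 2 = 10 RBs, i.e. the last (short) RBG contributes
 * only rbgSize - lstRbgDfct usable RBs. */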
25056 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25062 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25065 /* Fix for ccpu00138701: Ceiling is used to derive the number of RBGs. Therefore,
25066 * after this operation, check that the max TB size and max RBs are not exceeded;
25067 * if they are exceeded then decrement the number of RBGs. */
25068 //if((noRbs + rbgFiller) % cell->rbgSize)
25069 if((noRbs > allocInfo->rbsReq) &&
25070 (allocInfo->rbsReq + rbgFiller) % cell->rbgSize)
25071 {/* considering ue category limitation
25072 * due to ceiling */
25075 if (rgSCHLaaIsLaaTB(allocInfo)== FALSE)
25078 if ((allocInfo->tbInfo[0].schdlngForTb) && (!allocInfo->tbInfo[0].tbCb->txCntr))
25080 iTbs = allocInfo->tbInfo[0].iTbs;
25081 noLyr = allocInfo->tbInfo[0].noLyr;
25082 tb1BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25085 if ((allocInfo->tbInfo[1].schdlngForTb) && (!allocInfo->tbInfo[1].tbCb->txCntr))
25087 iTbs = allocInfo->tbInfo[1].iTbs;
25088 noLyr = allocInfo->tbInfo[1].noLyr;
25089 tb2BytesAlloc = rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25093 /* Only Check for New Tx No need for Retx */
25094 if (tb1BytesAlloc || tb2BytesAlloc)
25096 if (( ue->dl.aggTbBits >= dlUe->maxTbBits) ||
25097 (tb1BytesAlloc >= dlUe->maxTbSz/8) ||
25098 (tb2BytesAlloc >= dlUe->maxTbSz/8) ||
25099 (noRbs >= dlUe->maxRb))
25105 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25109 /* type0End would have been initially (during subfrm Init) at the bit position
25110 * (cell->noOfRbgs - 1), 0 being the most significant.
25111 * Getting DlAllocMsk for noRbgs and at the appropriate position */
25112 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - dlSf->type0End));
25113 /* Move backwards the type0End pivot */
25114 dlSf->type0End -= noRbgs;
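/* Worked example (hedged, values hypothetical): the RBG bitmap is kept
 * MSB-first, bit 31 being RBG 0. With noRbgs = 3 and type0End = 12,
 * ((1 << 3) - 1) << (31 - 12) sets bits 21..19, i.e. RBGs 10, 11 and 12, and
 * type0End then moves back to 9 for the next type-0 user. */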
25115 /*Fix for ccpu00123919*/
25116 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25117 /* Update the bwAlloced field accordingly */
25118 //#ifndef LTEMAC_SPS /* ccpu00129474*/
25119 dlSf->bwAlloced += noRbs;
25121 /* Update Type0 Alloc Info */
25122 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25123 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25124 allocInfo->rbsAlloc = noRbs;
25126 /* Update Tb info for each scheduled TB */
25127 iTbs = allocInfo->tbInfo[0].iTbs;
25128 noLyr = allocInfo->tbInfo[0].noLyr;
25129 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25130 * RETX TB Size is same as Init TX TB Size */
25131 if (allocInfo->tbInfo[0].tbCb->txCntr)
25133 allocInfo->tbInfo[0].bytesAlloc =
25134 allocInfo->tbInfo[0].bytesReq;
25138 allocInfo->tbInfo[0].bytesAlloc =
25139 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25140 /* DwPts Scheduling Changes Start */
25142 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25144 allocInfo->tbInfo[0].bytesAlloc =
25145 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25148 /* DwPts Scheduling Changes End */
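/* Note (hedged): for a special subframe carrying data (DwPTS), the usable
 * capacity is assumed to be roughly 3/4 of a normal subframe, so the TB size
 * is re-read from rgTbSzTbl using max(noRbs * 3 / 4, 1) RBs instead of noRbs;
 * e.g. an allocation of 20 RBs is sized as if only 15 RBs were available. */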
25151 if (allocInfo->tbInfo[1].schdlngForTb)
25153 iTbs = allocInfo->tbInfo[1].iTbs;
25154 noLyr = allocInfo->tbInfo[1].noLyr;
25155 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25156 * RETX TB Size is same as Init TX TB Size */
25157 if (allocInfo->tbInfo[1].tbCb->txCntr)
25159 allocInfo->tbInfo[1].bytesAlloc =
25160 allocInfo->tbInfo[1].bytesReq;
25164 allocInfo->tbInfo[1].bytesAlloc =
25165 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;;
25166 /* DwPts Scheduling Changes Start */
25168 if (dlSf->sfType == RG_SCH_SPL_SF_DATA)
25170 allocInfo->tbInfo[1].bytesAlloc =
25171 rgTbSzTbl[noLyr - 1][iTbs][RGSCH_MAX(noRbs*3/4,1) - 1]/8;
25174 /* DwPts Scheduling Changes End */
25178 /* The last RBG, which can be smaller than the RBG size, is considered
25179 * only for the first-time allocation of a TYPE0 UE */
25180 dlSf->lstRbgDfct = 0;
25187 * @brief To prepare RNTP value from the PRB allocation (P-High -> 1 and P-Low -> 0)
25191 * Function : rgSCHCmnBuildRntpInfo
25193 * Processing Steps:
25195 * @param[in] U8 *rntpPtr
25196 * @param[in] U8 startRb
25197 * @param[in] U8 numRb
25203 PRIVATE S16 rgSCHCmnBuildRntpInfo
25212 PRIVATE S16 rgSCHCmnBuildRntpInfo(cell, rntpPtr, startRb, nmbRb, bw)
25220 U16 rbPtrStartIdx; /* Start index of the octet buffer to be filled */
25221 U16 rbPtrEndIdx; /* End index of the octet buffer to be filled */
25222 U16 rbBitLoc; /* Bit location to be set to 1 in the current byte */
25223 U16 nmbRbPerByte; /* PRBs to be set in the current byte (in case of multiple bytes) */
25225 TRC2(rgSCHCmnBuildRntpInfo);
25227 rbPtrStartIdx = (startRb)/8;
25228 rbPtrEndIdx = (startRb + nmbRb)/8;
25230 if (rntpPtr == NULLP)
25232 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId,
25233 "rgSCHCmnBuildRntpInfo():"
25234 "rntpPtr can't be NULLP (Memory Allocation Failed)");
25238 while(rbPtrStartIdx <= rbPtrEndIdx)
25240 rbBitLoc = (startRb)%8;
25242 /* case 1: startRb and endRb lies in same Byte */
25243 if (rbPtrStartIdx == rbPtrEndIdx)
25245 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25246 | (((1<<nmbRb)-1)<<rbBitLoc);
25249 /* case 2: startRb and endRb lies in different Byte */
25250 if (rbPtrStartIdx != rbPtrEndIdx)
25252 nmbRbPerByte = 8 - rbBitLoc;
25253 nmbRb = nmbRb - nmbRbPerByte;
25254 rntpPtr[rbPtrStartIdx] = rntpPtr[rbPtrStartIdx]
25255 | (((1<<nmbRbPerByte)-1)<<rbBitLoc);
25256 startRb = startRb + nmbRbPerByte;
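/* Worked example (hedged, values hypothetical): for startRb = 13 and
 * nmbRb = 7, rbPtrStartIdx = 1 and rbPtrEndIdx = 20 / 8 = 2. The first pass
 * sets bits 5..7 of octet 1 (rbBitLoc = 5, nmbRbPerByte = 3) and the second
 * pass sets bits 0..3 of octet 2, marking PRBs 13..19 as P-High in the RNTP
 * bitmap. */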
25262 /* dsfr_pal_fixes ** 21-March-2013 ** SKS ** Adding Debug logs */
25264 /* dsfr_pal_fixes ** 25-March-2013 ** SKS ** Adding Debug logs to print RNTP */
25270 * @brief To update non-DLFS alloc'n parameters after TYPE2 Allocation.
25274 * Function : rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25276 * Processing Steps:
25278 * @param[in] RgSchCellCb *cell
25279 * @param[in] RgSchDlSf *dlSf
25280 * @param[in] U8 rbStrt
25281 * @param[in] U8 numRb
25286 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc
25291 RgSchSFRPoolInfo *sfrPool,
25296 PRIVATE S16 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrPool, rbStrt, numRb)
25300 RgSchSFRPoolInfo *sfrPool;
25309 TRC2(rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc);
25310 dlSf->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25311 sfrPool->type2End = RGSCH_CEIL((rbStrt+numRb), cell->rbgSize);
25314 dlSf->type2Start += numRb;
25315 dlSf->bwAlloced += numRb;
25317 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
25319 /* Based on RNTP info, the CC user is assigned high power on a per-subframe basis */
25320 if(FALSE == ue->lteAdvUeCb.rgrLteAdvUeCfg.isUeCellEdge)
25322 if((sfrPool->type2Start >= sfrPool->pwrHiCCRange.startRb) &&
25323 (sfrPool->type2Start + numRb < sfrPool->pwrHiCCRange.endRb))
25325 ue->lteAdvUeCb.isCCUePHigh = TRUE;
25327 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25328 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25331 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId,"rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25332 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
25339 /* Calling rgSCHCmnBuildRntpInfo function to update RNTP BitMap */
25340 ret = rgSCHCmnBuildRntpInfo(cell, dlSf->rntpInfo.val, sfrPool->type2Start, numRb, dlSf->bw);
25343 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc():"
25344 "rgSCHCmnBuildRntpInfo() function returned RFAILED for CRNTI:%d",ue->ueId);
25349 sfrPool->type2Start += numRb;
25350 sfrPool->bwAlloced += numRb;
25357 * @brief To do DL allocation using TYPE0 RA.
25361 * Function : rgSCHCmnNonDlfsSFRPoolType0Alloc
25363 * Processing Steps:
25364 * - Perform TYPE0 allocation using the RBGs between type0End and type2End.
25365 * - Build the allocation mask as per RBG positioning.
25366 * - Update the allocation parameters.
25368 * @param[in] RgSchCellCb *cell
25369 * @param[in] RgSchDlSf *dlSf
25370 * @param[in] RgSchDlRbAlloc *allocInfo
25375 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc
25379 RgSchSFRPoolInfo *poolInfo,
25380 RgSchDlRbAlloc *allocInfo
25383 PRIVATE Void rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, poolInfo, allocInfo)
25386 RgSchSFRPoolInfo *poolInfo;
25387 RgSchDlRbAlloc *allocInfo;
25390 U32 dlAllocMsk = 0;
25397 TRC2(rgSCHCmnNonDlfsSFRPoolType0Alloc);
25399 if (poolInfo->poolstartRB + poolInfo->bw == dlSf->bw)
25401 if (poolInfo->type0End == dlSf->bw/4)
25403 rbgFiller = dlSf->lstRbgDfct;
25404 /* The last RBG, which can be smaller than the RBG size, is considered
25405 * only for the first-time allocation of a TYPE0 UE */
25406 dlSf->lstRbgDfct = 0;
25410 noRbgs = RGSCH_CEIL((allocInfo->rbsReq + rbgFiller), cell->rbgSize);
25412 /* Abhinav to-do start */
25413 /* MS_FIX for ccpu00123919*/
25414 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25415 if (dlSf->bwAlloced + noRbs > dlSf->bw)
25421 noRbs = (noRbgs * cell->rbgSize) - rbgFiller;
25423 /* Abhinav to-do end */
25427 /* type0End would have been initially (during subfrm Init) at the bit position
25428 * (cell->noOfRbgs - 1), 0 being the most significant.
25429 * Getting DlAllocMsk for noRbgs and at the appropriate position */
25430 dlAllocMsk |= (((1 << noRbgs) - 1) << (31 - poolInfo->type0End));
25431 /* Move backwards the type0End pivot */
25432 poolInfo->type0End -= noRbgs;
25433 /*MS_FIX for ccpu00123919*/
25434 /*noRbs = (noRbgs * cell->rbgSize) - rbgFiller;*/
25435 /* Update the bwAlloced field accordingly */
25436 poolInfo->bwAlloced += noRbs + dlSf->lstRbgDfct;
25437 dlSf->bwAlloced += noRbs + dlSf->lstRbgDfct;
25439 /* Update Type0 Alloc Info */
25440 allocInfo->allocInfo.raType0.numDlAlloc = noRbgs;
25441 allocInfo->allocInfo.raType0.dlAllocBitMask |= dlAllocMsk;
25442 allocInfo->rbsAlloc = noRbs;
25444 /* Update Tb info for each scheduled TB */
25445 iTbs = allocInfo->tbInfo[0].iTbs;
25446 noLyr = allocInfo->tbInfo[0].noLyr;
25447 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant.
25448 * RETX TB Size is same as Init TX TB Size */
25449 if (allocInfo->tbInfo[0].tbCb->txCntr)
25451 allocInfo->tbInfo[0].bytesAlloc =
25452 allocInfo->tbInfo[0].bytesReq;
25456 allocInfo->tbInfo[0].bytesAlloc =
25457 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;
25460 if (allocInfo->tbInfo[1].schdlngForTb)
25462 iTbs = allocInfo->tbInfo[1].iTbs;
25463 noLyr = allocInfo->tbInfo[1].noLyr;
25464 /* Fix for ccpu00123919: For a RETX TB the iTbs is irrelevant
25465 * RETX TB Size is same as Init TX TB Size */
25466 if (allocInfo->tbInfo[1].tbCb->txCntr)
25468 allocInfo->tbInfo[1].bytesAlloc =
25469 allocInfo->tbInfo[1].bytesReq;
25473 allocInfo->tbInfo[1].bytesAlloc =
25474 rgTbSzTbl[noLyr - 1][iTbs][noRbs - 1]/8;;
25478 /* The last RBG, which can be smaller than the RBG size, is considered
25479 * only for the first-time allocation of a TYPE0 UE */
25480 dlSf->lstRbgDfct = 0;
25485 * @brief Computes RNTP Info for a subframe.
25489 * Function : rgSCHCmnNonDlfsDsfrRntpComp
25491 * Processing Steps:
25492 * - Computes RNTP info from individual pools.
25494 * @param[in] RgSchDlSf *dlSf
25500 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp
25506 PRIVATE void rgSCHCmnNonDlfsDsfrRntpComp(cell, dlSf)
25511 PRIVATE U16 samples = 0;
25513 U16 bwBytes = (dlSf->bw-1)/8;
25514 RgrLoadInfIndInfo *rgrLoadInf;
25518 TRC2(rgSCHCmnNonDlfsDsfrRntpComp);
25520 len = (dlSf->bw % 8 == 0) ? dlSf->bw/8 : dlSf->bw/8 + 1;
25522 /* RNTP info is ORed every TTI and the sample is stored in cell control block */
25523 for(i = 0; i <= bwBytes; i++)
25525 cell->rntpAggrInfo.val[i] |= dlSf->rntpInfo.val[i];
25527 samples = samples + 1;
25528 /* After every 1000 ms, the RNTP info will be sent to the application, to be further sent to all neighbouring eNBs,
25529 informing them about the load indication for cell edge users */
25530 if(RG_SCH_MAX_RNTP_SAMPLES == samples)
25533 ret = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&rgrLoadInf,
25534 sizeof(RgrLoadInfIndInfo));
25537 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
25538 "allocate memory for sending LoadInfo");
25542 rgrLoadInf->u.rntpInfo.pres = cell->rntpAggrInfo.pres;
25543 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25544 rgrLoadInf->u.rntpInfo.len = len;
25546 /* dsfr_pal_fixes ** 21-March-2013 ** SKS */
25547 rgrLoadInf->u.rntpInfo.val = cell->rntpAggrInfo.val;
25548 rgrLoadInf->cellId = cell->cellId;
25550 /* dsfr_pal_fixes ** 22-March-2013 ** SKS */
25551 rgrLoadInf->bw = dlSf->bw;
25552 rgrLoadInf->type = RGR_SFR;
25554 ret = rgSCHUtlRgrLoadInfInd(cell, rgrLoadInf);
25557 RLOG_ARG0(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHCmnNonDlfsDsfrRntpComp():"
25558 "rgSCHUtlRgrLoadInfInd() returned RFAILED");
25561 cmMemset(cell->rntpAggrInfo.val,0,len);
25565 /* LTE_ADV_FLAG_REMOVED_END */
25567 /* LTE_ADV_FLAG_REMOVED_START */
25569 * @brief Performs RB allocation per UE from a pool.
25573 * Function : rgSCHCmnSFRNonDlfsUeRbAlloc
25575 * Processing Steps:
25576 * - Allocate consecutively available RBs.
25578 * @param[in] RgSchCellCb *cell
25579 * @param[in] RgSchUeCb *ue
25580 * @param[in] RgSchDlSf *dlSf
25581 * @param[out] U8 *isDlBwAvail
25589 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc
25597 PRIVATE S16 rgSCHCmnSFRNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25604 RgSchDlRbAlloc *allocInfo;
25605 RgSchCmnDlUe *dlUe;
25607 RgSchSFRPoolInfo *sfrpoolInfo = NULLP;
25609 TRC2(rgSCHCmnSFRNonDlfsUeRbAlloc);
25611 isUECellEdge = RG_SCH_CMN_IS_UE_CELL_EDGE(ue);
25613 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25614 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25615 *isDlBwAvail = TRUE;
25617 /*Find which pool is available for this UE*/
25618 if (rgSCHCmnNonDlfsSFRBwAvlbl(cell, &sfrpoolInfo, dlSf, allocInfo, isUECellEdge) != TRUE)
25620 /* SFR_FIX - If this is a CE UE, there may still be BW available in the CC pool,
25621 So CC UEs will be scheduled */
25624 *isDlBwAvail = TRUE;
25628 *isDlBwAvail = FALSE;
25633 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX || dlUe->proc->tbInfo[1].isAckNackDtx)
25635 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25639 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25642 if (!(allocInfo->pdcch))
25644 /* Returning ROK since PDCCH might be available for another UE and further allocations could be done */
25649 allocInfo->rnti = ue->ueId;
25652 if (allocInfo->raType == RG_SCH_CMN_RA_TYPE2)
25654 allocInfo->allocInfo.raType2.isLocal = TRUE;
25655 /* rg004.201 patch - ccpu00109921 fix end */
25656 /* MS_FIX for ccpu00123918*/
25657 allocInfo->allocInfo.raType2.rbStart = (U8)sfrpoolInfo->type2Start;
25658 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
25659 /* rg007.201 - Changes for MIMO feature addition */
25660 /* rg008.201 - Removed dependency on MIMO compile-time flag */
25661 rgSCHCmnNonDlfsUpdSFRPoolTyp2Alloc(cell, ue, dlSf, sfrpoolInfo, \
25662 allocInfo->allocInfo.raType2.rbStart, \
25663 allocInfo->allocInfo.raType2.numRb);
25664 allocInfo->rbsAlloc = allocInfo->rbsReq;
25665 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25667 else if (allocInfo->raType == RG_SCH_CMN_RA_TYPE0)
25669 rgSCHCmnNonDlfsSFRPoolType0Alloc(cell, dlSf, sfrpoolInfo, allocInfo);
25673 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,0);
25674 if(allocInfo->tbInfo[1].schdlngForTb == TRUE)
25676 rgSCHCmnFindCodeRate(cell,dlSf,allocInfo,1);
25681 #if defined(LTEMAC_SPS)
25682 /* Update the sub-frame with new allocation */
25683 dlSf->bwAlloced += allocInfo->rbsReq;
25689 /* LTE_ADV_FLAG_REMOVED_END */
25690 #endif /* LTE_TDD */
25693 * @brief Performs RB allocation per UE for frequency non-selective cell.
25697 * Function : rgSCHCmnNonDlfsUeRbAlloc
25699 * Processing Steps:
25700 * - Allocate consecutively available RBs.
25702 * @param[in] RgSchCellCb *cell
25703 * @param[in] RgSchUeCb *ue
25704 * @param[in] RgSchDlSf *dlSf
25705 * @param[out] U8 *isDlBwAvail
25712 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc
25720 PRIVATE S16 rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, isDlBwAvail)
25727 RgSchDlRbAlloc *allocInfo;
25728 RgSchCmnDlUe *dlUe;
25732 TRC2(rgSCHCmnNonDlfsUeRbAlloc);
25735 RgSch5gtfUeCb *ue5gtfCb = &(ue->ue5gtfCb);
25736 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[ue5gtfCb->BeamId]);
25738 dlUe = RG_SCH_CMN_GET_DL_UE(ue,cell);
25739 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
25740 *isDlBwAvail = TRUE;
25742 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
25744 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25745 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
25747 printf("5GTF_ERROR vrbg allocated > 25\n");
25751 if (dlUe->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX
25752 || dlUe->proc->tbInfo[1].isAckNackDtx)
25754 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat, TRUE);
25758 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ue, dlSf, dlUe->mimoInfo.cwInfo[0].cqi, allocInfo->dciFormat,FALSE);
25760 if (!(allocInfo->pdcch))
25762 /* Returning ROK since PDCCH might be available for another UE and
25763 * further allocations could be done */
25764 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
25765 "5GTF_ERROR : PDCCH allocation failed :ue (%u)",
25767 printf("5GTF_ERROR PDCCH allocation failed\n");
25771 //maxPrb = RGSCH_MIN((allocInfo->vrbgReq * MAX_5GTF_VRBG_SIZE), ue5gtfCb->maxPrb);
25772 //maxPrb = RGSCH_MIN(maxPrb,
25773 //((beamInfo->totVrbgAvail - beamInfo->vrbgStart)* MAX_5GTF_VRBG_SIZE)));
25774 //TODO_SID Need to check for vrbg available after scheduling for same beam.
25775 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
25776 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
25777 //TODO_SID: Setting for max TP
25778 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
25779 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
25780 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
25781 allocInfo->tbInfo[0].tbCb->dlGrnt.SCID = 0;
25782 allocInfo->tbInfo[0].tbCb->dlGrnt.dciFormat = allocInfo->dciFormat;
25783 //Filling temporarily
25784 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
25785 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
25787 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25788 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
25789 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25797 * @brief Performs RB allocation for Msg4 for frequency non-selective cell.
25801 * Function : rgSCHCmnNonDlfsCcchSduAlloc
25803 * Processing Steps:
25804 * - For each element in the list, Call rgSCHCmnNonDlfsCcchSduRbAlloc().
25805 * - If allocation is successful, add the ueCb to scheduled list of CCCH
25807 * - else, add UeCb to non-scheduled list.
25809 * @param[in] RgSchCellCb *cell
25810 * @param[in, out] RgSchCmnCcchSduRbAlloc *allocInfo
25811 * @param[in] U8 isRetx
25816 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc
25819 RgSchCmnCcchSduRbAlloc *allocInfo,
25823 PRIVATE Void rgSCHCmnNonDlfsCcchSduAlloc(cell, allocInfo, isRetx)
25825 RgSchCmnCcchSduRbAlloc *allocInfo;
25830 CmLListCp *ccchSduLst = NULLP;
25831 CmLListCp *schdCcchSduLst = NULLP;
25832 CmLListCp *nonSchdCcchSduLst = NULLP;
25833 CmLList *schdLnkNode = NULLP;
25834 CmLList *toBeSchdLnk = NULLP;
25835 RgSchDlSf *dlSf = allocInfo->ccchSduDlSf;
25836 RgSchUeCb *ueCb = NULLP;
25837 RgSchDlHqProcCb *hqP = NULLP;
25838 TRC2(rgSCHCmnNonDlfsCcchSduAlloc);
25842 /* Initialize re-transmitting lists */
25843 ccchSduLst = &(allocInfo->ccchSduRetxLst);
25844 schdCcchSduLst = &(allocInfo->schdCcchSduRetxLst);
25845 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduRetxLst);
25849 /* Initialize transmitting lists */
25850 ccchSduLst = &(allocInfo->ccchSduTxLst);
25851 schdCcchSduLst = &(allocInfo->schdCcchSduTxLst);
25852 nonSchdCcchSduLst = &(allocInfo->nonSchdCcchSduTxLst);
25855 /* Perform allocations for the list */
25856 toBeSchdLnk = cmLListFirst(ccchSduLst);
25857 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
25859 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25860 ueCb = hqP->hqE->ue;
25861 schdLnkNode = &hqP->schdLstLnk;
25862 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25863 ret = rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf);
25866 /* Allocation failed: Add remaining CCCH SDU nodes to non-scheduled
25867 * list and return */
25870 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
25871 ueCb = hqP->hqE->ue;
25872 schdLnkNode = &hqP->schdLstLnk;
25873 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
25874 cmLListAdd2Tail(nonSchdCcchSduLst, schdLnkNode);
25875 toBeSchdLnk = toBeSchdLnk->next;
25876 } while(toBeSchdLnk);
25880 /* Allocation successful: Add UE to the scheduled list */
25881 cmLListAdd2Tail(schdCcchSduLst, schdLnkNode);
25889 * @brief Performs RB allocation for CcchSdu for frequency non-selective cell.
25893 * Function : rgSCHCmnNonDlfsCcchSduRbAlloc
25895 * Processing Steps:
25897 * - Allocate consecutively available RBs
25899 * @param[in] RgSchCellCb *cell
25900 * @param[in] RgSchUeCb *ueCb
25901 * @param[in] RgSchDlSf *dlSf
25907 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc
25914 PRIVATE S16 rgSCHCmnNonDlfsCcchSduRbAlloc(cell, ueCb, dlSf)
25920 RgSchDlRbAlloc *allocInfo;
25921 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ueCb,cell);
25923 TRC2(rgSCHCmnNonDlfsCcchSduRbAlloc);
25926 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ueCb,cell);
25928 /* [ccpu00138802]-MOD- If BW is less than required, return failure;
25929 It will be allocated in next TTI */
25931 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
25932 (dlSf->bwAlloced == dlSf->bw))
25934 if((dlSf->bwAlloced == dlSf->bw) ||
25935 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
25940 /* Retrieve PDCCH */
25941 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
25942 if (ueDl->proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
25944 /* allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, dlSf, y, ueDl->cqi,
25945 * TFU_DCI_FORMAT_1A, TRUE);*/
25946 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, TRUE);
25950 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, ueCb, dlSf, ueDl->mimoInfo.cwInfo[0].cqi, TFU_DCI_FORMAT_1A, FALSE);
25952 if (!(allocInfo->pdcch))
25954 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
25958 /* Update allocation information */
25959 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
25960 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
25961 allocInfo->allocInfo.raType2.isLocal = TRUE;
25963 /*Fix for ccpu00123918*/
25964 /* Push this harq process back to the free queue */
25965 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
25966 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
25967 allocInfo->rbsAlloc = allocInfo->rbsReq;
25968 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
25969 /* Update the sub-frame with new allocation */
25971 /* LTE_ADV_FLAG_REMOVED_START */
25973 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
25975 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf,
25976 allocInfo->allocInfo.raType2.rbStart,
25977 allocInfo->allocInfo.raType2.numRb);
25980 #endif /* end of ifndef LTE_TDD*/
25982 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf,
25983 allocInfo->allocInfo.raType2.rbStart,
25984 allocInfo->allocInfo.raType2.numRb);
25987 /* LTE_ADV_FLAG_REMOVED_END */
25988 /* ccpu00131941 - bwAlloced is updated from SPS bandwidth */
25996 * @brief Performs RB allocation for Msg4 for frequency non-selective cell.
26000 * Function : rgSCHCmnNonDlfsMsg4RbAlloc
26002 * Processing Steps:
26004 * - Allocate consecutively available RBs
26006 * @param[in] RgSchCellCb *cell
26007 * @param[in] RgSchRaCb *raCb
26008 * @param[in] RgSchDlSf *dlSf
26014 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc
26021 PRIVATE S16 rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf)
26027 RgSchDlRbAlloc *allocInfo;
26028 TRC2(rgSCHCmnNonDlfsMsg4RbAlloc);
26031 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_RACB(raCb);
26034 RgSchSfBeamInfo *beamInfo = &(dlSf->sfBeamInfo[0]);
26035 if(beamInfo->totVrbgAllocated > MAX_5GTF_VRBG)
26037 RLOG_ARG1(L_ERROR ,DBG_CELLID,cell->cellId,
26038 "5GTF_ERROR : vrbg allocated > 25 :ue (%u)",
26040 printf("5GTF_ERROR vrbg allocated > 25\n");
26045 if ((dlSf->spsAllocdBw >= cell->spsBwRbgInfo.numRbs) &&
26046 (dlSf->bwAlloced == dlSf->bw))
26048 if((dlSf->bwAlloced == dlSf->bw) ||
26049 (allocInfo->rbsReq > (dlSf->bw - dlSf->bwAlloced)))
26056 /* DTX Changes: One Variable is passed to check whether it is DTX or Not */
26057 if (raCb->dlHqE->msg4Proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX)
26059 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, TRUE);
26063 allocInfo->pdcch = rgSCHCmnPdcchAlloc(cell, raCb->ue, dlSf, raCb->ccchCqi, TFU_DCI_FORMAT_B1, FALSE);
26065 if (!(allocInfo->pdcch))
26067 /* Returning RFAILED since PDCCH not available for any CCCH allocations */
26072 /* SR_RACH_STATS : MSG4 TX Failed */
26073 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
26075 /* Update allocation information */
26076 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
26077 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
26078 allocInfo->allocInfo.raType2.isLocal = TRUE;
26081 /*Fix for ccpu00123918*/
26082 allocInfo->allocInfo.raType2.rbStart = (U8)dlSf->type2Start;
26083 allocInfo->allocInfo.raType2.numRb = allocInfo->rbsReq;
26084 /* LTE_ADV_FLAG_REMOVED_START */
26086 if (cell->lteAdvCb.sfrCfg.status == RGR_ENABLE)
26088 rgSCHCmnNonDlfsSFRCmnChannelUpdTyp2Alloc(cell, dlSf, \
26089 allocInfo->allocInfo.raType2.rbStart, \
26090 allocInfo->allocInfo.raType2.numRb);
26093 #endif /* end of ifndef LTE_TDD */
26095 rgSCHCmnNonDlfsUpdTyp2Alloc(cell, dlSf, \
26096 allocInfo->allocInfo.raType2.rbStart, \
26097 allocInfo->allocInfo.raType2.numRb);
26099 /* LTE_ADV_FLAG_REMOVED_END */
26101 allocInfo->rbsAlloc = allocInfo->rbsReq;
26102 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26106 allocInfo->pdcch->dci.u.format1aInfo.t.pdschInfo.isTBMsg4 = TRUE;
26108 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart = beamInfo->vrbgStart;
26109 allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg = allocInfo->vrbgReq;
26111 /* Update allocation information */
26112 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26114 allocInfo->tbInfo[0].tbCb->dlGrnt.xPDSCHRange = 1;
26115 allocInfo->tbInfo[0].tbCb->dlGrnt.rbAssign = rgSCHCmnCalcRiv(MAX_5GTF_VRBG,
26116 allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart, allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg);
26118 allocInfo->tbInfo[0].tbCb->dlGrnt.rbStrt = (allocInfo->tbInfo[0].tbCb->dlGrnt.vrbgStart * MAX_5GTF_VRBG_SIZE);
26119 allocInfo->tbInfo[0].tbCb->dlGrnt.numRb = (allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg * MAX_5GTF_VRBG_SIZE);
26122 beamInfo->vrbgStart += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26123 beamInfo->totVrbgAllocated += allocInfo->tbInfo[0].tbCb->dlGrnt.numVrbg;
26124 allocInfo->tbInfo[0].bytesAlloc = allocInfo->tbInfo[0].bytesReq;
26132 * @brief Performs RB allocation for Msg4 lists of frequency non-selective cell.
26136 * Function : rgSCHCmnNonDlfsMsg4Alloc
26138 * Processing Steps:
26139 * - For each element in the list, Call rgSCHCmnNonDlfsMsg4RbAlloc().
26140 * - If allocation is successful, add the raCb to scheduled list of MSG4.
26141 * - else, add RaCb to non-scheduled list.
26143 * @param[in] RgSchCellCb *cell
26144 * @param[in, out] RgSchCmnMsg4RbAlloc *allocInfo
26145 * @param[in] U8 isRetx
26150 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc
26153 RgSchCmnMsg4RbAlloc *allocInfo,
26157 PRIVATE Void rgSCHCmnNonDlfsMsg4Alloc(cell, allocInfo, isRetx)
26159 RgSchCmnMsg4RbAlloc *allocInfo;
26164 CmLListCp *msg4Lst = NULLP;
26165 CmLListCp *schdMsg4Lst = NULLP;
26166 CmLListCp *nonSchdMsg4Lst = NULLP;
26167 CmLList *schdLnkNode = NULLP;
26168 CmLList *toBeSchdLnk = NULLP;
26169 RgSchDlSf *dlSf = allocInfo->msg4DlSf;
26170 RgSchRaCb *raCb = NULLP;
26171 RgSchDlHqProcCb *hqP = NULLP;
26172 TRC2(rgSCHCmnNonDlfsMsg4Alloc);
26176 /* Initialize re-transmitting lists */
26177 msg4Lst = &(allocInfo->msg4RetxLst);
26178 schdMsg4Lst = &(allocInfo->schdMsg4RetxLst);
26179 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4RetxLst);
26183 /* Initialize transmitting lists */
26184 msg4Lst = &(allocInfo->msg4TxLst);
26185 schdMsg4Lst = &(allocInfo->schdMsg4TxLst);
26186 nonSchdMsg4Lst = &(allocInfo->nonSchdMsg4TxLst);
26189 /* Perform allocations for the list */
26190 toBeSchdLnk = cmLListFirst(msg4Lst);
26191 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26193 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26194 raCb = hqP->hqE->raCb;
26195 schdLnkNode = &hqP->schdLstLnk;
26196 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26197 ret = rgSCHCmnNonDlfsMsg4RbAlloc(cell, raCb, dlSf);
26200 /* Allocation failed: Add remaining MSG4 nodes to non-scheduled
26201 * list and return */
26204 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26205 raCb = hqP->hqE->raCb;
26206 schdLnkNode = &hqP->schdLstLnk;
26207 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26208 cmLListAdd2Tail(nonSchdMsg4Lst, schdLnkNode);
26209 toBeSchdLnk = toBeSchdLnk->next;
26210 } while(toBeSchdLnk);
26214 /* Allocation successful: Add UE to the scheduled list */
26215 cmLListAdd2Tail(schdMsg4Lst, schdLnkNode);
26226 * @brief Performs RB allocation for the list of UEs of a frequency
26227 * non-selective cell.
26231 * Function : rgSCHCmnNonDlfsDedRbAlloc
26233 * Processing Steps:
26234 * - For each element in the list, Call rgSCHCmnNonDlfsUeRbAlloc().
26235 * - If allocation is successful, add the ueCb to scheduled list of UEs.
26236 * - else, add ueCb to non-scheduled list of UEs.
26238 * @param[in] RgSchCellCb *cell
26239 * @param[in, out] RgSchCmnUeRbAlloc *allocInfo
26240 * @param[in] CmLListCp *ueLst,
26241 * @param[in, out] CmLListCp *schdHqPLst,
26242 * @param[in, out] CmLListCp *nonSchdHqPLst
26247 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc
26250 RgSchCmnUeRbAlloc *allocInfo,
26252 CmLListCp *schdHqPLst,
26253 CmLListCp *nonSchdHqPLst
26256 PUBLIC Void rgSCHCmnNonDlfsDedRbAlloc(cell, allocInfo, ueLst,
26257 schdHqPLst, nonSchdHqPLst)
26259 RgSchCmnUeRbAlloc *allocInfo;
26261 CmLListCp *schdHqPLst;
26262 CmLListCp *nonSchdHqPLst;
26266 CmLList *schdLnkNode = NULLP;
26267 CmLList *toBeSchdLnk = NULLP;
26268 RgSchDlSf *dlSf = allocInfo->dedDlSf;
26269 RgSchUeCb *ue = NULLP;
26270 RgSchDlHqProcCb *hqP = NULLP;
26272 TRC2(rgSCHCmnNonDlfsDedRbAlloc);
26275 /* Perform allocations for the list */
26276 toBeSchdLnk = cmLListFirst(ueLst);
26277 for (; toBeSchdLnk; toBeSchdLnk = toBeSchdLnk->next)
26279 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26281 schdLnkNode = &hqP->schdLstLnk;
26282 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26284 ret = rgSCHCmnNonDlfsUeRbAlloc(cell, ue, dlSf, &isDlBwAvail);
26287 /* Allocation failed: Add remaining UEs to non-scheduled
26288 * list and return */
26291 hqP = (RgSchDlHqProcCb *)(toBeSchdLnk->node);
26293 schdLnkNode = &hqP->schdLstLnk;
26294 RG_SCH_CMN_INIT_SCHD_LNK(schdLnkNode, hqP);
26295 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
26296 toBeSchdLnk = toBeSchdLnk->next;
26297 } while(toBeSchdLnk);
26303 #if defined (TENB_STATS) && defined (RG_5GTF)
26304 cell->tenbStats->sch.dl5gtfRbAllocPass++;
26306 /* Allocation successful: Add UE to the scheduled list */
26307 cmLListAdd2Tail(schdHqPLst, schdLnkNode);
26311 #if defined (TENB_STATS) && defined (RG_5GTF)
26312 cell->tenbStats->sch.dl5gtfRbAllocFail++;
26314 /* Allocation failed : Add UE to the non-scheduled list */
26315 printf("5GTF_ERROR Dl rb alloc failed adding nonSchdHqPLst\n");
26316 cmLListAdd2Tail(nonSchdHqPLst, schdLnkNode);
26324 * @brief Handles RB allocation for frequency non-selective cell.
26328 * Function : rgSCHCmnNonDlfsRbAlloc
26330 * Invoking Module Processing:
26331 * - SCH shall invoke this if downlink frequency selective is disabled for
26332 * the cell for RB allocation.
26333 * - MAX C/I/PFS/RR shall provide the requiredBytes, required RBs
26334 * estimate and subframe for each allocation to be made to SCH.
26336 * Processing Steps:
26337 * - Allocate sequentially for common channels.
26338 * - For transmitting and re-transmitting UE list.
26340 * - Perform wide-band allocations for UE in increasing order of
26342 * - Determine Imcs for the allocation.
26343 * - Determine RA type.
26344 * - Determine DCI format.
26346 * @param[in] RgSchCellCb *cell
26347 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
26352 PUBLIC Void rgSCHCmnNonDlfsRbAlloc
26355 RgSchCmnDlRbAllocInfo *allocInfo
26358 PUBLIC Void rgSCHCmnNonDlfsRbAlloc(cell, allocInfo)
26360 RgSchCmnDlRbAllocInfo *allocInfo;
26364 RgSchDlRbAlloc *reqAllocInfo;
26365 TRC2(rgSCHCmnNonDlfsRbAlloc);
26367 /* Allocate for MSG4 retransmissions */
26368 if (allocInfo->msg4Alloc.msg4RetxLst.count)
26370 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc RetxLst\n");
26371 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), TRUE);
26374 /* Allocate for MSG4 transmissions */
26375 /* Assuming all the nodes in the list need allocations: rbsReq is valid */
26376 if (allocInfo->msg4Alloc.msg4TxLst.count)
26378 printf("5GTF_ERROR rgSCHCmnNonDlfsMsg4Alloc txLst\n");
26379 rgSCHCmnNonDlfsMsg4Alloc(cell, &(allocInfo->msg4Alloc), FALSE);
26382 /* Allocate for CCCH SDU (received after guard timer expiry)
26383 * retransmissions */
26384 if (allocInfo->ccchSduAlloc.ccchSduRetxLst.count)
26386 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26387 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), TRUE);
26390 /* Allocate for CCCH SDU transmissions */
26391 /* Allocate for CCCH SDU (received after guard timer expiry) transmissions */
26392 if (allocInfo->ccchSduAlloc.ccchSduTxLst.count)
26394 printf("5GTF_ERROR rgSCHCmnNonDlfsCcchSduAlloc\n");
26395 rgSCHCmnNonDlfsCcchSduAlloc(cell, &(allocInfo->ccchSduAlloc), FALSE);
26399 /* Allocate for Random access response */
26400 for (raRspCnt = 0; raRspCnt < RG_SCH_CMN_MAX_CMN_PDCCH; ++raRspCnt)
26402 /* Assuming that the requests will be filled in sequentially */
26403 reqAllocInfo = &(allocInfo->raRspAlloc[raRspCnt]);
26404 if (!reqAllocInfo->rbsReq)
26408 printf("5GTF_ERROR calling RAR rgSCHCmnNonDlfsCmnRbAlloc\n");
26409 // if ((rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo)) != ROK)
26410 if ((rgSCHCmnNonDlfsCmnRbAllocRar(cell, reqAllocInfo)) != ROK)
26416 /* Allocate for RETX+TX UEs */
26417 if(allocInfo->dedAlloc.txRetxHqPLst.count)
26419 printf("5GTF_ERROR TX RETX rgSCHCmnNonDlfsDedRbAlloc\n");
26420 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26421 &(allocInfo->dedAlloc.txRetxHqPLst),
26422 &(allocInfo->dedAlloc.schdTxRetxHqPLst),
26423 &(allocInfo->dedAlloc.nonSchdTxRetxHqPLst));
26426 if((allocInfo->dedAlloc.retxHqPLst.count))
26428 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26429 &(allocInfo->dedAlloc.retxHqPLst),
26430 &(allocInfo->dedAlloc.schdRetxHqPLst),
26431 &(allocInfo->dedAlloc.nonSchdRetxHqPLst));
26434 /* Allocate for transmitting UEs */
26435 if((allocInfo->dedAlloc.txHqPLst.count))
26437 rgSCHCmnNonDlfsDedRbAlloc(cell, &(allocInfo->dedAlloc),
26438 &(allocInfo->dedAlloc.txHqPLst),
26439 &(allocInfo->dedAlloc.schdTxHqPLst),
26440 &(allocInfo->dedAlloc.nonSchdTxHqPLst));
26443 RgSchCmnCell *cmnCell = RG_SCH_CMN_GET_CELL(cell);
26444 if ((allocInfo->dedAlloc.txRetxHqPLst.count +
26445 allocInfo->dedAlloc.retxHqPLst.count +
26446 allocInfo->dedAlloc.txHqPLst.count) >
26447 cmnCell->dl.maxUePerDlSf)
26449 #ifndef ALIGN_64BIT
26450 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26451 " scheduler exceed maximumUePerDlSf(%u)tx-retx %ld retx %ld tx %ld\n",
26452 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26453 allocInfo->dedAlloc.retxHqPLst.count,
26454 allocInfo->dedAlloc.txHqPLst.count));
26456 RGSCHDBGERRNEW(cell->instIdx,(rgSchPBuf(cell->instIdx),"UEs selected by"
26457 " scheduler exceed maximumUePerDlSf(%u)tx-retx %d retx %d tx %d\n",
26458 cmnCell->dl.maxUePerDlSf, allocInfo->dedAlloc.txRetxHqPLst.count,
26459 allocInfo->dedAlloc.retxHqPLst.count,
26460 allocInfo->dedAlloc.txHqPLst.count));
26465 /* LTE_ADV_FLAG_REMOVED_START */
26466 if(cell->lteAdvCb.dsfrCfg.status == RGR_ENABLE)
26468 printf("5GTF_ERROR RETX rgSCHCmnNonDlfsDsfrRntpComp\n");
26469 rgSCHCmnNonDlfsDsfrRntpComp(cell, allocInfo->dedAlloc.dedDlSf);
26471 /* LTE_ADV_FLAG_REMOVED_END */
26472 #endif /* LTE_TDD */
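/* Illustrative sketch (not compiled): priority order in which
 * rgSCHCmnNonDlfsRbAlloc() above hands out RBs within a subframe,
 * summarised as a checklist (sketchNonDlfsAllocOrder is hypothetical). */
#if 0
static const char *sketchNonDlfsAllocOrder[] =
{
   "1. MSG4 retransmissions",
   "2. MSG4 new transmissions",
   "3. CCCH SDU retransmissions",
   "4. CCCH SDU new transmissions",
   "5. Random access responses (RAR)",
   "6. Dedicated UEs with TX+RETX HARQ procs",
   "7. Dedicated UEs with RETX-only HARQ procs",
   "8. Dedicated UEs with new TX HARQ procs"
};
#endif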
26476 /***********************************************************
26478 * Func : rgSCHCmnCalcRiv
26480 * Desc : This function calculates the RIV (Resource Indication Value) as per 36.213, Sec 7.1.6.3.
26486 * File : rg_sch_cmn.c
26488 **********************************************************/
26491 PUBLIC U32 rgSCHCmnCalcRiv
26498 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26505 PUBLIC U32 rgSCHCmnCalcRiv
26512 PUBLIC U32 rgSCHCmnCalcRiv(bw, rbStart, numRb)
26519 U8 numRbMinus1 = numRb - 1;
26522 TRC2(rgSCHCmnCalcRiv);
26524 if (numRbMinus1 <= bw/2)
26526 riv = bw * numRbMinus1 + rbStart;
26530 riv = bw * (bw - numRbMinus1) + (bw - rbStart - 1);
26533 } /* rgSCHCmnCalcRiv */
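/* Illustrative sketch (not compiled): worked examples of the type-2 RIV
 * mapping implemented above, for a 100-RB carrier:
 *   rbStart = 10, numRb =  5 : (5-1)  <= 50 -> riv = 100*4  + 10 = 410
 *   rbStart = 10, numRb = 60 : (60-1) >  50 -> riv = 100*41 + 89 = 4189
 * sketchRivDecode() is a hypothetical inverse, shown only to make the
 * encoding concrete; it is not part of this file. */
#if 0
static Void sketchRivDecode(U32 bw, U32 riv, U8 *rbStart, U8 *numRb)
{
   U32 len   = riv / bw + 1;      /* candidate allocation length */
   U32 start = riv % bw;          /* candidate starting RB       */

   if ((len + start) > bw)        /* encoder used its second branch */
   {
      len   = bw - len + 2;
      start = bw - 1 - start;
   }
   *numRb   = (U8)len;
   *rbStart = (U8)start;
}
#endif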
26537 * @brief This function allocates and copies the RACH response scheduling
26538 * related information into cell control block.
26542 * Function: rgSCHCmnDlCpyRachInfo
26543 * Purpose: This function allocates and copies the RACH response
26544 * scheduling related information into cell control block
26545 * for each DL subframe.
26548 * Invoked by: Scheduler
26550 * @param[in] RgSchCellCb* cell
26551 * @param[in] RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES]
26552 * @param[in] U8 raArrSz
26557 PRIVATE S16 rgSCHCmnDlCpyRachInfo
26560 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES],
26564 PRIVATE S16 rgSCHCmnDlCpyRachInfo(cell, rachRspLst, raArrSz)
26566 RgSchTddRachRspLst rachRspLst[][RGSCH_NUM_SUB_FRAMES];
26570 U8 ulDlCfgIdx = cell->ulDlCfgIdx;
26579 TRC2(rgSCHCmnDlCpyRachInfo);
26581 /* Allocate RACH response information for each DL
26582 * subframe in a radio frame */
26583 ret = rgSCHUtlAllocSBuf(cell->instIdx, (Data **)&cell->rachRspLst,
26584 rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][RGSCH_NUM_SUB_FRAMES-1] *
26585 sizeof(RgSchTddRachRspLst));
26591 for(sfnIdx=raArrSz-1; sfnIdx>=0; sfnIdx--)
26593 for(subfrmIdx=0; subfrmIdx < RGSCH_NUM_SUB_FRAMES; subfrmIdx++)
26595 subfrmIdx = rgSchTddHighDlSubfrmIdxTbl[ulDlCfgIdx][subfrmIdx];
26596 if(subfrmIdx == RGSCH_NUM_SUB_FRAMES)
26601 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rachRspLst[sfnIdx],subfrmIdx);
26603 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26605 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, rgSchTddNumDlSubfrmTbl[ulDlCfgIdx],subfrmIdx);
26606 sfNum = rgSchTddNumDlSubfrmTbl[ulDlCfgIdx][subfrmIdx]-1;
26607 numRfs = cell->rachRspLst[sfNum].numRadiofrms;
26608 /* Update each DL subframe in which a RACH response can
26609 * be sent */
26612 cell->rachRspLst[sfNum].rachRsp[numRfs].sfnOffset =
26613 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].sfnOffset;
26614 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26616 cell->rachRspLst[sfNum].rachRsp[numRfs].\
26617 subframe[sfcount] =
26618 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].\
26621 cell->rachRspLst[sfNum].rachRsp[numRfs].numSubfrms =
26622 rachRspLst[sfnIdx][subfrmIdx].rachRsp[0].numSubfrms;
26623 cell->rachRspLst[sfNum].numRadiofrms++;
26626 /* Copy the subframes to be deleted at this subframe */
26628 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
26631 cell->rachRspLst[sfNum].delInfo.sfnOffset =
26632 rachRspLst[sfnIdx][subfrmIdx].delInfo.sfnOffset;
26633 for(sfcount=0; sfcount < numSubfrms; sfcount++)
26635 cell->rachRspLst[sfNum].delInfo.subframe[sfcount] =
26636 rachRspLst[sfnIdx][subfrmIdx].delInfo.subframe[sfcount];
26638 cell->rachRspLst[sfNum].delInfo.numSubfrms =
26639 rachRspLst[sfnIdx][subfrmIdx].delInfo.numSubfrms;
26647 * @brief This function determines the iTbs based on the new CFI,
26648 * CQI and BLER based delta iTbs
26652 * Function: rgSchCmnFetchItbs
26653 * Purpose: Fetch the new iTbs when CFI changes.
26655 * @param[in] RgSchCellCb *cell
26656 * @param[in] RgSchCmnDlUe *ueDl
26657 * @param[in] U8 cqi
26664 PRIVATE S32 rgSchCmnFetchItbs
26667 RgSchCmnDlUe *ueDl,
26675 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, subFrm, cqi, cfi, cwIdx, noLyr)
26677 RgSchCmnDlUe *ueDl;
26686 PRIVATE S32 rgSchCmnFetchItbs
26689 RgSchCmnDlUe *ueDl,
26696 PRIVATE S32 rgSchCmnFetchItbs (cell, ueDl, cqi, cfi, cwIdx, noLyr)
26698 RgSchCmnDlUe *ueDl;
26707 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
26710 TRC2(rgSchCmnFetchItbs);
26713 /* Special handling for the special subframe when CFI is 3, as
26714 * the CFI in a special subframe can be at most 2 */
26715 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
26717 if((cellDl->currCfi == 3) ||
26718 ((cell->bwCfg.dlTotalBw <= 10) && (cellDl->currCfi == 1)))
26720 /* Use CFI 2 in this case */
26721 iTbs = (ueDl->laCb[cwIdx].deltaiTbs +
26722 ((*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][2]))[cqi])* 100)/100;
26724 RG_SCH_CHK_ITBS_RANGE(iTbs, RGSCH_NUM_ITBS - 1);
26728 iTbs = ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1];
26730 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
26732 else /* CFI changed: update with the new iTbs and reset the BLER delta */
26735 S32 tmpiTbs = (*(RgSchCmnCqiToTbs *)(cellDl->cqiToTbsTbl[0][cfi]))[cqi];
26737 iTbs = (ueDl->laCb[cwIdx].deltaiTbs + tmpiTbs*100)/100;
26739 RG_SCH_CHK_ITBS_RANGE(iTbs, tmpiTbs);
26741 iTbs = RGSCH_MIN(iTbs, cell->thresholds.maxDlItbs);
26743 ueDl->mimoInfo.cwInfo[cwIdx].iTbs[noLyr - 1] = iTbs;
26745 ueDl->lastCfi = cfi;
26746 ueDl->laCb[cwIdx].deltaiTbs = 0;
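/* Illustrative sketch (not compiled): the link adaptation above keeps
 * deltaiTbs in hundredths of an iTbs step, so the blended value is
 * (deltaiTbs + tblItbs*100)/100, where tblItbs is the CQI-to-iTbs table
 * entry. Hand-worked examples:
 *   tblItbs = 15, deltaiTbs = -250 -> (1500 - 250)/100 = 12
 *   tblItbs = 15, deltaiTbs = +180 -> (1500 + 180)/100 = 16, then limited
 * sketchBlendItbs() is hypothetical and assumes RG_SCH_CHK_ITBS_RANGE()
 * clamps the result into [0, tblItbs]. */
#if 0
static S32 sketchBlendItbs(S32 tblItbs, S32 deltaiTbs, S32 maxDlItbs)
{
   S32 iTbs = (deltaiTbs + tblItbs * 100) / 100;

   if (iTbs > tblItbs)   /* assumed effect of RG_SCH_CHK_ITBS_RANGE() */
   {
      iTbs = tblItbs;
   }
   if (iTbs < 0)
   {
      iTbs = 0;
   }
   return (RGSCH_MIN(iTbs, maxDlItbs));
}
#endif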
26753 * @brief This function determines the RBs and Bytes required for BO
26754 * transmission for UEs configured with TM 1/2/6/7.
26758 * Function: rgSCHCmnDlAllocTxRb1Tb1Cw
26759 * Purpose: Allocate TB1 on CW1.
26761 * Reference parameter effBo is filled with the allocated bytes.
26762 * Returns RFAILED if BO not satisfied at all.
26764 * Invoked by: rgSCHCmnDlAllocTxRbTM1/2/6/7
26766 * @param[in] RgSchCellCb *cell
26767 * @param[in] RgSchDlSf *subFrm
26768 * @param[in] RgSchUeCb *ue
26769 * @param[in] U32 bo
26770 * @param[out] U32 *effBo
26771 * @param[in] RgSchDlHqProcCb *proc
26772 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26777 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw
26784 RgSchDlHqProcCb *proc,
26785 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26788 PRIVATE Void rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26794 RgSchDlHqProcCb *proc;
26795 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26798 RgSchDlRbAlloc *allocInfo;
26801 TRC2(rgSCHCmnDlAllocTxRb1Tb1Cw);
26804 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
26806 if (ue->ue5gtfCb.rank == 2)
26808 allocInfo->dciFormat = TFU_DCI_FORMAT_B2;
26812 allocInfo->dciFormat = TFU_DCI_FORMAT_B1;
26815 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26816 allocInfo->raType);
26818 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
26819 bo, &numRb, effBo);
26820 if (ret == RFAILED)
26822 /* If allocation couldn't be made then return */
26825 /* Adding UE to RbAllocInfo TX Lst */
26826 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
26827 /* Fill UE alloc Info */
26828 allocInfo->rbsReq = numRb;
26829 allocInfo->dlSf = subFrm;
26831 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
26839 * @brief This function determines the RBs and Bytes required for BO
26840 * retransmission for UEs configured with TM 1/2/6/7.
26844 * Function: rgSCHCmnDlAllocRetxRb1Tb1Cw
26845 * Purpose: Allocate TB1 on CW1.
26847 * Reference parameter effBo is filled with the allocated bytes.
26848 * Returns RFAILED if BO not satisfied at all.
26850 * Invoked by: rgSCHCmnDlAllocRetxRbTM1/2/6/7
26852 * @param[in] RgSchCellCb *cell
26853 * @param[in] RgSchDlSf *subFrm
26854 * @param[in] RgSchUeCb *ue
26855 * @param[in] U32 bo
26856 * @param[out] U32 *effBo
26857 * @param[in] RgSchDlHqProcCb *proc
26858 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26863 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw
26870 RgSchDlHqProcCb *proc,
26871 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26874 PRIVATE Void rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26880 RgSchDlHqProcCb *proc;
26881 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26884 RgSchDlRbAlloc *allocInfo;
26887 TRC2(rgSCHCmnDlAllocRetxRb1Tb1Cw);
26890 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
26893 /* 5GTF: RETX DCI format same as TX */
26894 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
26895 &allocInfo->raType);
26898 /* Get the Allocation in terms of RBs that are required for
26899 * this retx of TB1 */
26900 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
26902 if (ret == RFAILED)
26904 /* Allocation couldn't be made for Retx */
26905 /* Fix : syed If TxRetx allocation failed then add the UE along with the proc
26906 * to the nonSchdTxRetxUeLst and let the specific scheduler take care of it during
26908 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
26911 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
26912 /* Fill UE alloc Info */
26913 allocInfo->rbsReq = numRb;
26914 allocInfo->dlSf = subFrm;
26916 allocInfo->vrbgReq = numRb/MAX_5GTF_VRBG_SIZE;
26924 * @brief This function determines the RBs and Bytes required for BO
26925 * transmission for UEs configured with TM 1.
26929 * Function: rgSCHCmnDlAllocTxRbTM1
26932 * Reference parameter effBo is filled with the allocated bytes.
26933 * Returns RFAILED if BO not satisfied at all.
26935 * Invoked by: rgSCHCmnDlAllocTxRb
26937 * @param[in] RgSchCellCb *cell
26938 * @param[in] RgSchDlSf *subFrm
26939 * @param[in] RgSchUeCb *ue
26940 * @param[in] U32 bo
26941 * @param[out] U32 *effBo
26942 * @param[in] RgSchDlHqProcCb *proc
26943 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26948 PRIVATE Void rgSCHCmnDlAllocTxRbTM1
26955 RgSchDlHqProcCb *proc,
26956 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
26959 PRIVATE Void rgSCHCmnDlAllocTxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
26965 RgSchDlHqProcCb *proc;
26966 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
26969 TRC2(rgSCHCmnDlAllocTxRbTM1);
26970 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
26976 * @brief This function determines the RBs and Bytes required for BO
26977 * retransmission for UEs configured with TM 1.
26981 * Function: rgSCHCmnDlAllocRetxRbTM1
26984 * Reference parameter effBo is filled with the allocated bytes.
26985 * Returns RFAILED if BO not satisfied at all.
26987 * Invoked by: rgSCHCmnDlAllocRetxRb
26989 * @param[in] RgSchCellCb *cell
26990 * @param[in] RgSchDlSf *subFrm
26991 * @param[in] RgSchUeCb *ue
26992 * @param[in] U32 bo
26993 * @param[out] U32 *effBo
26994 * @param[in] RgSchDlHqProcCb *proc
26995 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27000 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1
27007 RgSchDlHqProcCb *proc,
27008 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27011 PRIVATE Void rgSCHCmnDlAllocRetxRbTM1(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27017 RgSchDlHqProcCb *proc;
27018 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27021 TRC2(rgSCHCmnDlAllocRetxRbTM1);
27022 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27028 * @brief This function determines the RBs and Bytes required for BO
27029 * transmission for UEs configured with TM 2.
27033 * Function: rgSCHCmnDlAllocTxRbTM2
27036 * Reference parameter effBo is filled with the allocated bytes.
27037 * Returns RFAILED if BO not satisfied at all.
27039 * Invoked by: rgSCHCmnDlAllocTxRb
27041 * @param[in] RgSchCellCb *cell
27042 * @param[in] RgSchDlSf *subFrm
27043 * @param[in] RgSchUeCb *ue
27044 * @param[in] U32 bo
27045 * @param[out] U32 *effBo
27046 * @param[in] RgSchDlHqProcCb *proc
27047 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27052 PRIVATE Void rgSCHCmnDlAllocTxRbTM2
27059 RgSchDlHqProcCb *proc,
27060 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27063 PRIVATE Void rgSCHCmnDlAllocTxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27069 RgSchDlHqProcCb *proc;
27070 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27073 TRC2(rgSCHCmnDlAllocTxRbTM2);
27074 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27080 * @brief This function determines the RBs and Bytes required for BO
27081 * retransmission for UEs configured with TM 2.
27085 * Function: rgSCHCmnDlAllocRetxRbTM2
27088 * Reference parameter effBo is filled with the allocated bytes.
27089 * Returns RFAILED if BO not satisfied at all.
27091 * Invoked by: rgSCHCmnDlAllocRetxRb
27093 * @param[in] RgSchCellCb *cell
27094 * @param[in] RgSchDlSf *subFrm
27095 * @param[in] RgSchUeCb *ue
27096 * @param[in] U32 bo
27097 * @param[out] U32 *effBo
27098 * @param[in] RgSchDlHqProcCb *proc
27099 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27104 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2
27111 RgSchDlHqProcCb *proc,
27112 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27115 PRIVATE Void rgSCHCmnDlAllocRetxRbTM2(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27121 RgSchDlHqProcCb *proc;
27122 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27125 TRC2(rgSCHCmnDlAllocRetxRbTM2);
27126 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
27132 * @brief This function determines the RBs and Bytes required for BO
27133 * transmission for UEs configured with TM 3.
27137 * Function: rgSCHCmnDlAllocTxRbTM3
27140 * Reference parameter effBo is filled with the allocated bytes.
27141 * Returns RFAILED if BO not satisfied at all.
27143 * Invoked by: rgSCHCmnDlAllocTxRb
27145 * @param[in] RgSchCellCb *cell
27146 * @param[in] RgSchDlSf *subFrm
27147 * @param[in] RgSchUeCb *ue
27148 * @param[in] U32 bo
27149 * @param[out] U32 *effBo
27150 * @param[in] RgSchDlHqProcCb *proc
27151 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27156 PRIVATE Void rgSCHCmnDlAllocTxRbTM3
27163 RgSchDlHqProcCb *proc,
27164 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27167 PRIVATE Void rgSCHCmnDlAllocTxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27173 RgSchDlHqProcCb *proc;
27174 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27178 TRC2(rgSCHCmnDlAllocTxRbTM3);
27180 /* Both TBs free for TX allocation */
27181 rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo,\
27182 proc, cellWdAllocInfo);
27189 * @brief This function determines the RBs and Bytes required for BO
27190 * retransmission for UEs configured with TM 3.
27194 * Function: rgSCHCmnDlAllocRetxRbTM3
27197 * Reference parameter effBo is filled with the allocated bytes.
27198 * Returns RFAILED if BO not satisfied at all.
27200 * Invoked by: rgSCHCmnDlAllocRetxRb
27202 * @param[in] RgSchCellCb *cell
27203 * @param[in] RgSchDlSf *subFrm
27204 * @param[in] RgSchUeCb *ue
27205 * @param[in] U32 bo
27206 * @param[out] U32 *effBo
27207 * @param[in] RgSchDlHqProcCb *proc
27208 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27213 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3
27220 RgSchDlHqProcCb *proc,
27221 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27224 PRIVATE Void rgSCHCmnDlAllocRetxRbTM3(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27230 RgSchDlHqProcCb *proc;
27231 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27235 TRC2(rgSCHCmnDlAllocRetxRbTM3);
27237 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
27238 (proc->tbInfo[1].state == HQ_TB_NACKED))
27241 printf ("RETX RB TM3 nack for both hqp %d cell %d \n", proc->procId, proc->hqE->cell->cellId);
27243 /* Both TBs require RETX allocation */
27244 rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo,\
27245 proc, cellWdAllocInfo);
27249 /* One of the TBs needs RETX allocation. The other TB may or may not
27250 * be available for new TX allocation. */
27251 rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo,\
27252 proc, cellWdAllocInfo);
27260 * @brief This function performs the DCI format selection in case of
27261 * Transmit Diversity scheme where there can be more
27262 * than 1 option for DCI format selection.
27266 * Function: rgSCHCmnSlctPdcchFrmt
27267 * Purpose: 1. If DLFS is enabled, then choose TM specific
27268 * DCI format for Transmit diversity. All the
27269 * TM Specific DCI Formats support Type0 and/or
27270 * Type1 resource allocation scheme. DLFS
27271 * supports only Type-0&1 Resource allocation.
27272 * 2. If DLFS is not enabled, select a DCI format
27273 * which is of smaller size. Since Non-DLFS
27274 * scheduler supports all Resource allocation
27275 * schemes, selection is based on efficiency.
27277 * Invoked by: DL UE Allocation by Common Scheduler.
27279 * @param[in] RgSchCellCb *cell
27280 * @param[in] RgSchUeCb *ue
27281 * @param[out] U8 *raType
27282 * @return TfuDciFormat
27286 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt
27293 PUBLIC TfuDciFormat rgSCHCmnSlctPdcchFrmt(cell, ue, raType)
27299 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
27301 TRC2(rgSCHCmnSlctPdcchFrmt);
27303 /* ccpu00140894- Selective DCI Format and RA type should be selected only
27304 * after TX Mode transition is completed*/
27305 if ((cellSch->dl.isDlFreqSel) && (ue->txModeTransCmplt))
27307 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciRAType;
27308 return (rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].spfcDciFrmt);
27312 *raType = rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciRAType;
27313 return (rgSchCmnDciFrmtOptns[ue->mimoInfo.txMode-1].prfrdDciFrmt);
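/* Illustrative sketch (not compiled): shape of the per-TM option table
 * consulted by rgSCHCmnSlctPdcchFrmt() above. The field names follow the
 * accesses in the function; SketchDciFrmtOptn and the example entry are
 * assumptions, not a copy of the real rgSchCmnDciFrmtOptns[] initialiser. */
#if 0
typedef struct sketchDciFrmtOptn
{
   TfuDciFormat spfcDciFrmt;     /* TM specific format, used when DLFS is on */
   U8           spfcDciRAType;   /* RA type paired with spfcDciFrmt          */
   TfuDciFormat prfrdDciFrmt;    /* smaller preferred format, DLFS off       */
   U8           prfrdDciRAType;  /* RA type paired with prfrdDciFrmt         */
} SketchDciFrmtOptn;

/* e.g. the TM3 entry (index ue->mimoInfo.txMode - 1 == 2) would be expected
 * to look roughly like (assumption):
 *   { TFU_DCI_FORMAT_2A, RG_SCH_CMN_RA_TYPE0,
 *     TFU_DCI_FORMAT_1A, RG_SCH_CMN_RA_TYPE2 } */
#endif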
27319 * @brief This function handles Retx allocation in case of TM3 UEs
27320 * where both the TBs were NACKED previously.
27324 * Function: rgSCHCmnDlTM3RetxRetx
27325 * Purpose: If forceTD flag enabled
27326 * TD for TB1 on CW1.
27328 * DCI Frmt 2A and RA Type 0
27329 * RI layered SM of both TBs on 2 CWs
27330 * Add UE to cell Alloc Info.
27331 * Fill UE alloc Info.
27334 * Successful allocation is indicated by non-zero effBo value.
27336 * Invoked by: rgSCHCmnDlAllocRbTM3
27338 * @param[in] RgSchCellCb *cell
27339 * @param[in] RgSchDlSf *subFrm
27340 * @param[in] RgSchUeCb *ue
27341 * @param[in] U32 bo
27342 * @param[out] U32 *effBo
27343 * @param[in] RgSchDlHqProcCb *proc
27344 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27349 PRIVATE Void rgSCHCmnDlTM3RetxRetx
27356 RgSchDlHqProcCb *proc,
27357 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27360 PRIVATE Void rgSCHCmnDlTM3RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27366 RgSchDlHqProcCb *proc;
27367 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27371 RgSchDlRbAlloc *allocInfo;
27378 TRC2(rgSCHCmnDlTM3RetxRetx);
27381 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27383 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27385 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
27386 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
27388 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27390 if (ret == RFAILED)
27392 /* Allocation couldn't be made for Retx */
27393 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
27396 /* Fix for ccpu00123927: Retransmit 2 codewords irrespective of current rank */
27397 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27398 #ifdef FOUR_TX_ANTENNA
27399 /* Chandra: For 4X4 MIMO RETX with noTxLyrs=3, CW0 should be 1-LyrTB and CW1 should
27400 * have 2-LyrTB as per Table 6.3.3.2-1 of 36.211 */
27401 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27404 proc->cwSwpEnabled = TRUE;
27407 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27408 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
27412 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27415 /* Adding UE to allocInfo RETX Lst */
27416 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27418 /* Fill UE alloc Info scratch pad */
27419 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27420 precInfo, noTxLyrs, subFrm);
27427 * @brief This function handles Retx allocation in case of TM4 UEs
27428 * where both the TBs were NACKED previously.
27432 * Function: rgSCHCmnDlTM4RetxRetx
27433 * Purpose: If forceTD flag enabled
27434 * TD for TB1 on CW1.
27436 * DCI Frmt 2 and RA Type 0
27438 * 1 layer SM of TB1 on CW1.
27440 * RI layered SM of both TBs on 2 CWs
27441 * Add UE to cell Alloc Info.
27442 * Fill UE alloc Info.
27445 * Successful allocation is indicated by non-zero effBo value.
27447 * Invoked by: rgSCHCmnDlAllocRbTM4
27449 * @param[in] RgSchCellCb *cell
27450 * @param[in] RgSchDlSf *subFrm
27451 * @param[in] RgSchUeCb *ue
27452 * @param[in] U32 bo
27453 * @param[out] U32 *effBo
27454 * @param[in] RgSchDlHqProcCb *proc
27455 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27460 PRIVATE Void rgSCHCmnDlTM4RetxRetx
27467 RgSchDlHqProcCb *proc,
27468 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
27471 PRIVATE Void rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
27477 RgSchDlHqProcCb *proc;
27478 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
27482 RgSchDlRbAlloc *allocInfo;
27484 Bool swpFlg = FALSE;
27486 #ifdef FOUR_TX_ANTENNA
27491 TRC2(rgSCHCmnDlTM4RetxRetx);
27494 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
27496 /* Irrespective of RI, schedule both CWs */
27497 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
27498 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
27500 ret = rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc, &numRb, &swpFlg,\
27502 if (ret == RFAILED)
27504 /* Allocation couldn't be made for Retx */
27505 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
27508 noTxLyrs = proc->tbInfo[0].numLyrs + proc->tbInfo[1].numLyrs;
27510 #ifdef FOUR_TX_ANTENNA
27511 /* Chandra: For 4X4 MIMO RETX with noTxLyrs=3, CW0 should be 1-LyrTB and CW1
27512 * should have 2-LyrTB as per Table 6.3.3.2-1 of 36.211 */
27513 if(noTxLyrs == 3 && proc->tbInfo[0].numLyrs==2)
27516 proc->cwSwpEnabled = TRUE;
27518 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
27519 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
27523 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
27526 /* Adding UE to allocInfo RETX Lst */
27527 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
27529 /* Fill UE alloc Info scratch pad */
27530 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
27531 precInfo, noTxLyrs, subFrm);
27539 * @brief This function determines Transmission attributes
27540 * incase of Spatial multiplexing for TX and RETX TBs.
27544 * Function: rgSCHCmnDlSMGetAttrForTxRetx
27545 * Purpose: 1. Reached here for a TM3/4 UE's HqP whose one of the TBs is
27546 * NACKED and the other TB is either NACKED or WAITING.
27547 * 2. Select the NACKED TB for RETX allocation.
27548 * 3. Allocation preference for RETX TB by mapping it to a better
27549 * CW (better in terms of efficiency).
27550 * 4. Determine the state of the other TB.
27551 * Determine if swapFlag were to be set.
27552 * Swap flag would be set if Retx TB is cross
27554 * 5. If UE has new data available for TX and if the other TB's state
27555 * is ACKED then set furtherScope as TRUE.
27557 * Invoked by: rgSCHCmnDlTM3[4]TxRetx
27559 * @param[in] RgSchUeCb *ue
27560 * @param[in] RgSchDlHqProcCb *proc
27561 * @param[out] RgSchDlHqTbCb **retxTb
27562 * @param[out] RgSchDlHqTbCb **txTb
27563 * @param[out] Bool *frthrScp
27564 * @param[out] Bool *swpFlg
27569 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx
27572 RgSchDlHqProcCb *proc,
27573 RgSchDlHqTbCb **retxTb,
27574 RgSchDlHqTbCb **txTb,
27579 PRIVATE Void rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, frthrScp,\
27582 RgSchDlHqProcCb *proc;
27583 RgSchDlHqTbCb **retxTb;
27584 RgSchDlHqTbCb **txTb;
27589 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,proc->hqE->cell);
27590 RgSchDlRbAlloc *allocInfo;
27592 TRC2(rgSCHCmnDlSMGetAttrForTxRetx);
27594 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27596 *retxTb = &proc->tbInfo[0];
27597 *txTb = &proc->tbInfo[1];
27598 /* TENB_BRDCM_TM4- Currently disabling swapflag for TM3/TM4, since
27599 * HqFeedback processing does not consider a swapped hq feedback */
27600 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 1))
27603 proc->cwSwpEnabled = TRUE;
27605 if (proc->tbInfo[1].state == HQ_TB_ACKED)
27607 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27608 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
27613 *retxTb = &proc->tbInfo[1];
27614 *txTb = &proc->tbInfo[0];
27615 /* TENB_BRDCM_TM4 - Currently disabling swapflag for TM3/TM4, since
27616 * HqFeedback processing does not consider a swapped hq feedback */
27617 if ((ue->mimoInfo.txMode == RGR_UE_TM_4) && (ueDl->mimoInfo.btrCwIdx == 0))
27620 proc->cwSwpEnabled = TRUE;
27622 if (proc->tbInfo[0].state == HQ_TB_ACKED)
27624 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue, proc->hqE->cell);
27625 *frthrScp = allocInfo->mimoAllocInfo.hasNewTxData;
27633 * @brief Determine Precoding information for TM3 2 TX Antenna.
27637 * Function: rgSCHCmnDlTM3PrecInf2
27640 * Invoked by: rgSCHCmnDlGetAttrForTM3
27642 * @param[in] RgSchUeCb *ue
27643 * @param[in] U8 numTxLyrs
27644 * @param[in] Bool bothCwEnbld
27649 PRIVATE U8 rgSCHCmnDlTM3PrecInf2
27657 PRIVATE U8 rgSCHCmnDlTM3PrecInf2(ue, numTxLyrs, bothCwEnbld)
27664 TRC2(rgSCHCmnDlTM3PrecInf2);
27671 * @brief Determine Precoding information for TM4 2 TX Antenna.
27675 * Function: rgSCHCmnDlTM4PrecInf2
27676 * Purpose: To derive the precoding index
27677 * information from 36.212 table 5.3.3.1.5-4
27679 * Invoked by: rgSCHCmnDlGetAttrForTM4
27681 * @param[in] RgSchUeCb *ue
27682 * @param[in] U8 numTxLyrs
27683 * @param[in] Bool bothCwEnbld
27688 PRIVATE U8 rgSCHCmnDlTM4PrecInf2
27696 PRIVATE U8 rgSCHCmnDlTM4PrecInf2(ue, numTxLyrs, bothCwEnbld)
27703 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27706 TRC2(rgSCHCmnDlTM4PrecInf2);
27708 if (ueDl->mimoInfo.ri == numTxLyrs)
27710 if (ueDl->mimoInfo.ri == 2)
27712 /* PrecInfo corresponding to 2 CW
27714 if (ue->mimoInfo.puschFdbkVld)
27720 precIdx = ueDl->mimoInfo.pmi - 1;
27725 /* PrecInfo corresponding to 1 CW
27727 if (ue->mimoInfo.puschFdbkVld)
27733 precIdx = ueDl->mimoInfo.pmi + 1;
27737 else if (ueDl->mimoInfo.ri > numTxLyrs)
27739 /* In case of choosing among the columns of a
27740 * precoding matrix, choose the column corresponding
27741 * to the MAX-CQI */
27742 if (ue->mimoInfo.puschFdbkVld)
27748 precIdx = (ueDl->mimoInfo.pmi- 1)* 2 + 1;
27751 else /* if RI < numTxLyrs */
27753 precIdx = (ueDl->mimoInfo.pmi < 2)? 0:1;
27760 * @brief Determine Precoding information for TM3 4 TX Antenna.
27764 * Function: rgSCHCmnDlTM3PrecInf4
27765 * Purpose: To derive the precoding index
27766 * information from 36.212 table 5.3.3.1.5A-2
27768 * Invoked by: rgSCHCmnDlGetAttrForTM3
27770 * @param[in] RgSchUeCb *ue
27771 * @param[in] U8 numTxLyrs
27772 * @param[in] Bool bothCwEnbld
27777 PRIVATE U8 rgSCHCmnDlTM3PrecInf4
27785 PRIVATE U8 rgSCHCmnDlTM3PrecInf4(ue, numTxLyrs, bothCwEnbld)
27794 TRC2(rgSCHCmnDlTM3PrecInf4);
27798 precIdx = numTxLyrs - 2;
27800 else /* only 1 CW transmission */
27809 * @brief Determine Precoding information for TM4 4 TX Antenna.
27813 * Function: rgSCHCmnDlTM4PrecInf4
27814 * Purpose: To derive the precoding index
27815 * information from 36.212 table 5.3.3.1.5-5
27817 * Invoked by: rgSCHCmnDlGetAttrForTM4
27819 * @param[in] RgSchUeCb *ue
27820 * @param[in] U8 numTxLyrs
27821 * @param[in] Bool bothCwEnbld
27826 PRIVATE U8 rgSCHCmnDlTM4PrecInf4
27834 PRIVATE U8 rgSCHCmnDlTM4PrecInf4(cell, ue, numTxLyrs, bothCwEnbld)
27841 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27842 U8 precInfoBaseIdx, precIdx;
27844 TRC2(rgSCHCmnDlTM4PrecInf4);
27846 precInfoBaseIdx = (ue->mimoInfo.puschFdbkVld)? (16):
27847 (ueDl->mimoInfo.pmi);
27850 precIdx = precInfoBaseIdx + (numTxLyrs-2)*17;
27852 else /* only 1 CW transmission */
27854 precInfoBaseIdx += 1;
27855 precIdx = precInfoBaseIdx + (numTxLyrs-1)*17;
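/* Illustrative sketch (not compiled): trace of the index arithmetic in
 * rgSCHCmnDlTM4PrecInf4() above when the UE-reported PMI is used directly
 * (i.e. puschFdbkVld == FALSE; 16 is used as the base otherwise):
 *   2 CWs, 2 layers, pmi 3 : 3 + (2-2)*17     =  3
 *   2 CWs, 4 layers, pmi 3 : 3 + (4-2)*17     = 37
 *   1 CW,  1 layer,  pmi 3 : (3+1) + (1-1)*17 =  4
 * sketchTm4PrecIdx4Tx() is hypothetical and reproduces only that arithmetic. */
#if 0
static U8 sketchTm4PrecIdx4Tx(U8 pmi, U8 numTxLyrs, Bool bothCwEnbld)
{
   U8 base = pmi;    /* 16 would be used instead when PUSCH feedback is valid */

   if (bothCwEnbld)  /* two codeword transmission */
   {
      return ((U8)(base + (numTxLyrs - 2) * 17));
   }
   /* single codeword transmission */
   return ((U8)(base + 1 + (numTxLyrs - 1) * 17));
}
#endif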
27862 * @brief This function determines Transmission attributes
27863 * incase of TM3 scheduling.
27867 * Function: rgSCHCmnDlGetAttrForTM3
27868 * Purpose: Determine retx TB and tx TB based on TB states.
27869 * If forceTD enabled
27870 * perform only retx TB allocation.
27871 * If retxTB == TB2 then DCI Frmt = 2A, RA Type = 0.
27872 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
27874 * perform retxTB allocation on CW1.
27876 * Determine further Scope and Swap Flag attributes
27877 * assuming a 2 CW transmission of RetxTB and new Tx TB.
27878 * If no further scope for new TX allocation
27879 * Allocate only retx TB using 2 layers if
27880 * this TB was previously transmitted using 2 layers AND
27881 * number of Tx antenna ports == 4.
27882 * otherwise do single layer precoding.
27884 * Invoked by: rgSCHCmnDlTM3TxRetx
27886 * @param[in] RgSchUeCb *ue
27887 * @param[in] RgSchDlHqProcCb *proc
27888 * @param[out] U8 *numTxLyrs
27889 * @param[out] Bool *isTraDiv
27890 * @param[out] U8 *prcdngInf
27891 * @param[out] U8 *raType
27896 PRIVATE Void rgSCHCmnDlGetAttrForTM3
27900 RgSchDlHqProcCb *proc,
27902 TfuDciFormat *dciFrmt,
27904 RgSchDlHqTbCb **retxTb,
27905 RgSchDlHqTbCb **txTb,
27911 PRIVATE Void rgSCHCmnDlGetAttrForTM3(cell, ue, proc, numTxLyrs, dciFrmt,\
27912 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
27915 RgSchDlHqProcCb *proc;
27917 TfuDciFormat *dciFrmt;
27919 RgSchDlHqTbCb **retxTb;
27920 RgSchDlHqTbCb **txTb;
27926 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
27929 TRC2(rgSCHCmnDlGetAttrForTM3);
27931 /* Avoiding Tx-Retx for LAA cell as firstSchedTime is associated with
27933 /* Integration_fix: SPS Proc shall always have only one Cw */
27935 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
27936 (ueDl->mimoInfo.forceTD))
27938 ||(TRUE == rgSCHLaaSCellEnabled(cell))
27942 if ((ueDl->mimoInfo.forceTD)
27944 || (TRUE == rgSCHLaaSCellEnabled(cell))
27949 /* Transmit Diversity. Format based on dlfsEnabled
27950 * No further scope */
27951 if (proc->tbInfo[0].state == HQ_TB_NACKED)
27953 *retxTb = &proc->tbInfo[0];
27954 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
27958 *retxTb = &proc->tbInfo[1];
27959 *dciFrmt = TFU_DCI_FORMAT_2A;
27960 *raType = RG_SCH_CMN_RA_TYPE0;
27968 /* Determine the 2 TB transmission attributes */
27969 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
27973 /* Prefer allocation of RETX TB over 2 layers rather than combining
27974 * it with a new TX. */
27975 if ((ueDl->mimoInfo.ri == 2)
27976 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
27978 /* Allocate TB on CW1, using 2 Lyrs,
27979 * Format 2, precoding accordingly */
27985 *numTxLyrs= ((*retxTb)->numLyrs + ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)].noLyr);
27987 if((*retxTb)->tbIdx == 0 && ((*retxTb)->numLyrs == 2 ) && *numTxLyrs ==3)
27990 proc->cwSwpEnabled = TRUE;
27992 else if((*retxTb)->tbIdx == 1 && ((*retxTb)->numLyrs == 1) && *numTxLyrs ==3)
27995 proc->cwSwpEnabled = TRUE;
27999 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28000 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])\
28001 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28002 *dciFrmt = TFU_DCI_FORMAT_2A;
28003 *raType = RG_SCH_CMN_RA_TYPE0;
28005 else /* frthrScp == FALSE */
28007 if (cell->numTxAntPorts == 2)
28009 /* Transmit Diversity */
28011 if ((*retxTb)->tbIdx == 0)
28013 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28017 /* If retxTB is TB2 then use format 2A */
28018 *dciFrmt = TFU_DCI_FORMAT_2A;
28019 *raType = RG_SCH_CMN_RA_TYPE0;
28024 else /* NumAntPorts == 4 */
28026 if ((*retxTb)->numLyrs == 2)
28028 /* Allocate TB on CW1, using 2 Lyrs,
28029 * Format 2A, precoding accordingly */
28031 *dciFrmt = TFU_DCI_FORMAT_2A;
28032 *raType = RG_SCH_CMN_RA_TYPE0;
28033 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28034 *prcdngInf = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, *numTxLyrs, *frthrScp);
28039 /* Transmit Diversity */
28041 if ((*retxTb)->tbIdx == 0)
28043 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28047 /* If retxTB is TB2 then use format 2A */
28048 *dciFrmt = TFU_DCI_FORMAT_2A;
28049 *raType = RG_SCH_CMN_RA_TYPE0;
28063 * @brief This function determines Transmission attributes
28064 * incase of TM4 scheduling.
28068 * Function: rgSCHCmnDlGetAttrForTM4
28069 * Purpose: Determine retx TB and tx TB based on TB states.
28070 * If forceTD enabled
28071 * perform only retx TB allocation.
28072 * If retxTB == TB2 then DCI Frmt = 2, RA Type = 0.
28073 * Else DCI Frmt and RA Type based on cell->isDlfsEnbld
28075 * perform retxTB allocation on CW1.
28077 * Determine further Scope and Swap Flag attributes
28078 * assuming a 2 CW transmission of RetxTB and new Tx TB.
28079 * If no further scope for new TX allocation
28080 * Allocate only retx TB using 2 layers if
28081 * this TB was previously transmitted using 2 layers AND
28082 * number of Tx antenna ports == 4.
28083 * otherwise do single layer precoding.
28085 * Invoked by: rgSCHCmnDlTM4TxRetx
28087 * @param[in] RgSchUeCb *ue
28088 * @param[in] RgSchDlHqProcCb *proc
28089 * @param[out] U8 *numTxLyrs
28090 * @param[out] Bool *isTraDiv
28091 * @param[out] U8 *prcdngInf
28092 * @param[out] U8 *raType
28097 PRIVATE Void rgSCHCmnDlGetAttrForTM4
28101 RgSchDlHqProcCb *proc,
28103 TfuDciFormat *dciFrmt,
28105 RgSchDlHqTbCb **retxTb,
28106 RgSchDlHqTbCb **txTb,
28112 PRIVATE Void rgSCHCmnDlGetAttrForTM4(cell, ue, proc, numTxLyrs, dciFrmt,\
28113 prcdngInf, retxTb, txTb, frthrScp, swpFlg, raType)
28116 RgSchDlHqProcCb *proc;
28118 TfuDciFormat *dciFrmt;
28120 RgSchDlHqTbCb **retxTb;
28121 RgSchDlHqTbCb **txTb;
28127 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28130 TRC2(rgSCHCmnDlGetAttrForTM4);
28133 /* Integration_fix: SPS Proc shall always have only one Cw */
28135 if (((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28136 (ueDl->mimoInfo.forceTD))
28138 ||(TRUE == rgSCHLaaSCellEnabled(cell))
28142 if ((ueDl->mimoInfo.forceTD)
28144 || (TRUE == rgSCHLaaSCellEnabled(cell))
28149 /* Transmit Diversity. Format based on dlfsEnabled
28150 * No further scope */
28151 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28153 *retxTb = &proc->tbInfo[0];
28154 *dciFrmt = rgSCHCmnSlctPdcchFrmt(cell, ue, raType);
28158 *retxTb = &proc->tbInfo[1];
28159 *dciFrmt = TFU_DCI_FORMAT_2;
28160 *raType = RG_SCH_CMN_RA_TYPE0;
28168 if (ueDl->mimoInfo.ri == 1)
28170 /* single layer precoding. Format 2.
28171 * No further scope */
28172 if (proc->tbInfo[0].state == HQ_TB_NACKED)
28174 *retxTb = &proc->tbInfo[0];
28178 *retxTb = &proc->tbInfo[1];
28181 *dciFrmt = TFU_DCI_FORMAT_2;
28182 *raType = RG_SCH_CMN_RA_TYPE0;
28184 *prcdngInf = 0; /*When RI= 1*/
28188 /* Determine the 2 TB transmission attributes */
28189 rgSCHCmnDlSMGetAttrForTxRetx(ue, proc, retxTb, txTb, \
28191 *dciFrmt = TFU_DCI_FORMAT_2;
28192 *raType = RG_SCH_CMN_RA_TYPE0;
28195 /* Prefer allocation of RETX TB over 2 layers rather than combining
28196 * it with a new TX. */
28197 if ((ueDl->mimoInfo.ri == 2)
28198 && ((*retxTb)->numLyrs == 2) && (cell->numTxAntPorts == 4))
28200 /* Allocate TB on CW1, using 2 Lyrs,
28201 * Format 2, precoding accordingly */
28205 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28206 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])
28207 (cell, ue, ueDl->mimoInfo.ri, *frthrScp);
28209 else /* frthrScp == FALSE */
28211 if (cell->numTxAntPorts == 2)
28213 /* single layer precoding. Format 2. */
28215 *prcdngInf = (getPrecInfoFunc[1][cell->numTxAntPorts/2 - 1])\
28216 (cell, ue, *numTxLyrs, *frthrScp);
28219 else /* NumAntPorts == 4 */
28221 if ((*retxTb)->numLyrs == 2)
28223 /* Allocate TB on CW1, using 2 Lyrs,
28224 * Format 2, precoding accordingly */
28226 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28227 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28228 (cell, ue, *numTxLyrs, *frthrScp);
28233 /* Allocate TB with 1 lyr precoding,
28234 * Format 2, precoding info accordingly */
28236 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28237 *prcdngInf = (getPrecInfoFunc[1][precInfoAntIdx])\
28238 (cell, ue, *numTxLyrs, *frthrScp);
28249 * @brief This function handles Retx allocation in case of TM3 UEs
28250 * where previously one of the TBs was NACKED and the other
28251 * TB is either ACKED/WAITING.
28255 * Function: rgSCHCmnDlTM3TxRetx
28256 * Purpose: Determine the TX attributes for TM3 TxRetx Allocation.
28257 * If further Scope for New Tx Allocation on other TB
28258 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28259 * Add UE to cell wide RetxTx List.
28261 * Perform only RETX alloc'n on CW1.
28262 * Add UE to cell wide Retx List.
28264 * effBo is set to a non-zero value if allocation is
28267 * Invoked by: rgSCHCmnDlAllocRbTM3
28269 * @param[in] RgSchCellCb *cell
28270 * @param[in] RgSchDlSf *subFrm
28271 * @param[in] RgSchUeCb *ue
28272 * @param[in] U32 bo
28273 * @param[out] U32 *effBo
28274 * @param[in] RgSchDlHqProcCb *proc
28275 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28280 PRIVATE Void rgSCHCmnDlTM3TxRetx
28287 RgSchDlHqProcCb *proc,
28288 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28291 PRIVATE Void rgSCHCmnDlTM3TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28297 RgSchDlHqProcCb *proc;
28298 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28302 RgSchDlRbAlloc *allocInfo;
28304 RgSchDlHqTbCb *retxTb, *txTb;
28310 TRC2(rgSCHCmnDlTM3TxRetx);
28314 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28317 /* Determine the transmission attributes */
28318 rgSCHCmnDlGetAttrForTM3(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28319 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28320 &allocInfo->raType);
28325 printf ("TX RETX called from proc %d cell %d \n",proc->procId, cell->cellId);
28327 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28329 if (ret == RFAILED)
28331 /* Allocation couldn't be made for Retx */
28332 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28335 /* Adding UE to RbAllocInfo RETX-TX Lst */
28336 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
28340 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28341 numTxLyrs, &numRb, effBo);
28342 if (ret == RFAILED)
28344 /* Allocation couldn't be made for Retx */
28345 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28349 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28352 /* Adding UE to allocInfo RETX Lst */
28353 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
28356 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28357 prcdngInf, numTxLyrs, subFrm);
28364 * @brief This function handles Retx allocation in case of TM4 UEs
28365 * where previously one of the TBs was NACKED and the other
28366 * TB is either ACKED/WAITING.
28370 * Function: rgSCHCmnDlTM4TxRetx
28371 * Purpose: Determine the TX attributes for TM4 TxRetx Allocation.
28372 * If further Scope for New Tx Allocation on other TB
28373 * Perform RETX alloc'n on 1 CW and TX alloc'n on other.
28374 * Add UE to cell wide RetxTx List.
28376 * Perform only RETX alloc'n on CW1.
28377 * Add UE to cell wide Retx List.
28379 * effBo is set to a non-zero value if allocation is
28382 * Invoked by: rgSCHCmnDlAllocRbTM4
28384 * @param[in] RgSchCellCb *cell
28385 * @param[in] RgSchDlSf *subFrm
28386 * @param[in] RgSchUeCb *ue
28387 * @param[in] U32 bo
28388 * @param[out] U32 *effBo
28389 * @param[in] RgSchDlHqProcCb *proc
28390 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28395 PRIVATE Void rgSCHCmnDlTM4TxRetx
28402 RgSchDlHqProcCb *proc,
28403 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28406 PRIVATE Void rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28412 RgSchDlHqProcCb *proc;
28413 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28417 RgSchDlRbAlloc *allocInfo;
28419 RgSchDlHqTbCb *retxTb, *txTb;
28425 TRC2(rgSCHCmnDlTM4TxRetx);
28428 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28431 /* Determine the transmission attributes */
28432 rgSCHCmnDlGetAttrForTM4(cell, ue, proc, &numTxLyrs, &allocInfo->dciFormat,\
28433 &prcdngInf, &retxTb, &txTb, &frthrScp, &swpFlg,\
28434 &allocInfo->raType);
28438 ret = rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, retxTb, txTb,\
28440 if (ret == RFAILED)
28442 /* Fix : syed If TxRetx allocation failed then add the UE along
28443 * with the proc to the nonSchdTxRetxUeLst and let spfc scheduler
28444 * take care of it during finalization. */
28445 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28448 /* Adding UE to RbAllocInfo RETX-TX Lst */
28449 rgSCHCmnDlRbInfoAddUeRetxTx(cell, cellWdAllocInfo, ue, proc);
28453 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, retxTb,
28454 numTxLyrs, &numRb, effBo);
28455 if (ret == RFAILED)
28457 /* Allocation couldn't be made for Retx */
28458 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
28462 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28465 /* Adding UE to allocInfo RETX Lst */
28466 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
28469 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, swpFlg, \
28470 prcdngInf, numTxLyrs, subFrm)
28477 * @brief This function handles fresh TX allocation in case of TM3 UEs
28478 * where both the TBs are free for TX scheduling.
28483 * Function: rgSCHCmnDlTM3TxTx
28484 * Purpose: Reached here for a TM3 UE's HqP's fresh allocation
28485 * where both the TBs are free for TX scheduling.
28486 * If forceTD flag is set
28487 * perform TD on CW1 with TB1.
28492 * RI layered precoding 2 TB on 2 CW.
28493 * Set precoding info.
28494 * Add UE to cellAllocInfo.
28495 * Fill ueAllocInfo.
28497 * effBo is set to a non-zero value if allocation is
28500 * Invoked by: rgSCHCmnDlAllocRbTM3
28502 * @param[in] RgSchCellCb *cell
28503 * @param[in] RgSchDlSf *subFrm
28504 * @param[in] RgSchUeCb *ue
28505 * @param[in] U32 bo
28506 * @param[out] U32 *effBo
28507 * @param[in] RgSchDlHqProcCb *proc
28508 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28513 PRIVATE Void rgSCHCmnDlTM3TxTx
28520 RgSchDlHqProcCb *proc,
28521 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28524 PRIVATE Void rgSCHCmnDlTM3TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28530 RgSchDlHqProcCb *proc;
28531 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28534 RgSchCmnDlUe *ueDl;
28535 RgSchDlRbAlloc *allocInfo;
28542 TRC2(rgSCHCmnDlTM3TxTx);
28545 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28546 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28548 /* Integration_fix: SPS Proc shall always have only one Cw */
28550 #ifdef FOUR_TX_ANTENNA
28551 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28552 (ueDl->mimoInfo.forceTD) || proc->hasDcch) /* Chandra: Avoid SM for DCCH */
28554 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28555 (ueDl->mimoInfo.forceTD))
28558 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
28561 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28562 &allocInfo->raType);
28563 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28564 bo, &numRb, effBo);
28565 if (ret == RFAILED)
28567 /* If allocation couldn't be made then return */
28571 precInfo = 0; /* TD */
28573 else /* Precoding */
28575 allocInfo->dciFormat = TFU_DCI_FORMAT_2A;
28576 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28578 /* Spatial Multiplexing using 2 CWs */
28579 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28580 if (ret == RFAILED)
28582 /* If allocation couldn't be made then return */
28585 noTxLyrs = ueDl->mimoInfo.ri;
28586 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28587 RGSCH_ARRAY_BOUND_CHECK(cell->instIdx, getPrecInfoFunc[0], precInfoAntIdx);
28588 precInfo = (getPrecInfoFunc[0][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
28592 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28595 /* Adding UE to RbAllocInfo TX Lst */
28596 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28598 /* Fill UE allocInfo scratch pad */
28599 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28600 precInfo, noTxLyrs, subFrm);
28607 * @brief This function handles fresh TX allocation in case of TM4 UEs
28608 * where both the TBs are free for TX scheduling.
28613 * Function: rgSCHCmnDlTM4TxTx
28614 * Purpose: Reached here for a TM4 UE's HqP's fresh allocation
28615 * where both the TBs are free for TX scheduling.
28616 * If forceTD flag is set
28617 * perform TD on CW1 with TB1.
28623 * Single layer precoding of TB1 on CW1.
28624 * Set precoding info.
28626 * RI layered precoding 2 TB on 2 CW.
28627 * Set precoding info.
28628 * Add UE to cellAllocInfo.
28629 * Fill ueAllocInfo.
28631 * effBo is set to a non-zero value if allocation is
28634 * Invoked by: rgSCHCmnDlAllocRbTM4
28636 * @param[in] RgSchCellCb *cell
28637 * @param[in] RgSchDlSf *subFrm
28638 * @param[in] RgSchUeCb *ue
28639 * @param[in] U32 bo
28640 * @param[out] U32 *effBo
28641 * @param[in] RgSchDlHqProcCb *proc
28642 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28647 PRIVATE Void rgSCHCmnDlTM4TxTx
28654 RgSchDlHqProcCb *proc,
28655 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28658 PRIVATE Void rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28664 RgSchDlHqProcCb *proc;
28665 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28668 RgSchCmnDlUe *ueDl;
28669 RgSchDlRbAlloc *allocInfo;
28676 TRC2(rgSCHCmnDlTM4TxTx);
28679 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
28680 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
28682 /* Integration_fix: SPS Proc shall always have only one Cw */
28684 #ifdef FOUR_TX_ANTENNA
28685 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28686 (ueDl->mimoInfo.forceTD) || proc->hasDcch) /* Chandra: Avoid SM for DCCH */
28688 if ((RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc)) ||
28689 (ueDl->mimoInfo.forceTD))
28692 if (ueDl->mimoInfo.forceTD) /* Transmit Diversity (TD) */
28695 allocInfo->dciFormat = rgSCHCmnSlctPdcchFrmt(cell, ue, \
28696 &allocInfo->raType);
28698 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28699 bo, &numRb, effBo);
28700 if (ret == RFAILED)
28702 /* If allocation couldn't be made then return */
28706 precInfo = 0; /* TD */
28708 else /* Precoding */
28710 allocInfo->dciFormat = TFU_DCI_FORMAT_2;
28711 allocInfo->raType = RG_SCH_CMN_RA_TYPE0;
28713 if (ueDl->mimoInfo.ri == 1)
28715 /* Single Layer SM using FORMAT 2 */
28716 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
28717 bo, &numRb, effBo);
28718 if (ret == RFAILED)
28720 /* If allocation couldn't be made then return */
28724 precInfo = 0; /* PrecInfo as 0 for RI=1*/
28728 /* Spatial Multiplexing using 2 CWs */
28729 ret = rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, &numRb, effBo);
28730 if (ret == RFAILED)
28732 /* If allocation couldn't be made then return */
28735 noTxLyrs = ueDl->mimoInfo.ri;
28736 precInfoAntIdx = cell->numTxAntPorts/2 - 1;
28737 precInfo = (getPrecInfoFunc[1][precInfoAntIdx])(cell, ue, noTxLyrs, TRUE);
28743 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
28746 /* Adding UE to RbAllocInfo TX Lst */
28747 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
28750 /* Fill UE allocInfo scratch pad */
28751 RG_SCH_CMN_FILL_DL_TXINFO(allocInfo, numRb, FALSE, \
28752 precInfo, noTxLyrs, subFrm);
28759 * @brief This function determines the RBs and Bytes required for BO
28760 * transmission for UEs configured with TM 4.
28764 * Function: rgSCHCmnDlAllocTxRbTM4
28765 * Purpose: Invokes the functionality particular to the
28766 * current state of the TBs of the "proc".
28768 * Reference parameter effBo is filled with the allocated bytes.
28769 * Returns RFAILED if BO not satisfied at all.
28771 * Invoked by: rgSCHCmnDlAllocTxRb
28773 * @param[in] RgSchCellCb *cell
28774 * @param[in] RgSchDlSf *subFrm
28775 * @param[in] RgSchUeCb *ue
28776 * @param[in] U32 bo
28777 * @param[out] U32 *effBo
28778 * @param[in] RgSchDlHqProcCb *proc
28779 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28784 PRIVATE Void rgSCHCmnDlAllocTxRbTM4
28791 RgSchDlHqProcCb *proc,
28792 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28795 PRIVATE Void rgSCHCmnDlAllocTxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28801 RgSchDlHqProcCb *proc;
28802 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28805 TRC2(rgSCHCmnDlAllocTxRbTM4);
28807 /* Both TBs free for TX allocation */
28808 rgSCHCmnDlTM4TxTx(cell, subFrm, ue, bo, effBo,\
28809 proc, cellWdAllocInfo);
28816 * @brief This function determines the RBs and Bytes required for BO
28817 * retransmission for UEs configured with TM 4.
28821 * Function: rgSCHCmnDlAllocRetxRbTM4
28822 * Purpose: Invokes the functionality particular to the
28823 * current state of the TBs of the "proc".
28825 * Reference parameter effBo is filled with the allocated bytes.
28826 * Returns RFAILED if BO not satisfied at all.
28828 * Invoked by: rgSCHCmnDlAllocRetxRb
28830 * @param[in] RgSchCellCb *cell
28831 * @param[in] RgSchDlSf *subFrm
28832 * @param[in] RgSchUeCb *ue
28833 * @param[in] U32 bo
28834 * @param[out] U32 *effBo
28835 * @param[in] RgSchDlHqProcCb *proc
28836 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28841 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4
28848 RgSchDlHqProcCb *proc,
28849 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28852 PRIVATE Void rgSCHCmnDlAllocRetxRbTM4(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28858 RgSchDlHqProcCb *proc;
28859 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28862 TRC2(rgSCHCmnDlAllocRetxRbTM4);
28864 if ((proc->tbInfo[0].state == HQ_TB_NACKED) &&
28865 (proc->tbInfo[1].state == HQ_TB_NACKED))
28867 /* Both TBs require RETX allocation */
28868 rgSCHCmnDlTM4RetxRetx(cell, subFrm, ue, bo, effBo,\
28869 proc, cellWdAllocInfo);
28873 /* One of the TBs needs RETX allocation. The other TB may or may not
28874 * be available for new TX allocation. */
28875 rgSCHCmnDlTM4TxRetx(cell, subFrm, ue, bo, effBo,\
28876 proc, cellWdAllocInfo);
28885 * @brief This function determines the RBs and Bytes required for BO
28886 * transmission for UEs configured with TM 5.
28890 * Function: rgSCHCmnDlAllocTxRbTM5
28893 * Reference parameter effBo is filled with the allocated bytes.
28894 * Returns RFAILED if BO not satisfied at all.
28896 * Invoked by: rgSCHCmnDlAllocTxRb
28898 * @param[in] RgSchCellCb *cell
28899 * @param[in] RgSchDlSf *subFrm
28900 * @param[in] RgSchUeCb *ue
28901 * @param[in] U32 bo
28902 * @param[out] U32 *effBo
28903 * @param[in] RgSchDlHqProcCb *proc
28904 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28909 PRIVATE Void rgSCHCmnDlAllocTxRbTM5
28916 RgSchDlHqProcCb *proc,
28917 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28920 PRIVATE Void rgSCHCmnDlAllocTxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28926 RgSchDlHqProcCb *proc;
28927 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28930 TRC2(rgSCHCmnDlAllocTxRbTM5);
28931 #if (ERRCLASS & ERRCLS_DEBUG)
28932 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
28939 * @brief This function determines the RBs and Bytes required for BO
28940 * retransmission for UEs configured with TM 5.
28944 * Function: rgSCHCmnDlAllocRetxRbTM5
28947 * Reference parameter effBo is filled with the allocated bytes.
28948 * Returns RFAILED if BO not satisfied at all.
28950 * Invoked by: rgSCHCmnDlAllocRetxRb
28952 * @param[in] RgSchCellCb *cell
28953 * @param[in] RgSchDlSf *subFrm
28954 * @param[in] RgSchUeCb *ue
28955 * @param[in] U32 bo
28956 * @param[out] U32 *effBo
28957 * @param[in] RgSchDlHqProcCb *proc
28958 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28963 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5
28970 RgSchDlHqProcCb *proc,
28971 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
28974 PRIVATE Void rgSCHCmnDlAllocRetxRbTM5(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
28980 RgSchDlHqProcCb *proc;
28981 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
28984 TRC2(rgSCHCmnDlAllocRetxRbTM5);
28985 #if (ERRCLASS & ERRCLS_DEBUG)
28986 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Invalid TM 5 for CRNTI:%d",ue->ueId);
28994 * @brief This function determines the RBs and Bytes required for BO
28995 * transmission for UEs configured with TM 6.
28999 * Function: rgSCHCmnDlAllocTxRbTM6
29002 * Reference parameter effBo is filled with the allocated bytes.
29003 * Returns RFAILED if BO not satisfied at all.
29005 * Invoked by: rgSCHCmnDlAllocTxRb
29007 * @param[in] RgSchCellCb *cell
29008 * @param[in] RgSchDlSf *subFrm
29009 * @param[in] RgSchUeCb *ue
29010 * @param[in] U32 bo
29011 * @param[out] U32 *effBo
29012 * @param[in] RgSchDlHqProcCb *proc
29013 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29018 PRIVATE Void rgSCHCmnDlAllocTxRbTM6
29025 RgSchDlHqProcCb *proc,
29026 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29029 PRIVATE Void rgSCHCmnDlAllocTxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29035 RgSchDlHqProcCb *proc;
29036 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29039 RgSchDlRbAlloc *allocInfo;
29040 RgSchCmnDlUe *ueDl;
29044 TRC2(rgSCHCmnDlAllocTxRbTM6);
29047 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29048 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29050 if (ueDl->mimoInfo.forceTD)
29052 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29053 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29057 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29058 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29059 /* Fill precoding information for FORMAT 1B */
29060 /* The 4 least significant bits carry the PMI.
29061 * Bit 4 carries the PMI confirmation (puschFdbkVld) flag.
29063 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29064 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
29066 ret = rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, &proc->tbInfo[0],\
29067 bo, &numRb, effBo);
29068 if (ret == RFAILED)
29070 /* If allocation couldn't be made then return */
29075 if (!RG_SCH_CMN_SPS_DL_IS_SPS_HQP(proc))
29078 /* Adding UE to RbAllocInfo TX Lst */
29079 rgSCHCmnDlRbInfoAddUeTx(cell, cellWdAllocInfo, ue, proc);
29081 /* Fill UE alloc Info */
29082 allocInfo->rbsReq = numRb;
29083 allocInfo->dlSf = subFrm;
29089 * @brief This function determines the RBs and Bytes required for BO
29090 * retransmission for UEs configured with TM 6.
29094 * Function: rgSCHCmnDlAllocRetxRbTM6
29097 * Reference Parameter effBo is filled with allocated bytes.
29098 * Returns RFAILED if BO not satisfied at all.
29100 * Invoked by: rgSCHCmnDlAllocRetxRb
29102 * @param[in] RgSchCellCb *cell
29103 * @param[in] RgSchDlSf *subFrm
29104 * @param[in] RgSchUeCb *ue
29105 * @param[in] U32 bo
29106 * @param[out] U32 *effBo
29107 * @param[in] RgSchDlHqProcCb *proc
29108 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29113 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6
29120 RgSchDlHqProcCb *proc,
29121 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29124 PRIVATE Void rgSCHCmnDlAllocRetxRbTM6(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29130 RgSchDlHqProcCb *proc;
29131 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29134 RgSchDlRbAlloc *allocInfo;
29135 RgSchCmnDlUe *ueDl;
29139 TRC2(rgSCHCmnDlAllocRetxRbTM6);
29142 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29143 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29145 if (ueDl->mimoInfo.forceTD)
29147 allocInfo->dciFormat = TFU_DCI_FORMAT_1A;
29148 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29152 allocInfo->dciFormat = TFU_DCI_FORMAT_1B;
29153 allocInfo->raType = RG_SCH_CMN_RA_TYPE2;
29154 /* Fill precoding information for FORMAT 1B */
29155 /* The 4 least significant bits indicate the PMI.
29156 * The 4th most significant bit carries the PMI confirmation.
29158 allocInfo->mimoAllocInfo.precIdxInfo |= ue->mimoInfo.puschFdbkVld << 4;
29159 allocInfo->mimoAllocInfo.precIdxInfo |= ueDl->mimoInfo.pmi;
29162 /* Get the Allocation in terms of RBs that are required for
29163 * this retx of TB1 */
29164 ret = rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, &proc->tbInfo[0],
29166 if (ret == RFAILED)
29168 /* Allocation couldn't be made for Retx */
29169 rgSCHCmnDlAdd2NonSchdRetxLst(cellWdAllocInfo, ue, proc);
29172 /* Adding UE to allocInfo RETX Lst */
29173 rgSCHCmnDlRbInfoAddUeRetx(cell, cellWdAllocInfo, ue, proc);
29174 /* Fill UE alloc Info */
29175 allocInfo->rbsReq = numRb;
29176 allocInfo->dlSf = subFrm;
29182 * @brief This function determines the RBs and Bytes required for BO
29183 * transmission for UEs configured with TM 7.
29187 * Function: rgSCHCmnDlAllocTxRbTM7
29190 * Reference Parameter effBo is filled with allocated bytes.
29191 * Returns RFAILED if BO not satisfied at all.
29193 * Invoked by: rgSCHCmnDlAllocTxRb
29195 * @param[in] RgSchCellCb *cell
29196 * @param[in] RgSchDlSf *subFrm
29197 * @param[in] RgSchUeCb *ue
29198 * @param[in] U32 bo
29199 * @param[out] U32 *effBo
29200 * @param[in] RgSchDlHqProcCb *proc
29201 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29206 PRIVATE Void rgSCHCmnDlAllocTxRbTM7
29213 RgSchDlHqProcCb *proc,
29214 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29217 PRIVATE Void rgSCHCmnDlAllocTxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29223 RgSchDlHqProcCb *proc;
29224 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29227 TRC2(rgSCHCmnDlAllocTxRbTM7);
29228 rgSCHCmnDlAllocTxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29234 * @brief This function determines the RBs and Bytes required for BO
29235 * retransmission for UEs configured with TM 7.
29239 * Function: rgSCHCmnDlAllocRetxRbTM7
29242 * Reference Parameter effBo is filled with allocated bytes.
29243 * Returns RFAILED if BO not satisfied at all.
29245 * Invoked by: rgSCHCmnDlAllocRetxRb
29247 * @param[in] RgSchCellCb *cell
29248 * @param[in] RgSchDlSf *subFrm
29249 * @param[in] RgSchUeCb *ue
29250 * @param[in] U32 bo
29251 * @param[out] U32 *effBo
29252 * @param[in] RgSchDlHqProcCb *proc
29253 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29258 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7
29265 RgSchDlHqProcCb *proc,
29266 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29269 PRIVATE Void rgSCHCmnDlAllocRetxRbTM7(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29275 RgSchDlHqProcCb *proc;
29276 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29279 TRC2(rgSCHCmnDlAllocRetxRbTM7);
29280 rgSCHCmnDlAllocRetxRb1Tb1Cw(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo);
29286 * @brief This function invokes the TM specific DL TX RB Allocation routine.
29290 * Function: rgSCHCmnDlAllocTxRb
29291 * Purpose: This function invokes the TM specific
29292 * DL TX RB Allocation routine.
29294 * Invoked by: Specific Schedulers
29296 * @param[in] RgSchCellCb *cell
29297 * @param[in] RgSchDlSf *subFrm
29298 * @param[in] RgSchUeCb *ue
29299 * @param[in] U32 bo
29300 * @param[out] U32 *effBo
29301 * @param[in] RgSchDlHqProcCb *proc
29302 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29307 PUBLIC S16 rgSCHCmnDlAllocTxRb
29314 RgSchDlHqProcCb *proc,
29315 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29318 PUBLIC S16 rgSCHCmnDlAllocTxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29324 RgSchDlHqProcCb *proc;
29325 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29328 U32 newSchBits = 0;
29329 U32 prevSchBits = 0;
29330 RgSchDlRbAlloc *allocInfo;
29332 TRC2(rgSCHCmnDlAllocTxRb);
29334 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29336 ue->dl.aggTbBits = 0;
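/* First allocation attempt for this UE in the current TTI: reset the running total of TB bits already granted. */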
29340 /* Calculate totals bits previously allocated */
29341 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29342 if (allocInfo->tbInfo[0].schdlngForTb)
29344 prevSchBits += allocInfo->tbInfo[0].bytesReq;
29346 if (allocInfo->tbInfo[1].schdlngForTb)
29348 prevSchBits += allocInfo->tbInfo[1].bytesReq;
29351 /* Call TM specific RB allocation routine */
29352 (dlAllocTxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29353 proc, cellWdAllocInfo);
29357 /* Calculate totals bits newly allocated */
29358 if (allocInfo->tbInfo[0].schdlngForTb)
29360 newSchBits += allocInfo->tbInfo[0].bytesReq;
29362 if (allocInfo->tbInfo[1].schdlngForTb)
29364 newSchBits += allocInfo->tbInfo[1].bytesReq;
29366 if (newSchBits > prevSchBits)
29368 ue->dl.aggTbBits += ((newSchBits - prevSchBits) * 8);
29369 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
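/* Only the additional bytes granted in this pass are accounted (converted to bits), and the TTI of this grant is remembered for the freshness check above. */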
29376 /* DwPTS Scheduling Changes Start */
29379 * @brief Retransmit decision for TDD. Retx is avoided in the below cases:
29380 * 1) DL Sf -> Spl Sf
29381 * 2) DL SF -> DL SF 0
29385 * Function: rgSCHCmnRetxAvoidTdd
29386 * Purpose: Avoid allocating RETX for cases 1, 2
29388 * Invoked by: rgSCHCmnRetxAvoidTdd
29390 * @param[in] RgSchDlSf *curSf
29391 * @param[in] RgSchCellCb *cell
29392 * @param[in] RgSchDlHqProcCb *proc
29397 PUBLIC Bool rgSCHCmnRetxAvoidTdd
29401 RgSchDlHqProcCb *proc
29404 PUBLIC Bool rgSCHCmnRetxAvoidTdd(curSf, cell, proc)
29407 RgSchDlHqProcCb *proc;
29410 RgSchTddSfType txSfType = 0;
29412 TRC2(rgSCHCmnRetxAvoidTdd);
29414 /* Get the RBs of TB that will be retransmitted */
29415 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29417 txSfType = proc->tbInfo[0].sfType;
29419 #ifdef XEON_SPECIFIC_CHANGES
29420 #ifndef XEON_TDD_SPCL
29421 /* Avoid re-transmission on Normal SF when the corresponding TB was transmitted on SPCL SF */
29422 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
29429 if (proc->tbInfo[1].state == HQ_TB_NACKED)
29431 /* Select the TxSf with the highest num of possible REs
29432 * In ascending order -> 1) SPL SF 2) DL_SF_0 3) DL_SF */
29433 txSfType = RGSCH_MAX(txSfType, proc->tbInfo[1].sfType);
29435 #ifdef XEON_SPECIFIC_CHANGES
29436 #ifndef XEON_TDD_SPCL
29437 /* Avoid re-transmission on Normal SF when the corresponding TB was transmitted on SPCL SF */
29438 if(txSfType <= RG_SCH_SPL_SF_DATA && curSf->sfType >= RG_SCH_DL_SF_0)
29446 if (txSfType > curSf->sfType)
29457 /* DwPTS Scheduling Changes End */
29460 * @brief Avoid allocating RETX in case of collision
29461 * with reserved resources for BCH/PSS/SSS occasions.
29465 * Function: rgSCHCmnRetxAllocAvoid
29466 * Purpose: Avoid allocating RETX in case of collision
29467 * with reserved resources for BCH/PSS/SSS occasions
29469 * Invoked by: rgSCHCmnDlAllocRetxRb
29471 * @param[in] RgSchDlSf *subFrm
29472 * @param[in] RgSchUeCb *ue
29473 * @param[in] RgSchDlHqProcCb *proc
29478 PUBLIC Bool rgSCHCmnRetxAllocAvoid
29482 RgSchDlHqProcCb *proc
29485 PUBLIC Bool rgSCHCmnRetxAllocAvoid(subFrm, cell, proc)
29488 RgSchDlHqProcCb *proc;
29493 TRC2(rgSCHCmnRetxAllocAvoid);
29495 if (proc->tbInfo[0].state == HQ_TB_NACKED)
29497 reqRbs = proc->tbInfo[0].dlGrnt.numRb;
29501 reqRbs = proc->tbInfo[1].dlGrnt.numRb;
29503 /* Consider the dlGrnt.numRb of the Retransmitting proc->tbInfo
29504 * and current available RBs to determine if this RETX TB
29505 * will collide with the BCH/PSS/SSS occasion */
29506 if (subFrm->sfNum % 5 == 0)
29508 if ((subFrm->bwAssigned < cell->pbchRbEnd) &&
29509 (((subFrm->bwAssigned + reqRbs) - cell->pbchRbStart) > 0))
29521 * @brief This function invokes the TM specific DL RETX RB Allocation routine.
29525 * Function: rgSCHCmnDlAllocRetxRb
29526 * Purpose: This function invokes the TM specific
29527 * DL RETX RB Allocation routine.
29529 * Invoked by: Specific Schedulers
29531 * @param[in] RgSchCellCb *cell
29532 * @param[in] RgSchDlSf *subFrm
29533 * @param[in] RgSchUeCb *ue
29534 * @param[in] U32 bo
29535 * @param[out] U32 *effBo
29536 * @param[in] RgSchDlHqProcCb *proc
29537 * @param[out] RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29542 PUBLIC S16 rgSCHCmnDlAllocRetxRb
29549 RgSchDlHqProcCb *proc,
29550 RgSchCmnDlRbAllocInfo *cellWdAllocInfo
29553 PUBLIC S16 rgSCHCmnDlAllocRetxRb(cell, subFrm, ue, bo, effBo, proc, cellWdAllocInfo)
29559 RgSchDlHqProcCb *proc;
29560 RgSchCmnDlRbAllocInfo *cellWdAllocInfo;
29563 U32 newSchBits = 0;
29564 RgSchDlRbAlloc *allocInfo;
29566 TRC2(rgSCHCmnDlAllocRetxRb);
29568 if ( !RGSCH_TIMEINFO_SAME((cell->crntTime),(ue->dl.lstSchTime) ))
29570 ue->dl.aggTbBits = 0;
29574 /* Check for DL BW exhaustion */
29575 if (subFrm->bw <= subFrm->bwAssigned)
29579 /* Call TM specific RB allocation routine */
29580 (dlAllocRetxRbFunc[ue->mimoInfo.txMode - 1])(cell, subFrm, ue, bo, effBo, \
29581 proc, cellWdAllocInfo);
29585 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29586 /* Calculate totals bits newly allocated */
29587 if (allocInfo->tbInfo[0].schdlngForTb)
29589 newSchBits += allocInfo->tbInfo[0].bytesReq;
29591 if (allocInfo->tbInfo[1].schdlngForTb)
29593 newSchBits += allocInfo->tbInfo[1].bytesReq;
29595 ue->dl.aggTbBits += (newSchBits * 8);
29596 RGSCHCPYTIMEINFO((cell->crntTime),(ue->dl.lstSchTime))
29604 * @brief This function determines the RBs and Bytes required for
29605 * Transmission on 1 CW.
29609 * Function: rgSCHCmnDlAlloc1CwTxRb
29610 * Purpose: This function determines the RBs and Bytes required
29611 * for Transmission of DL SVC BO on 1 CW.
29612 * Also, takes care of SVC by SVC allocation by tracking
29613 * previous SVCs allocations.
29614 * Returns RFAILED if BO not satisfied at all.
29616 * Invoked by: DL UE Allocation
29618 * @param[in] RgSchCellCb *cell
29619 * @param[in] RgSchDlSf *subFrm
29620 * @param[in] RgSchUeCb *ue
29621 * @param[in] RgSchDlHqTbCb *tbInfo
29622 * @param[in] U32 bo
29623 * @param[out] U8 *numRb
29624 * @param[out] U32 *effBo
29629 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb
29634 RgSchDlHqTbCb *tbInfo,
29640 PRIVATE S16 rgSCHCmnDlAlloc1CwTxRb(cell, subFrm, ue, tbInfo, bo, numRb, effBo)
29644 RgSchDlHqTbCb *tbInfo;
29653 RgSchCmnDlUe *ueDl;
29654 RgSchDlRbAlloc *allocInfo;
29657 /* Correcting a wrap-around issue.
29658 * This change has been done at multiple places in this function.*/
29660 TRC2(rgSCHCmnDlAlloc1CwTxRb);
29663 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29664 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29665 oldReq = ueDl->outStndAlloc;
29668 //TODO_SID: Currently setting max Tb size wrt to 5GTF TM3
29669 iTbs = ue->ue5gtfCb.mcs;
29670 ueDl->maxTbSz = MAX_5GTF_TB_SIZE * ue->ue5gtfCb.rank;
29671 ueDl->maxRb = MAX_5GTF_PRBS;
29673 ueDl->outStndAlloc += bo;
29674 /* consider Cumulative amount of this BO and bytes so far allocated */
29675 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbSz/8);
29676 /* Get the number of REs needed for this bo. */
29677 //noRes = ((bo * 8 * 1024) / eff);
29679 /* Get the number of RBs needed for this transmission */
29680 /* Number of RBs = No of REs / No of REs per RB */
29681 //tempNumRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
29682 tempNumRb = MAX_5GTF_PRBS;
29683 tbSz = RGSCH_MIN(bo, (rgSch5gtfTbSzTbl[iTbs]/8) * ue->ue5gtfCb.rank);
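/* The TB size is bounded both by the pending BO and by the maximum TB size for the configured 5GTF MCS (rgSch5gtfTbSzTbl is in bits, hence /8) scaled by the transmission rank. */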
29685 /* DwPts Scheduling Changes End */
29686 *effBo = RGSCH_MIN(tbSz - oldReq, reqBytes);
29689 //RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs);
29694 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbSz, \
29695 iTbs, imcs, tbInfo, ue->ue5gtfCb.rank);
29696 *numRb = (U8) tempNumRb;
29698 /* Update the subframe Allocated BW field */
29699 subFrm->bwAssigned = subFrm->bwAssigned + tempNumRb - allocInfo->rbsReq;
29706 * @brief This function is invoked in the event of any TB's allocation
29707 * being underutilized by the specific scheduler. Here we reduce iMcs
29708 * to increase redundancy and hence increase reception quality at the UE.
29712 * Function: rgSCHCmnRdcImcsTxTb
29713 * Purpose: This function shall reduce the iMcs in accordance with
29714 * the total consumed bytes by the UE at allocation
29717 * Invoked by: UE DL Allocation finalization routine
29718 * of specific scheduler.
29720 * @param[in] RgSchDlRbAlloc *allocInfo
29721 * @param[in] U8 tbInfoIdx
29722 * @param[in] U32 cnsmdBytes
29727 PUBLIC Void rgSCHCmnRdcImcsTxTb
29729 RgSchDlRbAlloc *allocInfo,
29734 PUBLIC Void rgSCHCmnRdcImcsTxTb(allocInfo, tbInfoIdx, cnsmdBytes)
29735 RgSchDlRbAlloc *allocInfo;
29741 /*The below functionality is not needed.*/
29746 TRC2(rgSCHCmnRdcImcsTxTb);
29748 iTbs = allocInfo->tbInfo[tbInfoIdx].iTbs;
29749 noLyr = allocInfo->tbInfo[tbInfoIdx].noLyr;
29750 numRb = allocInfo->rbsAlloc;
29753 if ((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) == cnsmdBytes)
29758 /* Get iTbs as suitable for the consumed bytes */
29759 while((rgTbSzTbl[noLyr-1][iTbs][numRb-1]/8) > cnsmdBytes)
29763 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].\
29764 tbCb->dlGrnt.iMcs);
29770 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, allocInfo->tbInfo[tbInfoIdx].tbCb->dlGrnt.iMcs);
29777 * @brief This function determines the RBs and Bytes required for
29778 * Transmission on 2 CWs.
29782 * Function: rgSCHCmnDlAlloc2CwTxRb
29783 * Purpose: This function determines the RBs and Bytes required
29784 * for Transmission of DL SVC BO on 2 CWs.
29785 * Also, takes care of SVC by SVC allocation by tracking
29786 * previous SVCs allocations.
29787 * Returns RFAILED if BO not satisfied at all.
29789 * Invoked by: TM3 and TM4 DL UE Allocation
29791 * @param[in] RgSchCellCb *cell
29792 * @param[in] RgSchDlSf *subFrm
29793 * @param[in] RgSchUeCb *ue
29794 * @param[in] RgSchDlHqProcCb *proc
29795 * @param[in] RgSchDlHqProcCb bo
29796 * @param[out] U8 *numRb
29797 * @param[out] U32 *effBo
29802 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb
29807 RgSchDlHqProcCb *proc,
29813 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRb(cell, subFrm, ue, proc, bo, numRbRef, effBo)
29817 RgSchDlHqProcCb *proc;
29829 RgSchCmnDlCell *cellDl;
29830 RgSchCmnDlUe *ueDl;
29831 RgSchDlRbAlloc *allocInfo;
29834 /* Fix: MUE_PERTTI_DL */
29836 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
29837 U8 cfi = cellSch->dl.currCfi;
29844 TRC2(rgSCHCmnDlAlloc2CwTxRb);
29847 cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
29848 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
29849 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
29850 oldReq = ueDl->outStndAlloc;
29853 if (ueDl->maxTbBits > ue->dl.aggTbBits)
29855 availBits = ueDl->maxTbBits - ue->dl.aggTbBits;
29857 /* check if we can further allocate to this UE */
29858 if ((ue->dl.aggTbBits >= ueDl->maxTbBits) ||
29859 (allocInfo->tbInfo[0].bytesReq >= ueDl->maxTbSz/8) ||
29860 (allocInfo->tbInfo[1].bytesReq >= ueDl->maxTbSz/8) ||
29861 (allocInfo->rbsReq >= ueDl->maxRb))
29863 RLOG_ARG0(L_DEBUG,DBG_CELLID,cell->cellId,
29864 "rgSCHCmnDlAllocRb(): UEs max allocation exceed");
29868 noLyr1 = ueDl->mimoInfo.cwInfo[0].noLyr;
29869 noLyr2 = ueDl->mimoInfo.cwInfo[1].noLyr;
29871 /* If there is no CFI change, continue to use the BLER based
29873 if (ueDl->lastCfi == cfi)
29875 iTbs1 = ueDl->mimoInfo.cwInfo[0].iTbs[noLyr1 - 1];
29876 iTbs2 = ueDl->mimoInfo.cwInfo[1].iTbs[noLyr2 - 1];
29880 U8 cqi = ueDl->mimoInfo.cwInfo[0].cqi;
29882 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 0, noLyr1);
29884 iTbs1 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 0, noLyr1);
29887 cqi = ueDl->mimoInfo.cwInfo[1].cqi;
29889 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, cqi, cfi, 1, noLyr2);
29891 iTbs2 = (U8) rgSchCmnFetchItbs(cell, ueDl, cqi, cfi, 1, noLyr2);
29895 /*ccpu00131191 and ccpu00131317 - Fix for RRC Reconfig failure
29896 * issue for VoLTE call */
29897 //if ((proc->hasDcch) || (TRUE == rgSCHLaaSCellEnabled(cell)))
29917 else if(!cellSch->dl.isDlFreqSel)
29920 /* for Tdd reduce iTbs only for SF0. SF5 contains only
29921 * SSS and can be ignored */
29922 if (subFrm->sfNum == 0)
29924 (iTbs1 > 1)? (iTbs1 -= 1) : (iTbs1 = 0);
29925 (iTbs2 > 1)? (iTbs2 -= 1) : (iTbs2 = 0);
29927 /* For SF 3 and 8 the CRC is failing in DL.
29928 A proper fix is needed after the reply from
29930 #ifdef CA_PHY_BRDCM_61765
29931 if ((subFrm->sfNum == 3) || (subFrm->sfNum == 8))
29933 (iTbs1 > 2)? (iTbs1 -= 2) : (iTbs1 = 0);
29934 (iTbs2 > 2)? (iTbs2 -= 2) : (iTbs2 = 0);
29942 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
29944 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
29948 eff1 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr1 - 1][cfi]))[iTbs1];
29949 eff2 = (*(RgSchCmnTbSzEff *)(cellSch->dl.cqiToEffTbl[noLyr2 - 1][cfi]))[iTbs2];
29952 bo = RGSCH_MIN(bo,availBits/8);
29953 ueDl->outStndAlloc += bo;
29954 /* consider Cumulative amount of this BO and bytes so far allocated */
29955 bo = RGSCH_MIN(ueDl->outStndAlloc, ueDl->maxTbBits/8);
29956 bo = RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff1)/(eff1+eff2)),
29958 RGSCH_MIN(RGSCH_MAX(RGSCH_CMN_MIN_GRNT_HDR, (bo*eff2)/(eff1+eff2)),
29959 (ueDl->maxTbSz)/8) +
29960 1; /* Add 1 to adjust the truncation at weighted averaging */
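/* The cumulative BO is split across the two codewords in proportion to their efficiencies (eff1, eff2), with each share clamped between the minimum grant size and the per-CW maximum TB size in bytes. */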
29961 /* Get the number of REs needed for this bo. */
29962 noRes = ((bo * 8 * 1024) / (eff1 + eff2));
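/* Translate the byte target into REs using the combined efficiency of both codewords; the cqiToEffTbl entries appear to be scaled by 1024, which the *1024 factor here undoes. */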
29964 /* Get the number of RBs needed for this transmission */
29965 /* Number of RBs = No of REs / No of REs per RB */
29966 numRb = RGSCH_CEIL(noRes, cellDl->noResPerRb[cfi]);
29967 /* Cannot exceed the maximum number of RBs per UE */
29968 if (numRb > ueDl->maxRb)
29970 numRb = ueDl->maxRb;
29975 if(RFAILED == rgSCHLaaCmn2CwAdjustPrb(allocInfo, boTmp, &numRb, ueDl, noLyr1, noLyr2, iTbs1, iTbs2))
29978 while ((numRb <= ueDl->maxRb) &&
29979 (rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1] <= ueDl->maxTbSz) &&
29980 (rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1] <= ueDl->maxTbSz) &&
29981 ((rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8 +
29982 rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8) <= bo))
29988 availBw = subFrm->bw - subFrm->bwAssigned;
29989 /* Cannot exceed the total number of RBs in the cell */
29990 if ((S16)(numRb - allocInfo->rbsReq) > availBw)
29992 numRb = availBw + allocInfo->rbsReq;
29994 tb1Sz = rgTbSzTbl[noLyr1 - 1][iTbs1][numRb-1]/8;
29995 tb2Sz = rgTbSzTbl[noLyr2 - 1][iTbs2][numRb-1]/8;
29996 /* DwPts Scheduling Changes Start */
29998 if(subFrm->sfType == RG_SCH_SPL_SF_DATA)
30000 /* Max Rb for Special Sf is approximated as 4/3 of maxRb */
30001 rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, (U8*)&numRb, ueDl->maxRb*4/3,
30002 &iTbs1, &iTbs2, noLyr1,
30003 noLyr2, &tb1Sz, &tb2Sz, cfi);
30004 /* Check for available Bw */
30005 if ((S16)numRb - allocInfo->rbsReq > availBw)
30007 numRb = availBw + allocInfo->rbsReq;
30008 tb1Sz = rgTbSzTbl[noLyr1-1][iTbs1][RGSCH_MAX(numRb*3/4,1)-1]/8;
30009 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs2][RGSCH_MAX(numRb*3/4,1)-1]/8;
30013 /* DwPts Scheduling Changes End */
30014 /* Update the subframe Allocated BW field */
30015 subFrm->bwAssigned = subFrm->bwAssigned + numRb - \
30018 *effBo = RGSCH_MIN((tb1Sz + tb2Sz) - oldReq, reqBytes);
30021 if (ROK != rgSCHLaaCmn2TBPrbCheck(allocInfo, tb1Sz, tb2Sz, boTmp, effBo, iTbs1, iTbs2, numRb, proc))
30027 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs1, imcs1);
30028 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs2, imcs2);
30029 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tb1Sz, \
30030 iTbs1, imcs1, &proc->tbInfo[0], noLyr1);
30031 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30032 iTbs2, imcs2, &proc->tbInfo[1], noLyr2);
30033 *numRbRef = (U8)numRb;
30041 * @brief This function determines the RBs and Bytes required for
30042 * Transmission & Retransmission on 2 CWs.
30046 * Function: rgSCHCmnDlAlloc2CwTxRetxRb
30047 * Purpose: This function determines the RBs and Bytes required
30048 * for Transmission & Retransmission on 2 CWs. Allocate
30049 * RETX TB on a better CW and restrict new TX TB by
30051 * Returns RFAILED if BO not satisfied at all.
30053 * Invoked by: TM3 and TM4 DL UE Allocation
30055 * @param[in] RgSchCellCb *cell
30056 * @param[in] RgSchDlSf *subFrm
30057 * @param[in] RgSchUeCb *ue
30058 * @param[in] RgSchDlHqTbCb *reTxTb
30059 * @param[in] RgSchDlHqTbCb *txTb
30060 * @param[out] U8 *numRb
30061 * @param[out] U32 *effBo
30066 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb
30071 RgSchDlHqTbCb *reTxTb,
30072 RgSchDlHqTbCb *txTb,
30077 PRIVATE S16 rgSCHCmnDlAlloc2CwTxRetxRb(cell, subFrm, ue, reTxTb, txTb, numRb,\
30082 RgSchDlHqTbCb *reTxTb;
30083 RgSchDlHqTbCb *txTb;
30088 RgSchCmnDlUe *ueDl;
30089 RgSchDlRbAlloc *allocInfo;
30093 RgSchCmnDlUeCwInfo *otherCw;
30095 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
30096 U8 cfi = cellDl->currCfi;
30099 TRC2(rgSCHCmnDlAlloc2CwTxRetxRb);
30101 ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
30102 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30103 otherCw = &ueDl->mimoInfo.cwInfo[!(ueDl->mimoInfo.btrCwIdx)];
30106 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30107 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
30109 availBw = subFrm->bw - subFrm->bwAssigned;
30110 *numRb = reTxTb->dlGrnt.numRb;
30112 #ifdef XEON_TDD_SPCL
30113 *numRb = (reTxTb->initTxNumRbs);
30114 if(reTxTb->sfType == RG_SCH_SPL_SF_DATA && subFrm->sfType != RG_SCH_SPL_SF_DATA)
30116 *numRb = (reTxTb->initTxNumRbs*3/4);
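/* The TB was first sent on a special SF (DwPTS), whose RBs carry roughly 3/4 of the REs of a normal DL SF, so about 3/4 of the initial RB count suffices for the retransmission here. */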
30120 RLOG1(L_ERROR," Number of RBs [%d] is less than or equal to 3",*numRb);
30126 if ((S16)*numRb > availBw)
30130 /* Update the subframe Allocated BW field */
30131 subFrm->bwAssigned += *numRb;
30132 noLyr2 = otherCw->noLyr;
30133 RG_SCH_CMN_GET_MCS_FOR_RETX(reTxTb, imcs1);
30135 /* If there is no CFI change, continue to use the BLER based
30137 if (ueDl->lastCfi == cfi)
30139 iTbs = otherCw->iTbs[noLyr2-1];
30144 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, subFrm, otherCw->cqi, cfi,
30145 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
30147 iTbs = (U8) rgSchCmnFetchItbs(cell, ueDl, otherCw->cqi, cfi,
30148 !(ueDl->mimoInfo.btrCwIdx), noLyr2);
30151 tb2Sz = rgTbSzTbl[noLyr2-1][iTbs][*numRb-1]/8;
30152 /* DwPts Scheduling Changes Start */
30155 /* DwPts Scheduling Changes End */
30156 RG_SCH_CMN_DL_TBS_TO_MCS(iTbs, imcs2);
30158 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], reTxTb->tbSz, \
30159 0, imcs1, reTxTb, reTxTb->numLyrs);
30161 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], tb2Sz, \
30162 iTbs, imcs2, txTb, noLyr2);
30164 *effBo = reTxTb->tbSz + tb2Sz;
30171 * @brief This function determines the RBs and Bytes required for BO
30172 * Retransmission on 2 CWs.
30176 * Function: rgSCHCmnDlAlloc2CwRetxRb
30177 * Purpose: This function determines the RBs and Bytes required
30178 * for BO Retransmission on 2 CWs. Allocate larger TB
30179 * on a better CW and check if the smaller TB can be
30180 * accommodated on the other CW.
30181 * Returns RFAILED if BO not satisfied at all.
30183 * Invoked by: Common Scheduler
30185 * @param[in] RgSchCellCb *cell
30186 * @param[in] RgSchDlSf *subFrm
30187 * @param[in] RgSchUeCb *ue
30188 * @param[in] RgSchDlHqProcCb *proc
30189 * @param[out] U8 *numRb
30190 * @param[out] Bool *swpFlg
30191 * @param[out] U32 *effBo
30196 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb
30201 RgSchDlHqProcCb *proc,
30207 PRIVATE S16 rgSCHCmnDlAlloc2CwRetxRb(cell, subFrm, ue, proc,\
30208 numRb, swpFlg, effBo)
30212 RgSchDlHqProcCb *proc;
30218 RgSchDlRbAlloc *allocInfo;
30221 RgSchDlHqTbCb *lrgTbInfo, *othrTbInfo;
30223 TRC2(rgSCHCmnDlAlloc2CwRetxRb);
30225 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30228 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30229 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
30231 lrgTbInfo = &proc->tbInfo[0];
30232 othrTbInfo = &proc->tbInfo[1];
30233 *numRb = lrgTbInfo->dlGrnt.numRb;
30234 #ifdef XEON_TDD_SPCL
30235 if((lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA || othrTbInfo->sfType == RG_SCH_SPL_SF_DATA))
30237 if(lrgTbInfo->sfType == RG_SCH_SPL_SF_DATA)
30239 *numRb = (lrgTbInfo->initTxNumRbs);
30243 *numRb = (othrTbInfo->initTxNumRbs);
30246 if(subFrm->sfType != RG_SCH_SPL_SF_DATA)
30248 *numRb = (*numRb)*3/4;
30253 RLOG1(L_ERROR," Number of RBs [%d] is less than or equal to 3",*numRb);
30258 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30262 /* Update the subframe Allocated BW field */
30263 subFrm->bwAssigned += *numRb;
30264 RG_SCH_CMN_GET_MCS_FOR_RETX(lrgTbInfo, imcs1);
30265 RG_SCH_CMN_GET_MCS_FOR_RETX(othrTbInfo, imcs2);
30266 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], lrgTbInfo->tbSz, \
30267 0, imcs1, lrgTbInfo, lrgTbInfo->numLyrs);
30268 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[1], othrTbInfo->tbSz, \
30269 0, imcs2, othrTbInfo, othrTbInfo->numLyrs);
30270 *effBo = lrgTbInfo->tbSz + othrTbInfo->tbSz;
30279 * @brief This function determines the RBs and Bytes required for BO
30280 * Retransmission on 1 CW.
30284 * Function: rgSCHCmnDlAlloc1CwRetxRb
30285 * Purpose: This function determines the RBs and Bytes required
30286 * for BO Retransmission on 1 CW, the first CW.
30287 * Returns RFAILED if BO not satisfied at all.
30289 * Invoked by: Common Scheduler
30291 * @param[in] RgSchCellCb *cell
30292 * @param[in] RgSchDlSf *subFrm
30293 * @param[in] RgSchUeCb *ue
30294 * @param[in] RgSchDlHqTbCb *tbInfo
30295 * @param[in] U8 noLyr
30296 * @param[out] U8 *numRb
30297 * @param[out] U32 *effBo
30302 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb
30307 RgSchDlHqTbCb *tbInfo,
30313 PRIVATE S16 rgSCHCmnDlAlloc1CwRetxRb(cell, subFrm, ue, tbInfo, noLyr,\
30318 RgSchDlHqTbCb *tbInfo;
30324 RgSchDlRbAlloc *allocInfo;
30327 TRC2(rgSCHCmnDlAlloc1CwRetxRb);
30329 allocInfo = RG_SCH_CMN_GET_ALLOCCB_FRM_UE(ue,cell);
30332 /* Fix for ccpu00123919: In case of RETX TB scheduling avoiding recomputation of RB
30333 * and Tbs. Set all parameters same as Init TX except RV(only for NACKED) and
30335 *numRb = tbInfo->dlGrnt.numRb;
30336 if ((S16)*numRb > (S16)(subFrm->bw - subFrm->bwAssigned))
30340 /* Update the subframe Allocated BW field */
30341 subFrm->bwAssigned += *numRb;
30342 imcs = tbInfo->dlGrnt.iMcs;
30343 allocInfo->dciFormat = tbInfo->dlGrnt.dciFormat;
30344 /* Fix: For a RETX TB the iTbs is irrelevant, hence setting 0 */
30345 RG_SCH_CMN_FILL_DL_TBINFO(&allocInfo->tbInfo[0], tbInfo->tbSz, \
30346 0, imcs, tbInfo, tbInfo->numLyrs);
30347 *effBo = tbInfo->tbSz;
30355 * @brief This function is called to handle Release PDCCH feedback for SPS UE
30359 * Function: rgSCHCmnDlRelPdcchFbk
30360 * Purpose: Invokes SPS module to handle release PDCCH feedback
30364 * @param[in] RgSchCellCb *cell
30365 * @param[in] RgSchUeCb *ue
30366 * @param[in] Bool isAck
30371 PUBLIC Void rgSCHCmnDlRelPdcchFbk
30378 PUBLIC Void rgSCHCmnDlRelPdcchFbk(cell, ue, isAck)
30385 TRC2(rgSCHCmnDlRelPdcchFbk);
30386 rgSCHCmnSpsDlRelPdcchFbk(cell, ue, isAck);
30393 * @brief This function is invoked to handle Ack processing for a HARQ proc.
30397 * Function: rgSCHCmnDlProcAck
30398 * Purpose: DTX processing for HARQ proc
30402 * @param[in] RgSchCellCb *cell
30403 * @param[in] RgSchDlHqProcCb *hqP
30408 PUBLIC Void rgSCHCmnDlProcAck
30411 RgSchDlHqProcCb *hqP
30414 PUBLIC Void rgSCHCmnDlProcAck(cell, hqP)
30416 RgSchDlHqProcCb *hqP;
30420 TRC2(rgSCHCmnDlProcAck);
30422 if (RG_SCH_CMN_SPS_DL_IS_SPS_HQP(hqP))
30424 /* Invoke SPS module if SPS service was scheduled for this HARQ proc */
30425 rgSCHCmnSpsDlProcAck(cell, hqP);
30429 #ifdef RGSCH_SPS_STATS
30430 extern U32 rgSchStatCrntiCeRcvCnt;
30433 * @brief This function is invoked to handle CRNTI CE reception for an UE
30437 * Function: rgSCHCmnHdlCrntiCE
30438 * Purpose: Handle CRNTI CE reception
30442 * @param[in] RgSchCellCb *cell
30443 * @param[in] RgSchDlHqProcCb *hqP
30448 PUBLIC Void rgSCHCmnHdlCrntiCE
30454 PUBLIC Void rgSCHCmnHdlCrntiCE(cell, ue)
30460 TRC2(rgSCHCmnHdlCrntiCE);
30461 #ifdef RGSCH_SPS_STATS
30462 rgSchStatCrntiCeRcvCnt++;
30465 /* When UL sync is lost due to TA timer expiry, the UE is moved to the
30466 PDCCH order inactivity list. But when a CRNTI CE is received in msg3 from the UE
30467 we are not moving UE into active state due to that RRC Reconfiguration is
30469 So here we are moving UE to active list whenever we receive the CRNTI CE and
30471 /* CR ccpu00144525 */
30472 if (RG_SCH_CMN_IS_UE_PDCCHODR_INACTV(ue))
30474 /* Activate this UE if it was inactive */
30475 RG_SCH_CMN_DL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30476 RG_SCH_CMN_UL_UPDT_INACTV_MASK ( cell, ue, RG_PDCCHODR_INACTIVE);
30479 /* Handling is same as reception of UE RESET for both DL and UL */
30480 if (ue->dl.dlSpsCfg.isDlSpsEnabled)
30482 rgSCHCmnSpsDlUeReset(cell, ue);
30484 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30486 rgSCHCmnSpsUlUeReset(cell, ue);
30494 * @brief This function is called to handle relInd from MAC for a UE
30498 * Function: rgSCHCmnUlSpsRelInd
30499 * Purpose: Invokes SPS module to handle UL SPS release for a UE
30501 * Invoked by: SCH_UTL
30503 * @param[in] RgSchCellCb *cell
30504 * @param[in] RgSchUeCb *ue
30505 * @param[in] Bool isExplRel
30510 PUBLIC Void rgSCHCmnUlSpsRelInd
30517 PUBLIC Void rgSCHCmnUlSpsRelInd(cell, ue, isExplRel)
30524 TRC2(rgSCHCmnUlSpsRelInd);
30525 rgSCHCmnSpsUlProcRelInd(cell, ue, isExplRel);
30528 } /* end of rgSCHCmnUlSpsRelInd */
30531 * @brief This function is called to handle SPS Activate Ind from MAC for a UE
30535 * Function: rgSCHCmnUlSpsActInd
30536 * Purpose: Invokes SPS module to handle UL SPS activate for a UE
30538 * Invoked by: SCH_UTL
30540 * @param[in] RgSchCellCb *cell
30541 * @param[in] RgSchUeCb *ue
30546 PUBLIC Void rgSCHCmnUlSpsActInd
30553 PUBLIC Void rgSCHCmnUlSpsActInd(cell, ue,spsSduSize)
30560 TRC2(rgSCHCmnUlSpsActInd);
30562 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30564 rgSCHCmnSpsUlProcActInd(cell, ue,spsSduSize);
30568 } /* end of rgSCHCmnUlSpsActInd */
30571 * @brief This function is called to handle CRC in UL for UEs
30572 * undergoing SPS release
30576 * Function: rgSCHCmnUlCrcInd
30577 * Purpose: Invokes SPS module to handle CRC in UL for SPS UE
30579 * Invoked by: SCH_UTL
30581 * @param[in] RgSchCellCb *cell
30582 * @param[in] RgSchUeCb *ue
30583 * @param[in] CmLteTimingInfo crcTime
30588 PUBLIC Void rgSCHCmnUlCrcInd
30592 CmLteTimingInfo crcTime
30595 PUBLIC Void rgSCHCmnUlCrcInd(cell, ue, crcTime)
30598 CmLteTimingInfo crcTime;
30602 TRC2(rgSCHCmnUlCrcInd);
30603 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30605 rgSCHCmnSpsUlProcCrcInd(cell, ue, crcTime);
30609 } /* end of rgSCHCmnUlCrcFailInd */
30612 * @brief This function is called to handle CRC failure in UL
30616 * Function: rgSCHCmnUlCrcFailInd
30617 * Purpose: Invokes SPS module to handle CRC failure in UL for SPS UE
30619 * Invoked by: SCH_UTL
30621 * @param[in] RgSchCellCb *cell
30622 * @param[in] RgSchUeCb *ue
30623 * @param[in] CmLteTimingInfo crcTime
30628 PUBLIC Void rgSCHCmnUlCrcFailInd
30632 CmLteTimingInfo crcTime
30635 PUBLIC Void rgSCHCmnUlCrcFailInd(cell, ue, crcTime)
30638 CmLteTimingInfo crcTime;
30642 TRC2(rgSCHCmnUlCrcFailInd);
30643 if (ue->ul.ulSpsCfg.isUlSpsEnabled == TRUE)
30645 rgSCHCmnSpsUlProcDtxInd(cell, ue, crcTime);
30649 } /* end of rgSCHCmnUlCrcFailInd */
30651 #endif /* LTEMAC_SPS */
30654 * @brief BCH,BCCH,PCCH Downlink Scheduling Handler.
30658 * Function: rgSCHCmnDlBcchPcchAlloc
30659 * Purpose: This function calls common scheduler APIs to
30660 * schedule for BCCH/PCCH.
30661 * It then invokes Allocator for actual RB
30662 * allocations. It then processes the actual resources allocated
30663 * against those requested from the allocator module.
30665 * Invoked by: Common Scheduler
30667 * @param[in] RgSchCellCb *cell
30671 PRIVATE Void rgSCHCmnDlBcchPcchAlloc
30676 PRIVATE Void rgSCHCmnDlBcchPcchAlloc(cell)
30681 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_SF_ALLOC_SIZE;
30683 #ifdef LTEMAC_HDFDD
30684 U8 nextSfIdx = (cell->crntSfIdx + RG_SCH_CMN_HARQ_INTERVAL) % RGSCH_NUM_SUB_FRAMES;
30686 U8 nextSfIdx = (cell->crntSfIdx) % RGSCH_NUM_SUB_FRAMES;
30689 RgInfSfAlloc *nextsfAlloc = &(cell->sfAllocArr[nextSfIdx]);
30690 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30691 RgSchCmnDlRbAllocInfo *allocInfo = &cellSch->allocInfo;
30693 TRC2(rgSCHCmnDlBcchPcchAlloc);
30696 /*Reset the bitmask for BCCH/PCCH*/
30697 rgSCHUtlResetSfAlloc(nextsfAlloc,TRUE,FALSE);
30698 #ifndef DISABLE_MIB_SIB /* Not sending MIB and SIB to CL */
30700 rgSCHChkNUpdSiCfg(cell);
30701 rgSCHSelectSi(cell);
30704 /*Perform the scheduling for BCCH,PCCH*/
30705 rgSCHCmnDlBcchPcch(cell, allocInfo, nextsfAlloc);
30707 /* Call common allocator for RB Allocation */
30708 rgSCHBcchPcchDlRbAlloc(cell, allocInfo);
30710 /* Finalize the allocations for requested against allocated */
30711 rgSCHCmnDlBcchPcchFnlz(cell, allocInfo);
30712 #endif /* DISABLE_MIB_SIB */
30717 * @brief Handles RB allocation for BCCH/PCCH for downlink.
30721 * Function : rgSCHBcchPcchDlRbAlloc
30723 * Invoking Module Processing:
30724 * - This function is invoked for DL RB allocation of BCCH/PCCH
30726 * Processing Steps:
30727 * - If cell is frequency selective,
30728 * - Call rgSCHDlfsBcchPcchAllocRb().
30730 * - Do the processing
30732 * @param[in] RgSchCellCb *cell
30733 * @param[in] RgSchDlRbAllocInfo *allocInfo
30738 PRIVATE Void rgSCHBcchPcchDlRbAlloc
30741 RgSchCmnDlRbAllocInfo *allocInfo
30744 PRIVATE Void rgSCHBcchPcchDlRbAlloc(cell, allocInfo)
30746 RgSchCmnDlRbAllocInfo *allocInfo;
30749 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
30751 TRC2(rgSCHBcchPcchDlRbAlloc);
30754 if (cellSch->dl.isDlFreqSel)
30756 cellSch->apisDlfs->rgSCHDlfsBcchPcchAllocRb(cell, allocInfo);
30760 rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo);
30767 * @brief Handles RB allocation for BCCH,PCCH for frequency
30768 * non-selective cell.
30772 * Function : rgSCHCmnNonDlfsBcchPcchRbAlloc
30774 * Invoking Module Processing:
30775 * - SCH shall invoke this if downlink frequency selective is disabled for
30776 * the cell for RB allocation.
30777 * - MAX C/I/PFS/RR shall provide the requiredBytes, required RBs
30778 * estimate and subframe for each allocation to be made to SCH.
30780 * Processing Steps:
30781 * - Allocate sequentially for BCCH,PCCH common channels.
30783 * @param[in] RgSchCellCb *cell
30784 * @param[in] RgSchCmnDlRbAllocInfo *allocInfo
30789 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc
30792 RgSchCmnDlRbAllocInfo *allocInfo
30795 PRIVATE Void rgSCHCmnNonDlfsBcchPcchRbAlloc(cell, allocInfo)
30797 RgSchCmnDlRbAllocInfo *allocInfo;
30800 RgSchDlRbAlloc *reqAllocInfo;
30802 TRC2(rgSCHCmnNonDlfsBcchPcchRbAlloc);
30805 /* Allocate for PCCH */
30806 reqAllocInfo = &(allocInfo->pcchAlloc);
30807 if (reqAllocInfo->rbsReq)
30809 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30811 /* Allocate for BCCH on DLSCH */
30812 reqAllocInfo = &(allocInfo->bcchAlloc);
30813 if (reqAllocInfo->rbsReq)
30815 rgSCHCmnNonDlfsCmnRbAlloc(cell, reqAllocInfo);
30823 * @brief This function implements the handling to check and
30824 * update the SI cfg at the start of the modification period.
30828 * Function: rgSCHChkNUpdSiCfg
30829 * Purpose: This function implements handling for update of SI Cfg
30830 * at the start of modification period.
30832 * Invoked by: Scheduler
30834 * @param[in] RgSchCellCb* cell
30840 PRIVATE Void rgSCHChkNUpdSiCfg
30845 PRIVATE Void rgSCHChkNUpdSiCfg(cell)
30849 CmLteTimingInfo pdSchTmInfo;
30851 TRC2(rgSCHChkNUpdSiCfg);
30854 pdSchTmInfo = cell->crntTime;
30855 #ifdef LTEMAC_HDFDD
30856 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
30857 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
30858 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
30860 RGSCH_INCR_SUB_FRAME(pdSchTmInfo, RG_SCH_CMN_DL_DELTA);
30864 /* Updating the SIB1 for Warning SI message immediately after it is received
30865 * from application. No need to wait for next modification period.
30867 if((pdSchTmInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
30868 && (RGSCH_SIB1_TX_SF_NUM == (pdSchTmInfo.slot % RGSCH_NUM_SUB_FRAMES)))
30870 /*Check whether SIB1 with PWS has been updated*/
30871 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_PWS_UPD)
30873 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30874 cell->siCb.newSiInfo.sib1Info.sib1);
30875 cell->siCb.crntSiInfo.sib1Info.mcs =
30876 cell->siCb.newSiInfo.sib1Info.mcs;
30877 cell->siCb.crntSiInfo.sib1Info.nPrb =
30878 cell->siCb.newSiInfo.sib1Info.nPrb;
30879 cell->siCb.crntSiInfo.sib1Info.msgLen =
30880 cell->siCb.newSiInfo.sib1Info.msgLen;
30881 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_PWS_UPD;
30885 /*Check if this SFN and SF No marks the start of next modification
30886 period. If the current SFN,SF No doesn't mark the start of the next
30887 modification period, then return. */
30888 if(!((pdSchTmInfo.sfn % cell->siCfg.modPrd == 0)
30889 && (0 == pdSchTmInfo.slot)))
30890 /*if(!((((pdSchTmInfo.hSfn * 1024) + pdSchTmInfo.sfn) % cell->siCfg.modPrd == 0)
30891 && (0 == pdSchTmInfo.slot)))*/
30896 /*Check whether MIB has been updated*/
30897 if(cell->siCb.siBitMask & RGSCH_SI_MIB_UPD)
30899 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.mib,
30900 cell->siCb.newSiInfo.mib);
30901 cell->siCb.siBitMask &= ~RGSCH_SI_MIB_UPD;
30904 /*Check whether SIB1 has been updated*/
30905 if(cell->siCb.siBitMask & RGSCH_SI_SIB1_UPD)
30907 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.sib1Info.sib1,
30908 cell->siCb.newSiInfo.sib1Info.sib1);
30909 cell->siCb.crntSiInfo.sib1Info.mcs = cell->siCb.newSiInfo.sib1Info.mcs;
30910 cell->siCb.crntSiInfo.sib1Info.nPrb = cell->siCb.newSiInfo.sib1Info.nPrb;
30911 cell->siCb.crntSiInfo.sib1Info.msgLen =
30912 cell->siCb.newSiInfo.sib1Info.msgLen;
30913 cell->siCb.siBitMask &= ~RGSCH_SI_SIB1_UPD;
30916 /*Check whether SIs have been updated*/
30917 if(cell->siCb.siBitMask & RGSCH_SI_SI_UPD)
30921 /*Check if the SI cfg has been modified and if numSi has
30922 changed; if yes, then we need to update the
30923 pointers for all the SIs */
30924 if((cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD) &&
30925 (cell->siCfg.numSi != cell->siCb.newSiCfg.numSi))
30927 for(idx = 0;idx < cell->siCb.newSiCfg.numSi;idx++)
30929 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
30930 cell->siCb.newSiInfo.siInfo[idx].si);
30931 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
30932 cell->siCb.siArray[idx].isWarningSi = FALSE;
30934 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
30935 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
30936 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
30939 /*If numSi has been reduced then we need to free the
30940 pointers at the indexes in crntSiInfo which haven't
30941 been exercised. If numSi has increased then nothing
30942 additional is required as the above handling has taken
30944 if(cell->siCfg.numSi > cell->siCb.newSiCfg.numSi)
30946 for(idx = cell->siCb.newSiCfg.numSi;
30947 idx < cell->siCfg.numSi;idx++)
30949 RGSCH_FREE_MSG(cell->siCb.crntSiInfo.siInfo[idx].si);
30950 cell->siCb.siArray[idx].si = NULLP;
30956 /*numSi has not been updated; we just need to update the
30957 pointers for the SIs which are not NULLP */
30958 /*ccpu00118260 - Correct Update of SIB2 */
30959 for(idx = 0;idx < cell->siCfg.numSi;idx++)
30961 if(NULLP != cell->siCb.newSiInfo.siInfo[idx].si)
30963 RGSCH_SET_SI_INFO(cell->siCb.crntSiInfo.siInfo[idx].si,
30964 cell->siCb.newSiInfo.siInfo[idx].si);
30966 cell->siCb.siArray[idx].si = cell->siCb.crntSiInfo.siInfo[idx].si;
30967 cell->siCb.siArray[idx].isWarningSi = FALSE;
30968 cell->siCb.crntSiInfo.siInfo[idx].mcs = cell->siCb.newSiInfo.siInfo[idx].mcs;
30969 cell->siCb.crntSiInfo.siInfo[idx].nPrb = cell->siCb.newSiInfo.siInfo[idx].nPrb;
30970 cell->siCb.crntSiInfo.siInfo[idx].msgLen = cell->siCb.newSiInfo.siInfo[idx].msgLen;
30974 cell->siCb.siBitMask &= ~RGSCH_SI_SI_UPD;
30977 /*Check whether SI cfg have been updated*/
30978 if(cell->siCb.siBitMask & RGSCH_SI_SICFG_UPD)
30980 cell->siCfg = cell->siCb.newSiCfg;
30981 cell->siCb.siBitMask &= ~RGSCH_SI_SICFG_UPD;
30989 * @brief This function implements the selection of the SI
30990 * that is to be scheduled.
30994 * Function: rgSCHSelectSi
30995 * Purpose: This function implements the selection of SI
30996 * that is to be scheduled.
30998 * Invoked by: Scheduler
31000 * @param[in] RgSchCellCb* cell
31006 PRIVATE Void rgSCHSelectSi
31011 PRIVATE Void rgSCHSelectSi(cell)
31015 CmLteTimingInfo crntTmInfo;
31020 TRC2(rgSCHSelectSi);
31023 crntTmInfo = cell->crntTime;
31024 #ifdef LTEMAC_HDFDD
31025 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31026 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31027 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31029 RGSCH_INCR_SUB_FRAME(crntTmInfo, RG_SCH_CMN_DL_DELTA);
31032 siWinSize = cell->siCfg.siWinSize;
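/* siCb.inWindow counts down the subframes remaining in the current SI window; a new SI is selected only when a fresh window starts. */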
31034 /* Select SI only once at the starting of the new window */
31035 if(cell->siCb.inWindow)
31037 if ((crntTmInfo.sfn % cell->siCfg.minPeriodicity) == 0 &&
31038 crntTmInfo.slot == 0)
31040 /* Reinit inWindow at the beginning of every SI window */
31041 cell->siCb.inWindow = siWinSize - 1;
31045 cell->siCb.inWindow--;
31049 else /* New window. Re-init the winSize counter with the window length */
31051 if((cell->siCb.siArray[cell->siCb.siCtx.siId - 1].isWarningSi == TRUE)&&
31052 (cell->siCb.siCtx.retxCntRem != 0))
31054 rgSCHUtlFreeWarningSiPdu(cell);
31055 cell->siCb.siCtx.warningSiFlag = FALSE;
31058 cell->siCb.inWindow = siWinSize - 1;
31061 x = rgSCHCmnGetSiSetId(crntTmInfo.sfn, crntTmInfo.slot,
31062 cell->siCfg.minPeriodicity);
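/* 'x' identifies the SI set containing the current subframe; the offset of the subframe from the start of that set is used below to derive the SI window index (windowId), which must stay below RGR_MAX_NUM_SI. */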
31064 /* Window Id within a SI set. This window Id directly maps to a
31066 windowId = (((crntTmInfo.sfn * RGSCH_NUM_SUB_FRAMES_5G) +
31067 crntTmInfo.slot) - (x * (cell->siCfg.minPeriodicity * 10)))
31070 if(windowId >= RGR_MAX_NUM_SI)
31073 /* Update the siCtx if there is a valid SI and its periodicity
31075 if (NULLP != cell->siCb.siArray[windowId].si)
31077 /* Warning SI Periodicity is same as SIB2 Periodicity */
31078 if(((cell->siCb.siArray[windowId].isWarningSi == FALSE) &&
31079 (x % (cell->siCfg.siPeriodicity[windowId]
31080 /cell->siCfg.minPeriodicity) == 0)) ||
31081 ((cell->siCb.siArray[windowId].isWarningSi == TRUE) &&
31082 (x % (cell->siCfg.siPeriodicity[0]
31083 /cell->siCfg.minPeriodicity) == 0)))
31085 cell->siCb.siCtx.siId = windowId+1;
31086 cell->siCb.siCtx.retxCntRem = cell->siCfg.retxCnt;
31087 cell->siCb.siCtx.warningSiFlag = cell->siCb.siArray[windowId].
31089 cell->siCb.siCtx.timeToTx.sfn = crntTmInfo.sfn;
31090 cell->siCb.siCtx.timeToTx.slot = crntTmInfo.slot;
31092 RG_SCH_ADD_TO_CRNT_TIME(cell->siCb.siCtx.timeToTx,
31093 cell->siCb.siCtx.maxTimeToTx, (siWinSize - 1))
31097 {/* Update the siCtx with invalid si Id */
31098 cell->siCb.siCtx.siId = 0;
31106 * @brief This function implements scheduler DL allocation for
31111 * Function: rgSCHDlSiSched
31112 * Purpose: This function implements scheduler for DL allocation
31115 * Invoked by: Scheduler
31117 * @param[in] RgSchCellCb* cell
31123 PRIVATE Void rgSCHDlSiSched
31126 RgSchCmnDlRbAllocInfo *allocInfo,
31127 RgInfSfAlloc *subfrmAlloc
31130 PRIVATE Void rgSCHDlSiSched(cell, allocInfo, subfrmAlloc)
31132 RgSchCmnDlRbAllocInfo *allocInfo;
31133 RgInfSfAlloc *subfrmAlloc;
31136 CmLteTimingInfo crntTimInfo;
31142 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31143 /* DwPTS Scheduling Changes Start */
31146 U8 cfi = cellDl->currCfi;
31148 /* DwPTS Scheduling Changes End */
31150 TRC2(rgSCHDlSiSched);
31153 crntTimInfo = cell->crntTime;
31154 #ifdef LTEMAC_HDFDD
31155 /* For HDFDD we need scheduling information at least RG_SCH_CMN_DL_DELTA
31156 + RG_SCH_CMN_HARQ_INTERVAL (7) subframes ahead */
31157 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA + RG_SCH_CMN_HARQ_INTERVAL);
31159 RGSCH_INCR_SUB_FRAME(crntTimInfo, RG_SCH_CMN_DL_DELTA);
31162 /* Compute the subframe for which allocation is being made.
31163 Essentially, we need a pointer to the dl frame for this subframe */
31164 sf = rgSCHUtlSubFrmGet(cell, crntTimInfo);
31166 /*Check if scheduling of MIB is required */
31168 /* Since we are adding the MIB repetition logic for EMTC UEs, check whether
31169 * emtcEnable is set. If enabled, the MIB is repeated as part of the EMTC
31170 * feature; otherwise it is scheduled at (n,0) */
31171 if(0 == cell->emtcEnable)
31174 if((crntTimInfo.sfn % RGSCH_MIB_PERIODICITY == 0)
31175 && (RGSCH_MIB_TX_SF_NUM == crntTimInfo.slot))
31178 U8 sfnOctet, mibOct2 = 0;
31180 /*If MIB has not been yet setup by Application, return*/
31181 if(NULLP == cell->siCb.crntSiInfo.mib)
31184 SFndLenMsg(cell->siCb.crntSiInfo.mib, &mibLen);
31185 sf->bch.tbSize = mibLen;
31186 /*Fill the interface information */
31187 rgSCHUtlFillRgInfCmnLcInfo(sf, subfrmAlloc, NULLD, NULLD);
31189 /*Set the bits of MIB to reflect SFN */
31190 /*First get the most significant 8 bits of SFN */
31191 sfnOctet = (U8)(crntTimInfo.sfn >> 2);
31192 /*Get the first two octets of MIB, and then update them
31193 using the SFN octet value obtained above.*/
31194 if(ROK != SExamMsg((Data *)(&mibOct1),
31195 cell->siCb.crntSiInfo.mib, 0))
31198 if(ROK != SExamMsg((Data *)(&mibOct2),
31199 cell->siCb.crntSiInfo.mib, 1))
31202 /* ccpu00114572- Fix for improper way of MIB Octet setting for SFN */
31203 mibOct1 = (mibOct1 & 0xFC) | (sfnOctet >> 6);
31204 mibOct2 = (mibOct2 & 0x03) | (sfnOctet << 2);
31205 /* ccpu00114572- Fix ends*/
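/* The 8 MSBs of the 10-bit SFN span the last 2 bits of MIB octet 0 and the first 6 bits of octet 1; e.g. sfn = 0x3FC gives sfnOctet = 0xFF, setting all 8 of those bit positions. */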
31207 /*Now, replace the two octets in MIB */
31208 if(ROK != SRepMsg((Data)(mibOct1),
31209 cell->siCb.crntSiInfo.mib, 0))
31212 if(ROK != SRepMsg((Data)(mibOct2),
31213 cell->siCb.crntSiInfo.mib, 1))
31216 /*Copy the MIB msg buff into interface buffer */
31217 SCpyMsgMsg(cell->siCb.crntSiInfo.mib,
31218 rgSchCb[cell->instIdx].rgSchInit.region,
31219 rgSchCb[cell->instIdx].rgSchInit.pool,
31220 &subfrmAlloc->cmnLcInfo.bchInfo.pdu);
31221 /* Added Dl TB count for MIB message transmission
31222 * This counter is incremented 4 times to consider
31223 * the retransmission at the PHY level on PBCH channel*/
31225 cell->dlUlTbCnt.tbTransDlTotalCnt += RG_SCH_MIB_CNT;
31232 allocInfo->bcchAlloc.schdFirst = FALSE;
31233 /*Check if scheduling of SIB1 is required.
31234 Check of (crntTimInfo.sfn % RGSCH_SIB1_PERIODICITY == 0)
31235 is not required here since the below check takes care
31236 of SFNs applicable for this one too.*/
31237 if((crntTimInfo.sfn % RGSCH_SIB1_RPT_PERIODICITY == 0)
31238 && (RGSCH_SIB1_TX_SF_NUM == crntTimInfo.slot))
31240 /*If SIB1 has not been yet setup by Application, return*/
31241 if(NULLP == (cell->siCb.crntSiInfo.sib1Info.sib1))
31246 allocInfo->bcchAlloc.schdFirst = TRUE;
31247 mcs = cell->siCb.crntSiInfo.sib1Info.mcs;
31248 nPrb = cell->siCb.crntSiInfo.sib1Info.nPrb;
31249 msgLen = cell->siCb.crntSiInfo.sib1Info.msgLen;
31253 /*Check if scheduling of SI can be performed.*/
31254 Bool invalid = FALSE;
31256 if(cell->siCb.siCtx.siId == 0)
31259 /*Check if the Si-Window for the current Si-Context is completed*/
31260 invalid = rgSCHCmnChkPastWin(crntTimInfo, cell->siCb.siCtx.maxTimeToTx);
31263 /* LTE_ADV_FLAG_REMOVED_START */
31264 if(cell->siCb.siCtx.retxCntRem)
31266 RGSCHLOGERROR(cell->instIdx,ERRCLS_INT_PAR,ERG011,(ErrVal)cell->siCb.siCtx.siId,
31267 "rgSCHDlSiSched(): SI not scheduled and window expired");
31269 /* LTE_ADV_FLAG_REMOVED_END */
31270 if(cell->siCb.siCtx.warningSiFlag == TRUE)
31272 rgSCHUtlFreeWarningSiPdu(cell);
31273 cell->siCb.siCtx.warningSiFlag = FALSE;
31278 /*Check the timinginfo of the current SI-Context to see if its
31279 transmission can be scheduled. */
31280 if(FALSE == (rgSCHCmnChkInWin(crntTimInfo,
31281 cell->siCb.siCtx.timeToTx,
31282 cell->siCb.siCtx.maxTimeToTx)))
31287 /*Check if retransmission count has become 0*/
31288 if(0 == cell->siCb.siCtx.retxCntRem)
31293 /* LTE_ADV_FLAG_REMOVED_START */
31294 /* Check if ABS is enabled/configured */
31295 if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
31297 /* The pattern type is RGR_ABS_MUTE, then eNB need to blank the subframe */
31298 if(cell->lteAdvCb.absCfg.absPatternType & RGR_ABS_MUTE)
31300 /* Determine next scheduling subframe is ABS or not */
31301 if(RG_SCH_ABS_ENABLED_ABS_SF == (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern
31302 [((crntTimInfo.sfn*RGSCH_NUM_SUB_FRAMES) + crntTimInfo.slot) % RGR_ABS_PATTERN_LEN]))
31304 /* Skip the SI scheduling to next tti */
31309 /* LTE_ADV_FLAG_REMOVED_END */
31311 /*Schedule the transmission of the current SI-Context */
31312 /*Find out the messg length for the SI message */
31313 /* warningSiFlag is to differentiate between Warning SI
31315 if((rgSCHUtlGetMcsAndNPrb(cell, &nPrb, &mcs, &msgLen)) != ROK)
31320 cell->siCb.siCtx.i = RGSCH_CALC_SF_DIFF(crntTimInfo,
31321 cell->siCb.siCtx.timeToTx);
31325 /*Get the number of rb required */
31326 /*rgSCHCmnClcRbAllocForFxdTb(cell, msgLen, cellDl->ccchCqi, &rb);*/
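/* If bitsPerRb has not yet been derived for the cell, walk the iTbs-0, single-layer TB size table for the smallest RB count that carries msgLen bytes; otherwise compute the RB count directly from bits-per-RB. */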
31327 if(cellDl->bitsPerRb==0)
31329 while ((rgTbSzTbl[0][0][rb]) < (U32) (msgLen*8))
31337 rb = RGSCH_CEIL((msgLen*8), cellDl->bitsPerRb);
31339 /* DwPTS Scheduling Changes Start */
31341 if (sf->sfType == RG_SCH_SPL_SF_DATA)
31343 RGSCH_GET_SPS_SF_CFI(cell->bwCfg.dlTotalBw, cfi);
31345 /* Calculate the less RE's because of DwPTS */
31346 lostRe = rb * (cellDl->noResPerRb[cfi] - cellDl->numReDwPts[cfi]);
31348 /* Increase number of RBs in Spl SF to compensate for lost REs */
31349 rb += RGSCH_CEIL(lostRe, cellDl->numReDwPts[cfi]);
31352 /* DwPTS Scheduling Changes End */
31353 /*ccpu00115595- end*/
31354 /* Additional check to see if required RBs
31355 * exceed the available */
31356 if (rb > sf->bw - sf->bwAssigned)
31358 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "rgSCHDlSiSched(): "
31359 "BW allocation failed CRNTI:%d",RGSCH_SI_RNTI);
31363 /* Update the subframe Allocated BW field */
31364 sf->bwAssigned = sf->bwAssigned + rb;
31366 /*Fill the parameters in allocInfo */
31367 allocInfo->bcchAlloc.rnti = RGSCH_SI_RNTI;
31368 allocInfo->bcchAlloc.dlSf = sf;
31369 allocInfo->bcchAlloc.rbsReq = rb;
31370 /*ccpu00116710- MCS is not getting assigned */
31371 allocInfo->bcchAlloc.tbInfo[0].imcs = mcs;
31373 /* ccpu00117510 - ADD - Assignment of nPrb and other information */
31374 allocInfo->bcchAlloc.nPrb = nPrb;
31375 allocInfo->bcchAlloc.tbInfo[0].bytesReq = msgLen;
31376 allocInfo->bcchAlloc.tbInfo[0].noLyr = 1;
31379 #endif /*RGR_SI_SCH*/
31382 /* ccpu00117452 - MOD - Changed macro name from
31383 RGR_RRM_DLPWR_CNTRL to RGR_CQI_REPT */
31384 #ifdef RGR_CQI_REPT
31386 * @brief This function Updates the DL CQI for the UE.
31390 * Function: rgSCHCmnUeDlPwrCtColltCqiRept
31391 * Purpose: Manages PUSH N CQI reporting
31392 * Step 1: Store the CQI in collation array
31393 * Step 2: Increment the tracking count
31394 * Step 3: Check if it is time to send the report
31395 * Step 4: If yes, send StaInd to RRM
31396 * Step 4.1: Fill StaInd for sending collated N CQI reports
31397 * Step 4.2: Call utility function (rgSCHUtlRgrStaInd) to send reports to RRM
31398 * Step 4.2.1: If sending was not successful, return RFAILED
31399 * Step 4.2.2: If sending was successful, return ROK
31400 * Step 5: If no, return
31401 * Invoked by: rgSCHCmnDlCqiInd
31403 * @param[in] RgSchCellCb *cell
31404 * @param[in] RgSchUeCb *ue
31405 * @param[in] RgrUeCqiRept *ueCqiRpt
31410 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept
31414 RgrUeCqiRept *ueCqiRpt
31417 PRIVATE S16 rgSCHCmnUeDlPwrCtColltCqiRept(cell, ue, ueCqiRpt)
31420 RgrUeCqiRept *ueCqiRpt;
31423 U8 *cqiCount = NULLP;
31425 RgrStaIndInfo *staInfo = NULLP;
31427 TRC2(rgSCHCmnUeDlPwrCtColltCqiRept)
31429 /* Step 1: Store the CQI in collation array */
31430 /* Step 2: Increment the tracking count */
31431 cqiCount = &(ue->schCqiInfo.cqiCount);
31432 ue->schCqiInfo.cqiRept[(*cqiCount)++] =
31436 /* Step 3: Check if it is time to send the report */
31437 if(RG_SCH_CQIR_IS_TIMTOSEND_CQIREPT(ue))
31439 /* Step 4: if yes, Send StaInd to RRM */
31440 retVal = rgSCHUtlAllocSBuf (cell->instIdx,(Data**)&staInfo,
31441 sizeof(RgrStaIndInfo));
31444 RLOG_ARG1(L_ERROR,DBG_CELLID,cell->cellId, "Could not "
31445 "allocate memory for sending StaInd CRNTI:%d",ue->ueId);
31449 /* Step 4.1: Fill StaInd for sending collated N CQI reports */
31452 extern U32 gCqiReptToAppCount;
31453 gCqiReptToAppCount++;
31458 retVal = rgSCHUtlFillSndStaInd(cell, ue, staInfo,
31459 ue->cqiReptCfgInfo.numColltdCqiRept);
31465 } /* End of rgSCHCmnUeDlPwrCtColltCqiRept */
31467 #endif /* End of RGR_CQI_REPT */
31470 * @brief This function checks for the retransmission
31471 * for a DTX scenario.
31478 * @param[in] RgSchCellCb *cell
31479 * @param[in] RgSchUeCb *ue
31485 PUBLIC Void rgSCHCmnChkRetxAllowDtx
31489 RgSchDlHqProcCb *proc,
31493 PUBLIC Void rgSCHCmnChkRetxAllowDtx(cell, ueCb, proc, reTxAllwd)
31496 RgSchDlHqProcCb *proc;
31500 TRC3(rgSCHCmnChkRetxAllowDtx)
31505 if ((proc->tbInfo[0].isAckNackDtx == TFU_HQFDB_DTX))
31507 *reTxAllwd = FALSE;
31514 * @brief API for calculating the SI Set Id
31518 * Function: rgSCHCmnGetSiSetId
31520 * This API is used for calculating the SI Set Id, as shown below
31522 * siSetId = 0 siSetId = 1
31523 * |******************|******************|---------------->
31524 * (0,0) (8,0) (16,0) (SFN, SF)
31527 * @param[in] U16 sfn
31529 * @return U16 siSetId
31532 PUBLIC U16 rgSCHCmnGetSiSetId
31539 PUBLIC U16 rgSCHCmnGetSiSetId(sfn, sf, minPeriodicity)
31542 U16 minPeriodicity;
31545 /* 80 is the minimum SI periodicity in sf. Also
31546 * all other SI periodicities are multiples of 80 */
31547 return (((sfn * RGSCH_NUM_SUB_FRAMES_5G) + sf) / (minPeriodicity * 10));
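/* Example (assuming RGSCH_NUM_SUB_FRAMES_5G is 10 and minPeriodicity is 8 frames): (sfn = 8, sf = 0) yields (80 + 0) / 80 = 1, i.e. the second SI set, matching the diagram above. */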
31551 * @brief API for calculating the DwPts Rb, Itbs and tbSz
31555 * Function: rgSCHCmnCalcDwPtsTbSz
31557 * @param[in] RgSchCellCb *cell
31558 * @param[in] U32 bo
31559 * @param[in/out] U8 *rb
31560 * @param[in/out] U8 *iTbs
31561 * @param[in] U8 lyr
31562 * @param[in] U8 cfi
31566 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz
31576 PRIVATE U32 rgSCHCmnCalcDwPtsTbSz(cell, bo, rb, iTbs, lyr, cfi)
31586 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31587 U32 numRE = *rb * cellDl->noResPerRb[cfi];
31588 U32 numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);
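/* Convert the normal-SF RB requirement into DwPTS RBs: keep the RE count constant by dividing by the (smaller) number of REs available per RB in the DwPTS region. */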
31590 TRC2(rgSCHCmnCalcDwPtsTbSz);
31592 /* DwPts Rb cannot exceed the cell Bw */
31593 numDwPtsRb = RGSCH_MIN(numDwPtsRb, cellDl->maxDlBwPerUe);
31595 /* Adjust the iTbs for optimum usage of the DwPts region.
31596 * Using the same iTbs adjustment will not work for all
31597 * special subframe configurations and iTbs levels. Hence use the
31598 * static iTbs Delta table for adjusting the iTbs */
31599 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs);
31603 while(rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1] < bo*8 &&
31604 numDwPtsRb < cellDl->maxDlBwPerUe)
31609 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
31613 tbSz = rgTbSzTbl[lyr-1][*iTbs][RGSCH_MAX(numDwPtsRb*3/4,1)-1];
31621 * @brief API for calculating the DwPts Rb, Itbs and tbSz
31625 * Function: rgSCHCmnCalcDwPtsTbSz2Cw
31627 * @param[in] RgSchCellCb *cell
31628 * @param[in] U32 bo
31629 * @param[in/out] U8 *rb
31630 * @param[in] U8 maxRb
31631 * @param[in/out] U8 *iTbs1
31632 * @param[in/out] U8 *iTbs2
31633 * @param[in] U8 lyr1
31634 * @param[in] U8 lyr2
31635 * @return[in/out] U32 *tb1Sz
31636 * @return[in/out] U32 *tb2Sz
31637 * @param[in] U8 cfi
31640 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw
31655 PRIVATE Void rgSCHCmnCalcDwPtsTbSz2Cw(cell, bo, rb, maxRb, iTbs1, iTbs2,
31656 lyr1, lyr2, tb1Sz, tb2Sz, cfi)
31670 RgSchCmnDlCell *cellDl = RG_SCH_CMN_GET_DL_CELL(cell);
31671 U32 numRE = *rb * cellDl->noResPerRb[cfi];
31672 U32 numDwPtsRb = RGSCH_CEIL(numRE, cellDl->numReDwPts[cfi]);
31674 TRC2(rgSCHCmnCalcDwPtsTbSz2Cw);
31676 /* DwPts Rb cannot exceed the cell Bw */
31677 numDwPtsRb = RGSCH_MIN(numDwPtsRb, maxRb);
31679 /* Adjust the iTbs for optimum usage of the DwPts region.
31680 * Using the same iTbs adjustment will not work for all
31681 * special subframe configurations and iTbs levels. Hence use the
31682 * static iTbs Delta table for adjusting the iTbs */
31683 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs1);
31684 RG_SCH_CMN_ADJ_DWPTS_ITBS(cellDl, *iTbs2);
31686 while((rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1] +
31687 rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1])< bo*8 &&
31688 numDwPtsRb < maxRb)
31693 *tb1Sz = rgTbSzTbl[lyr1-1][*iTbs1][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
31694 *tb2Sz = rgTbSzTbl[lyr2-1][*iTbs2][RGSCH_MAX(numDwPtsRb*3/4,1)-1]/8;
31704 * @brief Updates the GBR LCGs when datInd is received from MAC
31708 * Function: rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
31709 * Purpose: This function updates the GBR LCGs
31710 * when datInd is received from MAC.
31714 * @param[in] RgSchCellCb *cell
31715 * @param[in] RgSchUeCb *ue
31716 * @param[in] RgInfUeDatInd *datInd
31720 PUBLIC Void rgSCHCmnUpdUeDataIndLcg
31724 RgInfUeDatInd *datInd
31727 PUBLIC Void rgSCHCmnUpdUeDataIndLcg(cell, ue, datInd)
31730 RgInfUeDatInd *datInd;
31734 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
31736 Inst inst = cell->instIdx;
31739 TRC2(rgSCHCmnUpdUeDataIndLcg);
31741 for (idx = 0; (idx < RGINF_MAX_LCG_PER_UE - 1); idx++)
31743 if (datInd->lcgInfo[idx].bytesRcvd != 0)
31745 U8 lcgId = datInd->lcgInfo[idx].lcgId;
31746 U32 bytesRcvd = datInd->lcgInfo[idx].bytesRcvd;
31748 if (RGSCH_LCG_ISCFGD(&ue->ul.lcgArr[lcgId]))
31750 RgSchCmnLcg *cmnLcg = ((RgSchCmnLcg *)(ue->ul.lcgArr[lcgId].sch));
31751 if (RGSCH_IS_GBR_BEARER(cmnLcg->cfgdGbr))
31753 if(bytesRcvd > cmnLcg->effGbr)
31755 bytesRcvd -= cmnLcg->effGbr;
31756 cmnLcg->effDeltaMbr = (cmnLcg->effDeltaMbr > bytesRcvd) ? \
31757 (cmnLcg->effDeltaMbr - bytesRcvd) : (0);
31758 cmnLcg->effGbr = 0;
31762 cmnLcg->effGbr -= bytesRcvd;
31764 /* Keep the buffer status (BS) updated with the amount of data received for the GBR */
31765 cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31766 (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31767 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, cmnLcg->effGbr+cmnLcg->effDeltaMbr);
31769 else if(lcgId != 0)
31771 ue->ul.effAmbr = (ue->ul.effAmbr > datInd->lcgInfo[idx].bytesRcvd) ? \
31772 (ue->ul.effAmbr - datInd->lcgInfo[idx].bytesRcvd) : (0);
31773 cmnLcg->reportedBs = (cmnLcg->reportedBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31774 (cmnLcg->reportedBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31775 cmnLcg->bs = RGSCH_MIN(cmnLcg->reportedBs, ue->ul.effAmbr);
31776 ue->ul.nonGbrLcgBs = (ue->ul.nonGbrLcgBs > datInd->lcgInfo[idx].bytesRcvd) ? \
31777 (ue->ul.nonGbrLcgBs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31779 ue->ul.nonLcg0Bs = (ue->ul.nonLcg0Bs > datInd->lcgInfo[idx].bytesRcvd) ? \
31780 (ue->ul.nonLcg0Bs - datInd->lcgInfo[idx].bytesRcvd) : (0);
31789 if(TRUE == ue->isEmtcUe)
31791 if (cellSch->apisEmtcUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
31793 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));
31800 if (cellSch->apisUl->rgSCHRgrUlLcgUpd(cell, ue, datInd) != ROK)
31802 RGSCHDBGERRNEW(inst, (rgSchPBuf(inst), "\n rgSCHCmnUpdUeDataIndLcg(): rgSCHRgrUlLcgUpd returned failure"));
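/* Illustrative sketch (hypothetical helper, not part of this scheduler):
 * every bookkeeping update in the function above follows the same
 * saturating-subtract pattern, i.e. a byte counter is reduced by the bytes
 * just received but never allowed to wrap below zero. */
static U32 xmplSatSub(U32 counter, U32 bytesRcvd)
{
   return (counter > bytesRcvd) ? (counter - bytesRcvd) : 0;
}
/* For a GBR LCG the GBR budget is drained first and only the excess eats
 * into the delta MBR, which with the helper above would read:
 *    excess      = (bytesRcvd > effGbr) ? (bytesRcvd - effGbr) : 0;
 *    effGbr      = xmplSatSub(effGbr, bytesRcvd);
 *    effDeltaMbr = xmplSatSub(effDeltaMbr, excess);
 */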
31808 /** @brief This function initializes DL allocation lists and prepares
31813 * Function: rgSCHCmnInitRbAlloc
31815 * @param [in] RgSchCellCb *cell
31821 PRIVATE Void rgSCHCmnInitRbAlloc
31826 PRIVATE Void rgSCHCmnInitRbAlloc (cell)
31830 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
31831 CmLteTimingInfo frm;
31835 TRC2(rgSCHCmnInitRbAlloc);
31837 /* Initializing the DL RB allocation information structure. */
31838 rgSCHCmnInitDlRbAllocInfo(&cellSch->allocInfo);
31840 frm = cellSch->dl.time;
31842 dlSf = rgSCHUtlSubFrmGet(cell, frm);
31844 dlSf->numGrpPerTti = cell->cell5gtfCb.ueGrpPerTti;
31845 dlSf->numUePerGrp = cell->cell5gtfCb.uePerGrpPerTti;
31846 for(idx = 0; idx < MAX_5GTF_BEAMS; idx++)
31848 dlSf->sfBeamInfo[idx].totVrbgAllocated = 0;
31849 dlSf->sfBeamInfo[idx].totVrbgRequired = 0;
31850 dlSf->sfBeamInfo[idx].vrbgStart = 0;
31853 dlSf->remUeCnt = cellSch->dl.maxUePerDlSf;
31854 /* Updating the Subframe information in RBAllocInfo */
31855 cellSch->allocInfo.dedAlloc.dedDlSf = dlSf;
31856 cellSch->allocInfo.msg4Alloc.msg4DlSf = dlSf;
31858 /* LTE_ADV_FLAG_REMOVED_START */
31859 /* Determine whether the next scheduling subframe is an ABS or not */
31860 if(RGR_ENABLE == cell->lteAdvCb.absCfg.status)
31862 cell->lteAdvCb.absPatternDlIdx =
31863 ((frm.sfn*RGSCH_NUM_SUB_FRAMES_5G) + frm.slot) % RGR_ABS_PATTERN_LEN;
31864 cell->lteAdvCb.absDlSfInfo = (RgSchAbsSfEnum)(cell->lteAdvCb.absCfg.absPattern[
31865 cell->lteAdvCb.absPatternDlIdx]);
31870 cell->lteAdvCb.absDlSfInfo = RG_SCH_ABS_DISABLED;
31872 /* LTE_ADV_FLAG_REMOVED_END */
31875 cellSch->allocInfo.ccchSduAlloc.ccchSduDlSf = dlSf;
31878 /* Update subframe-wide allocation information with SPS allocation */
31879 rgSCHCmnSpsDlUpdDlSfAllocWithSps(cell, frm, dlSf);
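/* Illustrative sketch (hypothetical helper, not part of this scheduler):
 * the ABS lookup above reduces the absolute subframe number modulo the ABS
 * pattern length, so consecutive scheduling subframes walk cyclically
 * through absCfg.absPattern[]. */
static U32 xmplAbsPatternIdx(U16 sfn, U8 slot)
{
   return (((U32)sfn * RGSCH_NUM_SUB_FRAMES_5G) + slot) % RGR_ABS_PATTERN_LEN;
}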
31888 * @brief Sends the transmission mode (TM) change indication to RRM.
31893 * Function: rgSCHCmnSendTxModeInd(cell, ue, newTxMode)
31894 * Purpose: This function sends the TX mode change
31895 * indication to RRM.
31900 * @param[in] RgSchCellCb *cell
31901 * @param[in] RgSchUeCb *ue
31902 * @param[in] U8 newTxMode
31906 PRIVATE Void rgSCHCmnSendTxModeInd
31913 PRIVATE Void rgSCHCmnSendTxModeInd(cell, ue, newTxMode)
31919 RgmTransModeInd *txModeChgInd;
31920 RgSchCmnDlUe *ueDl = RG_SCH_CMN_GET_DL_UE(ue,cell);
31922 TRC2(rgSCHCmnSendTxModeInd);
31924 if(!(ueDl->mimoInfo.forceTD & RG_SCH_CMN_TD_TXMODE_RECFG))
31927 if(SGetSBuf(cell->rgmSap->sapCfg.sapPst.region,
31928 cell->rgmSap->sapCfg.sapPst.pool, (Data**)&txModeChgInd,
31929 sizeof(RgmTransModeInd)) != ROK)
31933 RG_SCH_FILL_RGM_TRANSMODE_IND(ue->ueId, cell->cellId, newTxMode, txModeChgInd);
31934 RgUiRgmChangeTransModeInd(&(cell->rgmSap->sapCfg.sapPst),
31935 cell->rgmSap->sapCfg.suId, txModeChgInd);
31938 ue->mimoInfo.txModUpChgFactor = 0;
31939 ue->mimoInfo.txModDownChgFactor = 0;
31940 ueDl->laCb[0].deltaiTbs = 0;
31946 * @brief Checks and updates the TM mode change threshold based on the CQI iTbs and
31951 * Function: rgSchCheckAndTriggerModeChange(cell, ue, reportediTbs, previTbs, maxiTbs)
31952 * Purpose: This function updates and checks the threshold for TM mode
31957 * @param[in] RgSchCellCb *cell
31958 * @param[in] RgSchUeCb *ue
31959 * @param[in] U8 iTbs
31963 PUBLIC Void rgSchCheckAndTriggerModeChange
31972 PUBLIC Void rgSchCheckAndTriggerModeChange(cell, ue, reportediTbs, previTbs, maxiTbs)
31980 RgrTxMode txMode; /*!< UE's Transmission Mode */
31981 RgrTxMode modTxMode; /*!< Transmission mode to be indicated */
31983 TRC2(rgSchCheckAndTriggerModeChange);
31985 txMode = ue->mimoInfo.txMode;
31987 /* Check for Step down */
31988 /* Step down only when TM4 is configured. */
31989 if(RGR_UE_TM_4 == txMode)
31991 if((previTbs <= reportediTbs) && ((reportediTbs - previTbs) >= RG_SCH_MODE_CHNG_STEPDOWN_CHECK_FACTOR))
31993 ue->mimoInfo.txModDownChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
31997 ue->mimoInfo.txModDownChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
32000 ue->mimoInfo.txModDownChgFactor =
32001 RGSCH_MAX(ue->mimoInfo.txModDownChgFactor, -(RG_SCH_MODE_CHNG_STEPDOWN_THRSHD));
32003 if(ue->mimoInfo.txModDownChgFactor >= RG_SCH_MODE_CHNG_STEPDOWN_THRSHD)
32005 /* Trigger Mode step down */
32006 modTxMode = RGR_UE_TM_3;
32007 rgSCHCmnSendTxModeInd(cell, ue, modTxMode);
32011 /* Check for step up */
32012 /* Step up only when TM3 is configured; the maximum possible mode is TM4 */
32013 if(RGR_UE_TM_3 == txMode)
32015 if((previTbs > reportediTbs) || (maxiTbs == previTbs))
32017 ue->mimoInfo.txModUpChgFactor += RG_SCH_MODE_CHNG_STEPUP_FACTOR;
32021 ue->mimoInfo.txModUpChgFactor -= RG_SCH_MODE_CHNG_STEPDOWN_FACTOR;
32024 ue->mimoInfo.txModUpChgFactor =
32025 RGSCH_MAX(ue->mimoInfo.txModUpChgFactor, -(RG_SCH_MODE_CHNG_STEPUP_THRSHD));
32027 /* Check if TM step up needs to be triggered */
32028 if(ue->mimoInfo.txModUpChgFactor >= RG_SCH_MODE_CHNG_STEPUP_THRSHD)
32030 /* Trigger mode change */
32031 modTxMode = RGR_UE_TM_4;
32032 rgSCHCmnSendTxModeInd(cell, ue, modTxMode);
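/* Illustrative sketch (hypothetical helper, not part of this scheduler):
 * both the step-down (TM4 -> TM3) and step-up (TM3 -> TM4) decisions above
 * are plain hysteresis counters. A favourable report pushes the counter up,
 * an unfavourable one pulls it down, the counter is floored at -threshold so
 * a long bad streak cannot build unbounded "debt", and the mode change is
 * triggered only once the counter reaches +threshold. */
static Bool xmplHystStep(S16 *cntr, Bool favourable, S16 up, S16 down, S16 thrshd)
{
   if (favourable)
   {
      *cntr += up;
   }
   else
   {
      *cntr -= down;
   }
   *cntr = RGSCH_MAX(*cntr, -thrshd);       /* clamp the lower bound        */
   return (*cntr >= thrshd) ? TRUE : FALSE; /* TRUE => trigger mode change  */
}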
32041 * @brief Checks whether CSG UEs have priority for DL resources at the current time
32045 * Function: rgSCHCmnIsDlCsgPrio (cell)
32046 * Purpose: This function returns if csg UEs are
32047 * having priority at current time
32049 * Invoked by: Scheduler
32051 * @param[in] RgSchCellCb *cell
32057 PUBLIC Bool rgSCHCmnIsDlCsgPrio
32062 PUBLIC Bool rgSCHCmnIsDlCsgPrio(cell)
32067 RgSchCmnDlCell *cmnDlCell = RG_SCH_CMN_GET_DL_CELL(cell);
32069 TRC2(rgSCHCmnIsDlCsgPrio)
32070 /* Calculating the percentage resource allocated */
32071 if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
32077 if(((cmnDlCell->ncsgPrbCnt * 100) / cmnDlCell->totPrbCnt) < cell->minDlResNonCsg)
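/* Illustrative sketch (hypothetical helper, not part of this scheduler):
 * the check above is an integer percentage comparison of the PRBs consumed
 * so far by non-CSG UEs against the configured minimum share reserved for
 * them. Note the multiply-before-divide to keep integer precision; the
 * total PRB count is assumed to be non-zero here. */
static Bool xmplNonCsgBelowMinShare(U32 ncsgPrbCnt, U32 totPrbCnt, U8 minResNonCsg)
{
   return ((((ncsgPrbCnt * 100) / totPrbCnt) < minResNonCsg) ? TRUE : FALSE);
}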
32089 * @brief Checks whether CSG UEs have priority for UL resources at the current time
32093 * Function: rgSCHCmnIsUlCsgPrio (cell)
32094 * Purpose: This function returns if csg UEs are
32095 * having priority at current time
32097 * Invoked by: Scheduler
32099 * @param[in] RgSchCellCb *cell
32105 PUBLIC Bool rgSCHCmnIsUlCsgPrio
32110 PUBLIC Bool rgSCHCmnIsUlCsgPrio(cell)
32114 RgSchCmnUlCell *cmnUlCell = RG_SCH_CMN_GET_UL_CELL(cell);
32116 TRC2(rgSCHCmnIsUlCsgPrio)
32118 /* Calculating the percentage resource allocated */
32119 if(RGR_CELL_ACCS_HYBRID != rgSchCb[cell->instIdx].rgrSchedEnbCfg.accsMode)
32125 if (((cmnUlCell->ncsgPrbCnt * 100) /cmnUlCell->totPrbCnt) < cell->minUlResNonCsg)
32136 /** @brief Pre-scheduling step for DL: runs the scheduler-specific pre-scheduling hook and orders the cells for scheduling
32140 * Function: rgSchCmnPreDlSch
32142 * @param [in] RgSchCellCb **cell, nCell, RgSchCellCb **cellLst
32147 PUBLIC Void rgSchCmnPreDlSch
32149 RgSchCellCb **cell,
32151 RgSchCellCb **cellLst
32154 PUBLIC Void rgSchCmnPreDlSch(cell, nCell, cellLst)
32155 RgSchCellCb **cell;
32157 RgSchCellCb **cellLst;
32160 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell[0]);
32164 TRC2(rgSchCmnPreDlSch);
32166 if(nCell > CM_LTE_MAX_CELLS)
32171 if (cell[0]->isDlDataAllwd && (cell[0]->stopDlSch == FALSE))
32173 /* Specific DL scheduler to perform UE scheduling */
32174 cellSch->apisDl->rgSCHDlPreSched(cell[0]);
32176 /* Rearranging the cell entries based on their remUeCnt in the SF.
32177 * Cells will be processed in the order of the number of UEs that can still be scheduled
32179 for (idx = 0; idx < nCell; idx++)
32182 cellSch = RG_SCH_CMN_GET_CELL(cell[idx]);
32183 sf = cellSch->allocInfo.dedAlloc.dedDlSf;
32187 cellLst[idx] = cell[idx];
32191 for(j = 0; j < idx; j++)
32193 RgSchCmnCell *cmnCell = RG_SCH_CMN_GET_CELL(cellLst[j]);
32194 RgSchDlSf *subfrm = cmnCell->allocInfo.dedAlloc.dedDlSf;
32196 if(sf->remUeCnt < subfrm->remUeCnt)
32199 for(k = idx; k > j; k--)
32201 cellLst[k] = cellLst[k-1];
32206 cellLst[j] = cell[idx];
32211 for (idx = 0; idx < nCell; idx++)
32213 cellLst[idx] = cell[idx];
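/* Illustrative sketch (hypothetical helper, not part of this scheduler):
 * the loop above is a plain insertion sort keyed on the destination
 * subframe's remUeCnt, so after the loop cellLst[] holds the cells in
 * ascending order of remaining schedulable UEs (going by the '<'
 * comparison). The caller is assumed to provide room for one more entry. */
static Void xmplInsertByKey(U32 *keys, U32 nKeys, U32 newKey)
{
   U32 j, k;
   for (j = 0; j < nKeys; j++)
   {
      if (newKey < keys[j])
      {
         break;                      /* found the slot for the new entry */
      }
   }
   for (k = nKeys; k > j; k--)
   {
      keys[k] = keys[k - 1];         /* shift larger keys one slot right */
   }
   keys[j] = newKey;
}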
32219 /** @brief Post-scheduling step for DL, invoked once DL scheduling for the cell is complete
32222 * Function: rgSchCmnPstDlSch
32224 * @param [in] RgSchCellCb *cell
32229 PUBLIC Void rgSchCmnPstDlSch
32234 PUBLIC Void rgSchCmnPstDlSch(cell)
32238 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
32240 TRC2(rgSchCmnPstDlSch);
32242 if (cell->isDlDataAllwd && (cell->stopDlSch == FALSE))
32244 cellSch->apisDl->rgSCHDlPstSched(cell->instIdx);
32249 PUBLIC U8 rgSCHCmnCalcPcqiBitSz
32255 PUBLIC U8 rgSCHCmnCalcPcqiBitSz(ueCb, numTxAnt)
32263 RgSchUePCqiCb *cqiCb = ueCb->nPCqiCb;
32265 TRC3(rgSCHCmnCalcPcqiBitSz);
32267 confRepMode = cqiCb->cqiCfg.cqiSetup.prdModeEnum;
32268 if((ueCb->mimoInfo.txMode != RGR_UE_TM_3) &&
32269 (ueCb->mimoInfo.txMode != RGR_UE_TM_4))
32275 ri = cqiCb->perRiVal;
32277 switch(confRepMode)
32279 case RGR_PRD_CQI_MOD10:
32285 case RGR_PRD_CQI_MOD11:
32298 else if(numTxAnt == 4)
32311 /* This is the single antenna port case (numTxAnt == 1).
32312 * This is not applicable for Mode 1-1,
32313 * so set it to an invalid value */
32319 case RGR_PRD_CQI_MOD20:
32327 pcqiSz = 4 + cqiCb->label;
32332 case RGR_PRD_CQI_MOD21:
32347 else if(numTxAnt == 4)
32360 /* This might be the single antenna port case (numTxAnt == 1).
32361 * For the Mode 2-1 wideband case only 2 or 4 antenna ports are supported,
32362 * so set an invalid value. */
32370 pcqiSz = 4 + cqiCb->label;
32374 pcqiSz = 7 + cqiCb->label;
32387 /** @brief DL scheduler for SPS, and all other downlink data
32391 * Function: rgSCHCmnDlSch
32393 * @param [in] RgSchCellCb *cell
32399 PUBLIC Void rgSCHCmnDlSch
32404 PUBLIC Void rgSCHCmnDlSch (cell)
32409 RgSchCmnCell *cellSch = RG_SCH_CMN_GET_CELL(cell);
32411 RgSchDynTddCb *rgSchDynTddInfo = &(rgSchCb[cell->instIdx].rgSchDynTdd);
32415 TRC2(rgSCHCmnDlSch);
32417 dlSf = rgSCHUtlSubFrmGet(cell, cellSch->dl.time);
32419 if (rgSchDynTddInfo->isDynTddEnbld)
32421 RG_SCH_DYN_TDD_GET_SFIDX(dlCntrlSfIdx, rgSchDynTddInfo->crntDTddSfIdx,
32422 RG_SCH_CMN_DL_DELTA);
32423 if(RG_SCH_DYNTDD_DLC_ULD == rgSchDynTddInfo->sfInfo[dlCntrlSfIdx].sfType)
32425 if(1 == cell->cellId)
32427 ul5gtfsidDlAlreadyMarkUl++;
32429 printf("ul5gtfsidDlAlreadyMarkUl: %d, [sfn:sf] [%04d:%02d]\n",
32430 ul5gtfsidDlAlreadyMarkUl, cellSch->dl.time.sfn,
32431 cellSch->dl.time.slot);
32439 /* Specific DL scheduler to perform UE scheduling */
32440 cellSch->apisDl->rgSCHDlNewSched(cell, &cellSch->allocInfo);
32441 /* LTE_ADV_FLAG_REMOVED_END */
32443 /* call common allocator for RB Allocation */
32444 rgSCHCmnDlRbAlloc(cell, &cellSch->allocInfo);
32446 /* Finalize the allocations: requested versus allocated */
32447 rgSCHCmnDlAllocFnlz(cell);
32449 /* Perform PDCCH allocations for the PDCCH Order queue.
32450 * As of now, this is given the least preference.
32451 * This function call could be moved above the other allocations
32453 rgSCHCmnGenPdcchOrder(cell, dlSf);
32455 /* Do group power control for PUCCH */
32456 rgSCHCmnGrpPwrCntrlPucch(cell, dlSf);
32461 /**********************************************************************
32464 **********************************************************************/